   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) 2017 Marvell
   4 *
   5 * Antoine Tenart <antoine.tenart@free-electrons.com>
   6 */
   7
   8#include <linux/unaligned.h>
   9#include <linux/device.h>
  10#include <linux/dma-mapping.h>
  11#include <linux/dmapool.h>
  12#include <crypto/aead.h>
  13#include <crypto/aes.h>
  14#include <crypto/authenc.h>
  15#include <crypto/chacha.h>
  16#include <crypto/ctr.h>
  17#include <crypto/internal/des.h>
  18#include <crypto/gcm.h>
  19#include <crypto/ghash.h>
  20#include <crypto/poly1305.h>
  21#include <crypto/sha1.h>
  22#include <crypto/sha2.h>
  23#include <crypto/sm3.h>
  24#include <crypto/sm4.h>
  25#include <crypto/xts.h>
  26#include <crypto/skcipher.h>
  27#include <crypto/internal/aead.h>
  28#include <crypto/internal/skcipher.h>
  29
  30#include "safexcel.h"
  31
  32enum safexcel_cipher_direction {
  33	SAFEXCEL_ENCRYPT,
  34	SAFEXCEL_DECRYPT,
  35};
  36
  37enum safexcel_cipher_alg {
  38	SAFEXCEL_DES,
  39	SAFEXCEL_3DES,
  40	SAFEXCEL_AES,
  41	SAFEXCEL_CHACHA20,
  42	SAFEXCEL_SM4,
  43};
  44
  45struct safexcel_cipher_ctx {
  46	struct safexcel_context base;
  47	struct safexcel_crypto_priv *priv;
  48
  49	u32 mode;
  50	enum safexcel_cipher_alg alg;
   51	u8 aead; /* !=0=AEAD, 2=IPsec ESP AEAD, 3=IPsec ESP GMAC */
  52	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
  53	u8 aadskip;
  54	u8 blocksz;
  55	u32 ivmask;
  56	u32 ctrinit;
  57
  58	__le32 key[16];
  59	u32 nonce;
  60	unsigned int key_len, xts;
  61
  62	/* All the below is AEAD specific */
  63	u32 hash_alg;
  64	u32 state_sz;
  65
  66	struct crypto_aead *fback;
  67};
  68
  69struct safexcel_cipher_req {
  70	enum safexcel_cipher_direction direction;
  71	/* Number of result descriptors associated to the request */
  72	unsigned int rdescs;
  73	bool needs_inv;
  74	int  nr_src, nr_dst;
  75};
  76
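     /* Load the IV (and, for CTR and ChaCha20, the nonce and counter) into the
      * command descriptor token words; returns the number of 32-bit words used.
      */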
  77static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
  78				struct safexcel_command_desc *cdesc)
  79{
  80	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
  81		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
  82		/* 32 bit nonce */
  83		cdesc->control_data.token[0] = ctx->nonce;
  84		/* 64 bit IV part */
  85		memcpy(&cdesc->control_data.token[1], iv, 8);
  86		/* 32 bit counter, start at 0 or 1 (big endian!) */
  87		cdesc->control_data.token[3] =
  88			(__force u32)cpu_to_be32(ctx->ctrinit);
  89		return 4;
  90	}
  91	if (ctx->alg == SAFEXCEL_CHACHA20) {
  92		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
  93		/* 96 bit nonce part */
  94		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
  95		/* 32 bit counter */
  96		cdesc->control_data.token[3] = *(u32 *)iv;
  97		return 4;
  98	}
  99
 100	cdesc->control_data.options |= ctx->ivmask;
 101	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
 102	return ctx->blocksz / sizeof(u32);
 103}
 104
 105static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 106				    struct safexcel_command_desc *cdesc,
 107				    struct safexcel_token *atoken,
 108				    u32 length)
 109{
 110	struct safexcel_token *token;
 111	int ivlen;
 112
 113	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
 114	if (ivlen == 4) {
 115		/* No space in cdesc, instruction moves to atoken */
 116		cdesc->additional_cdata_size = 1;
 117		token = atoken;
 118	} else {
 119		/* Everything fits in cdesc */
 120		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
 121		/* Need to pad with NOP */
 122		eip197_noop_token(&token[1]);
 123	}
 124
 125	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 126	token->packet_length = length;
 127	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
 128		      EIP197_TOKEN_STAT_LAST_HASH;
 129	token->instructions = EIP197_TOKEN_INS_LAST |
 130			      EIP197_TOKEN_INS_TYPE_CRYPTO |
 131			      EIP197_TOKEN_INS_TYPE_OUTPUT;
 132}
 133
 134static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
 135			     struct safexcel_command_desc *cdesc)
 136{
 137	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
 138	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
 139		/* 32 bit nonce */
 140		cdesc->control_data.token[0] = ctx->nonce;
 141		/* 64 bit IV part */
 142		memcpy(&cdesc->control_data.token[1], iv, 8);
 143		/* 32 bit counter, start at 0 or 1 (big endian!) */
 144		cdesc->control_data.token[3] =
 145			(__force u32)cpu_to_be32(ctx->ctrinit);
 146		return;
 147	}
 148	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
 149		/* 96 bit IV part */
 150		memcpy(&cdesc->control_data.token[0], iv, 12);
 151		/* 32 bit counter, start at 0 or 1 (big endian!) */
 152		cdesc->control_data.token[3] =
 153			(__force u32)cpu_to_be32(ctx->ctrinit);
 154		return;
 155	}
 156	/* CBC */
 157	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
 158}
 159
 160static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 161				struct safexcel_command_desc *cdesc,
 162				struct safexcel_token *atoken,
 163				enum safexcel_cipher_direction direction,
 164				u32 cryptlen, u32 assoclen, u32 digestsize)
 165{
 166	struct safexcel_token *aadref;
 167	int atoksize = 2; /* Start with minimum size */
 168	int assocadj = assoclen - ctx->aadskip, aadalign;
 169
 170	/* Always 4 dwords of embedded IV  for AEAD modes */
 171	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
 172
 173	if (direction == SAFEXCEL_DECRYPT)
 174		cryptlen -= digestsize;
 175
 176	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
 177		/* Construct IV block B0 for the CBC-MAC */
 178		u8 *final_iv = (u8 *)cdesc->control_data.token;
 179		u8 *cbcmaciv = (u8 *)&atoken[1];
 180		__le32 *aadlen = (__le32 *)&atoken[5];
 181
 182		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
 183			/* Length + nonce */
 184			cdesc->control_data.token[0] = ctx->nonce;
 185			/* Fixup flags byte */
 186			*(__le32 *)cbcmaciv =
 187				cpu_to_le32(ctx->nonce |
 188					    ((assocadj > 0) << 6) |
 189					    ((digestsize - 2) << 2));
 190			/* 64 bit IV part */
 191			memcpy(&cdesc->control_data.token[1], iv, 8);
 192			memcpy(cbcmaciv + 4, iv, 8);
 193			/* Start counter at 0 */
 194			cdesc->control_data.token[3] = 0;
 195			/* Message length */
 196			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
 197		} else {
 198			/* Variable length IV part */
 199			memcpy(final_iv, iv, 15 - iv[0]);
 200			memcpy(cbcmaciv, iv, 15 - iv[0]);
 201			/* Start variable length counter at 0 */
 202			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
 203			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
 204			/* fixup flags byte */
 205			cbcmaciv[0] |= ((assocadj > 0) << 6) |
 206				       ((digestsize - 2) << 2);
 207			/* insert lower 2 bytes of message length */
 208			cbcmaciv[14] = cryptlen >> 8;
 209			cbcmaciv[15] = cryptlen & 255;
 210		}
 211
 212		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 213		atoken->packet_length = AES_BLOCK_SIZE +
 214					((assocadj > 0) << 1);
 215		atoken->stat = 0;
 216		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
 217				       EIP197_TOKEN_INS_TYPE_HASH;
 218
 219		if (likely(assocadj)) {
 220			*aadlen = cpu_to_le32((assocadj >> 8) |
 221					      (assocadj & 255) << 8);
 222			atoken += 6;
 223			atoksize += 7;
 224		} else {
 225			atoken += 5;
 226			atoksize += 6;
 227		}
 228
 229		/* Process AAD data */
 230		aadref = atoken;
 231		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 232		atoken->packet_length = assocadj;
 233		atoken->stat = 0;
 234		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 235		atoken++;
 236
 237		/* For CCM only, align AAD data towards hash engine */
 238		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 239		aadalign = (assocadj + 2) & 15;
 240		atoken->packet_length = assocadj && aadalign ?
 241						16 - aadalign :
 242						0;
 243		if (likely(cryptlen)) {
 244			atoken->stat = 0;
 245			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 246		} else {
 247			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 248			atoken->instructions = EIP197_TOKEN_INS_LAST |
 249					       EIP197_TOKEN_INS_TYPE_HASH;
 250		}
 251	} else {
 252		safexcel_aead_iv(ctx, iv, cdesc);
 253
 254		/* Process AAD data */
 255		aadref = atoken;
 256		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 257		atoken->packet_length = assocadj;
 258		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 259		atoken->instructions = EIP197_TOKEN_INS_LAST |
 260				       EIP197_TOKEN_INS_TYPE_HASH;
 261	}
 262	atoken++;
 263
 264	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
 265		/* For ESP mode (and not GMAC), skip over the IV */
 266		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 267		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
 268		atoken->stat = 0;
 269		atoken->instructions = 0;
 270		atoken++;
 271		atoksize++;
 272	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
 273			    direction == SAFEXCEL_DECRYPT)) {
 274		/* Poly-chacha decryption needs a dummy NOP here ... */
 275		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 276		atoken->packet_length = 16; /* According to Op Manual */
 277		atoken->stat = 0;
 278		atoken->instructions = 0;
 279		atoken++;
 280		atoksize++;
 281	}
 282
 283	if  (ctx->xcm) {
 284		/* For GCM and CCM, obtain enc(Y0) */
 285		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
 286		atoken->packet_length = 0;
 287		atoken->stat = 0;
 288		atoken->instructions = AES_BLOCK_SIZE;
 289		atoken++;
 290
 291		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 292		atoken->packet_length = AES_BLOCK_SIZE;
 293		atoken->stat = 0;
 294		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 295				       EIP197_TOKEN_INS_TYPE_CRYPTO;
 296		atoken++;
 297		atoksize += 2;
 298	}
 299
 300	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
 301		/* Fixup stat field for AAD direction instruction */
 302		aadref->stat = 0;
 303
 304		/* Process crypto data */
 305		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 306		atoken->packet_length = cryptlen;
 307
 308		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
 309			/* Fixup instruction field for AAD dir instruction */
 310			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 311
 312			/* Do not send to crypt engine in case of GMAC */
 313			atoken->instructions = EIP197_TOKEN_INS_LAST |
 314					       EIP197_TOKEN_INS_TYPE_HASH |
 315					       EIP197_TOKEN_INS_TYPE_OUTPUT;
 316		} else {
 317			atoken->instructions = EIP197_TOKEN_INS_LAST |
 318					       EIP197_TOKEN_INS_TYPE_CRYPTO |
 319					       EIP197_TOKEN_INS_TYPE_HASH |
 320					       EIP197_TOKEN_INS_TYPE_OUTPUT;
 321		}
 322
 323		cryptlen &= 15;
 324		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
 325			atoken->stat = 0;
 326			/* For CCM only, pad crypto data to the hash engine */
 327			atoken++;
 328			atoksize++;
 329			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 330			atoken->packet_length = 16 - cryptlen;
 331			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 332			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 333		} else {
 334			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 335		}
 336		atoken++;
 337		atoksize++;
 338	}
 339
 340	if (direction == SAFEXCEL_ENCRYPT) {
 341		/* Append ICV */
 342		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 343		atoken->packet_length = digestsize;
 344		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
 345			       EIP197_TOKEN_STAT_LAST_PACKET;
 346		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 347				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 348	} else {
 349		/* Extract ICV */
 350		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
 351		atoken->packet_length = digestsize;
 352		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
 353			       EIP197_TOKEN_STAT_LAST_PACKET;
 354		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 355		atoken++;
 356		atoksize++;
 357
 358		/* Verify ICV */
 359		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
 360		atoken->packet_length = digestsize |
 361					EIP197_TOKEN_HASH_RESULT_VERIFY;
 362		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
 363			       EIP197_TOKEN_STAT_LAST_PACKET;
 364		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
 365	}
 366
 367	/* Fixup length of the token in the command descriptor */
 368	cdesc->additional_cdata_size = atoksize;
 369}
 370
 371static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
 372					const u8 *key, unsigned int len)
 373{
 374	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
 375	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 376	struct safexcel_crypto_priv *priv = ctx->base.priv;
 377	struct crypto_aes_ctx aes;
 378	int ret, i;
 379
 380	ret = aes_expandkey(&aes, key, len);
 381	if (ret)
 382		return ret;
 383
 384	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 385		for (i = 0; i < len / sizeof(u32); i++) {
 386			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 387				ctx->base.needs_inv = true;
 388				break;
 389			}
 390		}
 391	}
 392
 393	for (i = 0; i < len / sizeof(u32); i++)
 394		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
 395
 396	ctx->key_len = len;
 397
 398	memzero_explicit(&aes, sizeof(aes));
 399	return 0;
 400}
 401
 402static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 403				unsigned int len)
 404{
 405	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
 406	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 407	struct safexcel_crypto_priv *priv = ctx->base.priv;
 408	struct crypto_authenc_keys keys;
 409	struct crypto_aes_ctx aes;
 410	int err = -EINVAL, i;
 411	const char *alg;
 412
 413	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
 414		goto badkey;
 415
 416	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
 417		/* Must have at least space for the nonce here */
 418		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
 419			goto badkey;
 420		/* last 4 bytes of key are the nonce! */
 421		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
 422				      CTR_RFC3686_NONCE_SIZE);
 423		/* exclude the nonce here */
 424		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
 425	}
 426
 427	/* Encryption key */
 428	switch (ctx->alg) {
 429	case SAFEXCEL_DES:
 430		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
 431		if (unlikely(err))
 432			goto badkey;
 433		break;
 434	case SAFEXCEL_3DES:
 435		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
 436		if (unlikely(err))
 437			goto badkey;
 438		break;
 439	case SAFEXCEL_AES:
 440		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
 441		if (unlikely(err))
 442			goto badkey;
 443		break;
 444	case SAFEXCEL_SM4:
 445		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
 446			goto badkey;
 447		break;
 448	default:
 449		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
 450		goto badkey;
 451	}
 452
 453	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 454		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
 455			if (le32_to_cpu(ctx->key[i]) !=
 456			    ((u32 *)keys.enckey)[i]) {
 457				ctx->base.needs_inv = true;
 458				break;
 459			}
 460		}
 461	}
 462
 463	/* Auth key */
 464	switch (ctx->hash_alg) {
 465	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
 466		alg = "safexcel-sha1";
 467		break;
 468	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
 469		alg = "safexcel-sha224";
 470		break;
 471	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
 472		alg = "safexcel-sha256";
 473		break;
 474	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
 475		alg = "safexcel-sha384";
 476		break;
 477	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
 478		alg = "safexcel-sha512";
 479		break;
 480	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
 481		alg = "safexcel-sm3";
 482		break;
 483	default:
 484		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
 485		goto badkey;
 486	}
 487
 488	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
 489				 alg, ctx->state_sz))
 490		goto badkey;
 491
 492	/* Now copy the keys into the context */
 493	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
 494		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
 495	ctx->key_len = keys.enckeylen;
 496
 497	memzero_explicit(&keys, sizeof(keys));
 498	return 0;
 499
 500badkey:
 501	memzero_explicit(&keys, sizeof(keys));
 502	return err;
 503}
 504
 505static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 506				    struct crypto_async_request *async,
 507				    struct safexcel_cipher_req *sreq,
 508				    struct safexcel_command_desc *cdesc)
 509{
 510	struct safexcel_crypto_priv *priv = ctx->base.priv;
 511	int ctrl_size = ctx->key_len / sizeof(u32);
 512
 513	cdesc->control_data.control1 = ctx->mode;
 514
 515	if (ctx->aead) {
 516		/* Take into account the ipad+opad digests */
 517		if (ctx->xcm) {
 518			ctrl_size += ctx->state_sz / sizeof(u32);
 519			cdesc->control_data.control0 =
 520				CONTEXT_CONTROL_KEY_EN |
 521				CONTEXT_CONTROL_DIGEST_XCM |
 522				ctx->hash_alg |
 523				CONTEXT_CONTROL_SIZE(ctrl_size);
 524		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
 525			/* Chacha20-Poly1305 */
 526			cdesc->control_data.control0 =
 527				CONTEXT_CONTROL_KEY_EN |
 528				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
 529				(sreq->direction == SAFEXCEL_ENCRYPT ?
 530					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
 531					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
 532				ctx->hash_alg |
 533				CONTEXT_CONTROL_SIZE(ctrl_size);
 534			return 0;
 535		} else {
 536			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
 537			cdesc->control_data.control0 =
 538				CONTEXT_CONTROL_KEY_EN |
 539				CONTEXT_CONTROL_DIGEST_HMAC |
 540				ctx->hash_alg |
 541				CONTEXT_CONTROL_SIZE(ctrl_size);
 542		}
 543
 544		if (sreq->direction == SAFEXCEL_ENCRYPT &&
 545		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
 546		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
 547			cdesc->control_data.control0 |=
 548				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
 549		else if (sreq->direction == SAFEXCEL_ENCRYPT)
 550			cdesc->control_data.control0 |=
 551				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
 552		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
 553			cdesc->control_data.control0 |=
 554				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
 555		else
 556			cdesc->control_data.control0 |=
 557				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
 558	} else {
 559		if (sreq->direction == SAFEXCEL_ENCRYPT)
 560			cdesc->control_data.control0 =
 561				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
 562				CONTEXT_CONTROL_KEY_EN |
 563				CONTEXT_CONTROL_SIZE(ctrl_size);
 564		else
 565			cdesc->control_data.control0 =
 566				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
 567				CONTEXT_CONTROL_KEY_EN |
 568				CONTEXT_CONTROL_SIZE(ctrl_size);
 569	}
 570
 571	if (ctx->alg == SAFEXCEL_DES) {
 572		cdesc->control_data.control0 |=
 573			CONTEXT_CONTROL_CRYPTO_ALG_DES;
 574	} else if (ctx->alg == SAFEXCEL_3DES) {
 575		cdesc->control_data.control0 |=
 576			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
 577	} else if (ctx->alg == SAFEXCEL_AES) {
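     		/* For XTS ciphers ctx->xts is 1 and key_len holds both key halves,
     		 * so the shift yields the nominal AES key size. */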
 578		switch (ctx->key_len >> ctx->xts) {
 579		case AES_KEYSIZE_128:
 580			cdesc->control_data.control0 |=
 581				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
 582			break;
 583		case AES_KEYSIZE_192:
 584			cdesc->control_data.control0 |=
 585				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
 586			break;
 587		case AES_KEYSIZE_256:
 588			cdesc->control_data.control0 |=
 589				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
 590			break;
 591		default:
 592			dev_err(priv->dev, "aes keysize not supported: %u\n",
 593				ctx->key_len >> ctx->xts);
 594			return -EINVAL;
 595		}
 596	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
 597		cdesc->control_data.control0 |=
 598			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
 599	} else if (ctx->alg == SAFEXCEL_SM4) {
 600		cdesc->control_data.control0 |=
 601			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
 602	}
 603
 604	return 0;
 605}
 606
 607static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
 608				      struct crypto_async_request *async,
 609				      struct scatterlist *src,
 610				      struct scatterlist *dst,
 611				      unsigned int cryptlen,
 612				      struct safexcel_cipher_req *sreq,
 613				      bool *should_complete, int *ret)
 614{
 615	struct skcipher_request *areq = skcipher_request_cast(async);
 616	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
 617	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
 618	struct safexcel_result_desc *rdesc;
 619	int ndesc = 0;
 620
 621	*ret = 0;
 622
 623	if (unlikely(!sreq->rdescs))
 624		return 0;
 625
 626	while (sreq->rdescs--) {
 627		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 628		if (IS_ERR(rdesc)) {
 629			dev_err(priv->dev,
 630				"cipher: result: could not retrieve the result descriptor\n");
 631			*ret = PTR_ERR(rdesc);
 632			break;
 633		}
 634
 635		if (likely(!*ret))
 636			*ret = safexcel_rdesc_check_errors(priv, rdesc);
 637
 638		ndesc++;
 639	}
 640
 641	safexcel_complete(priv, ring);
 642
 643	if (src == dst) {
 644		if (sreq->nr_src > 0)
 645			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 646				     DMA_BIDIRECTIONAL);
 647	} else {
 648		if (sreq->nr_src > 0)
 649			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 650				     DMA_TO_DEVICE);
 651		if (sreq->nr_dst > 0)
 652			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
 653				     DMA_FROM_DEVICE);
 654	}
 655
 656	/*
 657	 * Update IV in req from last crypto output word for CBC modes
 658	 */
 659	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
 660	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
 661		/* For encrypt take the last output word */
 662		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
 663				   crypto_skcipher_ivsize(skcipher),
 664				   (cryptlen -
 665				    crypto_skcipher_ivsize(skcipher)));
 666	}
 667
 668	*should_complete = true;
 669
 670	return ndesc;
 671}
 672
 673static int safexcel_send_req(struct crypto_async_request *base, int ring,
 674			     struct safexcel_cipher_req *sreq,
 675			     struct scatterlist *src, struct scatterlist *dst,
 676			     unsigned int cryptlen, unsigned int assoclen,
 677			     unsigned int digestsize, u8 *iv, int *commands,
 678			     int *results)
 679{
 680	struct skcipher_request *areq = skcipher_request_cast(base);
 681	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
 682	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 683	struct safexcel_crypto_priv *priv = ctx->base.priv;
 684	struct safexcel_command_desc *cdesc;
 685	struct safexcel_command_desc *first_cdesc = NULL;
 686	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
 687	struct scatterlist *sg;
 688	unsigned int totlen;
 689	unsigned int totlen_src = cryptlen + assoclen;
 690	unsigned int totlen_dst = totlen_src;
 691	struct safexcel_token *atoken;
 692	int n_cdesc = 0, n_rdesc = 0;
 693	int queued, i, ret = 0;
 694	bool first = true;
 695
 696	sreq->nr_src = sg_nents_for_len(src, totlen_src);
 697
 698	if (ctx->aead) {
 699		/*
 700		 * AEAD has auth tag appended to output for encrypt and
 701		 * removed from the output for decrypt!
 702		 */
 703		if (sreq->direction == SAFEXCEL_DECRYPT)
 704			totlen_dst -= digestsize;
 705		else
 706			totlen_dst += digestsize;
 707
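     		/* Copy the precomputed inner (and, for non-XCM modes, outer) hash
     		 * digests into the context record right after the cipher key. */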
 708		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
 709		       &ctx->base.ipad, ctx->state_sz);
 710		if (!ctx->xcm)
 711			memcpy(ctx->base.ctxr->data + (ctx->key_len +
 712			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
 713			       ctx->state_sz);
 714	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
 715		   (sreq->direction == SAFEXCEL_DECRYPT)) {
 716		/*
 717		 * Save IV from last crypto input word for CBC modes in decrypt
 718	 * direction. Need to do this first in case of in-place operation
 719		 * as it will be overwritten.
 720		 */
 721		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
 722				   crypto_skcipher_ivsize(skcipher),
 723				   (totlen_src -
 724				    crypto_skcipher_ivsize(skcipher)));
 725	}
 726
 727	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
 728
 729	/*
 730	 * Remember the actual input length; the source buffer length may be
 731	 * updated below in case of in-place operation.
 732	 */
 733	totlen = totlen_src;
 734	queued = totlen_src;
 735
 736	if (src == dst) {
 737		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
 738		sreq->nr_dst = sreq->nr_src;
 739		if (unlikely((totlen_src || totlen_dst) &&
 740		    (sreq->nr_src <= 0))) {
 741			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
 742				max(totlen_src, totlen_dst));
 743			return -EINVAL;
 744		}
 745		if (sreq->nr_src > 0 &&
 746		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
 747			return -EIO;
 748	} else {
 749		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
 750			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
 751				totlen_src);
 752			return -EINVAL;
 753		}
 754
 755		if (sreq->nr_src > 0 &&
 756		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
 757			return -EIO;
 758
 759		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
 760			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
 761				totlen_dst);
 762			ret = -EINVAL;
 763			goto unmap;
 764		}
 765
 766		if (sreq->nr_dst > 0 &&
 767		    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
 768			ret = -EIO;
 769			goto unmap;
 770		}
 771	}
 772
 773	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
 774
 775	if (!totlen) {
 776		/*
 777		 * The EIP97 cannot deal with zero length input packets!
 778		 * So stuff a dummy command descriptor indicating a 1 byte
 779		 * (dummy) input packet, using the context record as source.
 780		 */
 781		first_cdesc = safexcel_add_cdesc(priv, ring,
 782						 1, 1, ctx->base.ctxr_dma,
 783						 1, 1, ctx->base.ctxr_dma,
 784						 &atoken);
 785		if (IS_ERR(first_cdesc)) {
 786			/* No space left in the command descriptor ring */
 787			ret = PTR_ERR(first_cdesc);
 788			goto cdesc_rollback;
 789		}
 790		n_cdesc = 1;
 791		goto skip_cdesc;
 792	}
 793
 794	/* command descriptors */
 795	for_each_sg(src, sg, sreq->nr_src, i) {
 796		int len = sg_dma_len(sg);
 797
 798		/* Do not overflow the request */
 799		if (queued < len)
 800			len = queued;
 801
 802		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
 803					   !(queued - len),
 804					   sg_dma_address(sg), len, totlen,
 805					   ctx->base.ctxr_dma, &atoken);
 806		if (IS_ERR(cdesc)) {
 807			/* No space left in the command descriptor ring */
 808			ret = PTR_ERR(cdesc);
 809			goto cdesc_rollback;
 810		}
 811
 812		if (!n_cdesc)
 813			first_cdesc = cdesc;
 814
 815		n_cdesc++;
 816		queued -= len;
 817		if (!queued)
 818			break;
 819	}
 820skip_cdesc:
 821	/* Add context control words and token to first command descriptor */
 822	safexcel_context_control(ctx, base, sreq, first_cdesc);
 823	if (ctx->aead)
 824		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
 825				    sreq->direction, cryptlen,
 826				    assoclen, digestsize);
 827	else
 828		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
 829					cryptlen);
 830
 831	/* result descriptors */
 832	for_each_sg(dst, sg, sreq->nr_dst, i) {
 833		bool last = (i == sreq->nr_dst - 1);
 834		u32 len = sg_dma_len(sg);
 835
 836		/* only allow the part of the buffer we know we need */
 837		if (len > totlen_dst)
 838			len = totlen_dst;
 839		if (unlikely(!len))
 840			break;
 841		totlen_dst -= len;
 842
 843		/* skip over AAD space in buffer - not written */
 844		if (assoclen) {
 845			if (assoclen >= len) {
 846				assoclen -= len;
 847				continue;
 848			}
 849			rdesc = safexcel_add_rdesc(priv, ring, first, last,
 850						   sg_dma_address(sg) +
 851						   assoclen,
 852						   len - assoclen);
 853			assoclen = 0;
 854		} else {
 855			rdesc = safexcel_add_rdesc(priv, ring, first, last,
 856						   sg_dma_address(sg),
 857						   len);
 858		}
 859		if (IS_ERR(rdesc)) {
 860			/* No space left in the result descriptor ring */
 861			ret = PTR_ERR(rdesc);
 862			goto rdesc_rollback;
 863		}
 864		if (first) {
 865			first_rdesc = rdesc;
 866			first = false;
 867		}
 868		n_rdesc++;
 869	}
 870
 871	if (unlikely(first)) {
 872		/*
 873		 * Special case: AEAD decrypt with only AAD data.
 874		 * In this case there is NO output data from the engine,
 875		 * but the engine still needs a result descriptor!
 876		 * Create a dummy one just for catching the result token.
 877		 */
 878		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
 879		if (IS_ERR(rdesc)) {
 880			/* No space left in the result descriptor ring */
 881			ret = PTR_ERR(rdesc);
 882			goto rdesc_rollback;
 883		}
 884		first_rdesc = rdesc;
 885		n_rdesc = 1;
 886	}
 887
 888	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
 889
 890	*commands = n_cdesc;
 891	*results = n_rdesc;
 892	return 0;
 893
 894rdesc_rollback:
 895	for (i = 0; i < n_rdesc; i++)
 896		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
 897cdesc_rollback:
 898	for (i = 0; i < n_cdesc; i++)
 899		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
 900unmap:
 901	if (src == dst) {
 902		if (sreq->nr_src > 0)
 903			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 904				     DMA_BIDIRECTIONAL);
 905	} else {
 906		if (sreq->nr_src > 0)
 907			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 908				     DMA_TO_DEVICE);
 909		if (sreq->nr_dst > 0)
 910			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
 911				     DMA_FROM_DEVICE);
 912	}
 913
 914	return ret;
 915}
 916
 917static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 918				      int ring,
 919				      struct crypto_async_request *base,
 920				      struct safexcel_cipher_req *sreq,
 921				      bool *should_complete, int *ret)
 922{
 923	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 924	struct safexcel_result_desc *rdesc;
 925	int ndesc = 0, enq_ret;
 926
 927	*ret = 0;
 928
 929	if (unlikely(!sreq->rdescs))
 930		return 0;
 931
 932	while (sreq->rdescs--) {
 933		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 934		if (IS_ERR(rdesc)) {
 935			dev_err(priv->dev,
 936				"cipher: invalidate: could not retrieve the result descriptor\n");
 937			*ret = PTR_ERR(rdesc);
 938			break;
 939		}
 940
 941		if (likely(!*ret))
 942			*ret = safexcel_rdesc_check_errors(priv, rdesc);
 943
 944		ndesc++;
 945	}
 946
 947	safexcel_complete(priv, ring);
 948
 949	if (ctx->base.exit_inv) {
 950		dma_pool_free(priv->context_pool, ctx->base.ctxr,
 951			      ctx->base.ctxr_dma);
 952
 953		*should_complete = true;
 954
 955		return ndesc;
 956	}
 957
 958	ring = safexcel_select_ring(priv);
 959	ctx->base.ring = ring;
 960
 961	spin_lock_bh(&priv->ring[ring].queue_lock);
 962	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
 963	spin_unlock_bh(&priv->ring[ring].queue_lock);
 964
 965	if (enq_ret != -EINPROGRESS)
 966		*ret = enq_ret;
 967
 968	queue_work(priv->ring[ring].workqueue,
 969		   &priv->ring[ring].work_data.work);
 970
 971	*should_complete = false;
 972
 973	return ndesc;
 974}
 975
 976static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
 977					   int ring,
 978					   struct crypto_async_request *async,
 979					   bool *should_complete, int *ret)
 980{
 981	struct skcipher_request *req = skcipher_request_cast(async);
 982	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 983	int err;
 984
 985	if (sreq->needs_inv) {
 986		sreq->needs_inv = false;
 987		err = safexcel_handle_inv_result(priv, ring, async, sreq,
 988						 should_complete, ret);
 989	} else {
 990		err = safexcel_handle_req_result(priv, ring, async, req->src,
 991						 req->dst, req->cryptlen, sreq,
 992						 should_complete, ret);
 993	}
 994
 995	return err;
 996}
 997
 998static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
 999				       int ring,
1000				       struct crypto_async_request *async,
1001				       bool *should_complete, int *ret)
1002{
1003	struct aead_request *req = aead_request_cast(async);
1004	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1005	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1006	int err;
1007
1008	if (sreq->needs_inv) {
1009		sreq->needs_inv = false;
1010		err = safexcel_handle_inv_result(priv, ring, async, sreq,
1011						 should_complete, ret);
1012	} else {
1013		err = safexcel_handle_req_result(priv, ring, async, req->src,
1014						 req->dst,
1015						 req->cryptlen + crypto_aead_authsize(tfm),
1016						 sreq, should_complete, ret);
1017	}
1018
1019	return err;
1020}
1021
1022static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1023				    int ring, int *commands, int *results)
1024{
1025	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1026	struct safexcel_crypto_priv *priv = ctx->base.priv;
1027	int ret;
1028
1029	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1030	if (unlikely(ret))
1031		return ret;
1032
1033	*commands = 1;
1034	*results = 1;
1035
1036	return 0;
1037}
1038
1039static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1040				  int *commands, int *results)
1041{
1042	struct skcipher_request *req = skcipher_request_cast(async);
1043	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1044	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1045	struct safexcel_crypto_priv *priv = ctx->base.priv;
1046	int ret;
1047
1048	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1049
1050	if (sreq->needs_inv) {
1051		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1052	} else {
1053		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1054		u8 input_iv[AES_BLOCK_SIZE];
1055
1056		/*
1057		 * Save input IV in case of CBC decrypt mode
1058		 * Will be overwritten with output IV prior to use!
1059		 */
1060		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1061
1062		ret = safexcel_send_req(async, ring, sreq, req->src,
1063					req->dst, req->cryptlen, 0, 0, input_iv,
1064					commands, results);
1065	}
1066
1067	sreq->rdescs = *results;
1068	return ret;
1069}
1070
1071static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1072			      int *commands, int *results)
1073{
1074	struct aead_request *req = aead_request_cast(async);
1075	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1076	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1077	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1078	struct safexcel_crypto_priv *priv = ctx->base.priv;
1079	int ret;
1080
1081	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1082
1083	if (sreq->needs_inv)
1084		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1085	else
1086		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1087					req->cryptlen, req->assoclen,
1088					crypto_aead_authsize(tfm), req->iv,
1089					commands, results);
1090	sreq->rdescs = *results;
1091	return ret;
1092}
1093
1094static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1095				    struct crypto_async_request *base,
1096				    struct safexcel_cipher_req *sreq,
1097				    struct crypto_wait *result)
1098{
1099	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1100	struct safexcel_crypto_priv *priv = ctx->base.priv;
1101	int ring = ctx->base.ring;
1102	int err;
1103
1104	ctx = crypto_tfm_ctx(base->tfm);
1105	ctx->base.exit_inv = true;
1106	sreq->needs_inv = true;
1107
1108	spin_lock_bh(&priv->ring[ring].queue_lock);
1109	crypto_enqueue_request(&priv->ring[ring].queue, base);
1110	spin_unlock_bh(&priv->ring[ring].queue_lock);
1111
1112	queue_work(priv->ring[ring].workqueue,
1113		   &priv->ring[ring].work_data.work);
1114
1115	err = crypto_wait_req(-EINPROGRESS, result);
1116
1117	if (err) {
1118		dev_warn(priv->dev,
1119			"cipher: sync: invalidate: completion error %d\n",
1120			 err);
1121		return err;
1122	}
1123
1124	return 0;
1125}
1126
1127static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1128{
1129	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1130	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1131	DECLARE_CRYPTO_WAIT(result);
1132
1133	memset(req, 0, sizeof(struct skcipher_request));
1134
1135	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1136				      crypto_req_done, &result);
1137	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1138
1139	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1140}
1141
1142static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1143{
1144	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1145	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1146	DECLARE_CRYPTO_WAIT(result);
1147
1148	memset(req, 0, sizeof(struct aead_request));
1149
1150	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1151				  crypto_req_done, &result);
1152	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1153
1154	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1155}
1156
1157static int safexcel_queue_req(struct crypto_async_request *base,
1158			struct safexcel_cipher_req *sreq,
1159			enum safexcel_cipher_direction dir)
1160{
1161	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1162	struct safexcel_crypto_priv *priv = ctx->base.priv;
1163	int ret, ring;
1164
1165	sreq->needs_inv = false;
1166	sreq->direction = dir;
1167
1168	if (ctx->base.ctxr) {
1169		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1170			sreq->needs_inv = true;
1171			ctx->base.needs_inv = false;
1172		}
1173	} else {
1174		ctx->base.ring = safexcel_select_ring(priv);
1175		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1176						 EIP197_GFP_FLAGS(*base),
1177						 &ctx->base.ctxr_dma);
1178		if (!ctx->base.ctxr)
1179			return -ENOMEM;
1180	}
1181
1182	ring = ctx->base.ring;
1183
1184	spin_lock_bh(&priv->ring[ring].queue_lock);
1185	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1186	spin_unlock_bh(&priv->ring[ring].queue_lock);
1187
1188	queue_work(priv->ring[ring].workqueue,
1189		   &priv->ring[ring].work_data.work);
1190
1191	return ret;
1192}
1193
1194static int safexcel_encrypt(struct skcipher_request *req)
1195{
1196	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1197			SAFEXCEL_ENCRYPT);
1198}
1199
1200static int safexcel_decrypt(struct skcipher_request *req)
1201{
1202	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1203			SAFEXCEL_DECRYPT);
1204}
1205
1206static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1207{
1208	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1209	struct safexcel_alg_template *tmpl =
1210		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1211			     alg.skcipher.base);
1212
1213	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1214				    sizeof(struct safexcel_cipher_req));
1215
1216	ctx->base.priv = tmpl->priv;
1217
1218	ctx->base.send = safexcel_skcipher_send;
1219	ctx->base.handle_result = safexcel_skcipher_handle_result;
1220	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1221	ctx->ctrinit = 1;
1222	return 0;
1223}
1224
1225static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1226{
1227	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1228
1229	memzero_explicit(ctx->key, sizeof(ctx->key));
1230
1231	/* context not allocated, skip invalidation */
1232	if (!ctx->base.ctxr)
1233		return -ENOMEM;
1234
1235	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1236	return 0;
1237}
1238
1239static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1240{
1241	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1242	struct safexcel_crypto_priv *priv = ctx->base.priv;
1243	int ret;
1244
1245	if (safexcel_cipher_cra_exit(tfm))
1246		return;
1247
1248	if (priv->flags & EIP197_TRC_CACHE) {
1249		ret = safexcel_skcipher_exit_inv(tfm);
1250		if (ret)
1251			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1252				 ret);
1253	} else {
1254		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1255			      ctx->base.ctxr_dma);
1256	}
1257}
1258
1259static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1260{
1261	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1262	struct safexcel_crypto_priv *priv = ctx->base.priv;
1263	int ret;
1264
1265	if (safexcel_cipher_cra_exit(tfm))
1266		return;
1267
1268	if (priv->flags & EIP197_TRC_CACHE) {
1269		ret = safexcel_aead_exit_inv(tfm);
1270		if (ret)
1271			dev_warn(priv->dev, "aead: invalidation error %d\n",
1272				 ret);
1273	} else {
1274		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1275			      ctx->base.ctxr_dma);
1276	}
1277}
1278
1279static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1280{
1281	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1282
1283	safexcel_skcipher_cra_init(tfm);
1284	ctx->alg  = SAFEXCEL_AES;
1285	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1286	ctx->blocksz = 0;
1287	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1288	return 0;
1289}
1290
1291struct safexcel_alg_template safexcel_alg_ecb_aes = {
1292	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1293	.algo_mask = SAFEXCEL_ALG_AES,
1294	.alg.skcipher = {
1295		.setkey = safexcel_skcipher_aes_setkey,
1296		.encrypt = safexcel_encrypt,
1297		.decrypt = safexcel_decrypt,
1298		.min_keysize = AES_MIN_KEY_SIZE,
1299		.max_keysize = AES_MAX_KEY_SIZE,
1300		.base = {
1301			.cra_name = "ecb(aes)",
1302			.cra_driver_name = "safexcel-ecb-aes",
1303			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1304			.cra_flags = CRYPTO_ALG_ASYNC |
1305				     CRYPTO_ALG_ALLOCATES_MEMORY |
1306				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1307			.cra_blocksize = AES_BLOCK_SIZE,
1308			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1309			.cra_alignmask = 0,
1310			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
1311			.cra_exit = safexcel_skcipher_cra_exit,
1312			.cra_module = THIS_MODULE,
1313		},
1314	},
1315};
1316
1317static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1318{
1319	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1320
1321	safexcel_skcipher_cra_init(tfm);
1322	ctx->alg  = SAFEXCEL_AES;
1323	ctx->blocksz = AES_BLOCK_SIZE;
1324	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1325	return 0;
1326}
1327
1328struct safexcel_alg_template safexcel_alg_cbc_aes = {
1329	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1330	.algo_mask = SAFEXCEL_ALG_AES,
1331	.alg.skcipher = {
1332		.setkey = safexcel_skcipher_aes_setkey,
1333		.encrypt = safexcel_encrypt,
1334		.decrypt = safexcel_decrypt,
1335		.min_keysize = AES_MIN_KEY_SIZE,
1336		.max_keysize = AES_MAX_KEY_SIZE,
1337		.ivsize = AES_BLOCK_SIZE,
1338		.base = {
1339			.cra_name = "cbc(aes)",
1340			.cra_driver_name = "safexcel-cbc-aes",
1341			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1342			.cra_flags = CRYPTO_ALG_ASYNC |
1343				     CRYPTO_ALG_ALLOCATES_MEMORY |
1344				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1345			.cra_blocksize = AES_BLOCK_SIZE,
1346			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1347			.cra_alignmask = 0,
1348			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
1349			.cra_exit = safexcel_skcipher_cra_exit,
1350			.cra_module = THIS_MODULE,
1351		},
1352	},
1353};
1354
1355static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1356					   const u8 *key, unsigned int len)
1357{
1358	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1359	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1360	struct safexcel_crypto_priv *priv = ctx->base.priv;
1361	struct crypto_aes_ctx aes;
1362	int ret, i;
1363	unsigned int keylen;
1364
1365	/* last 4 bytes of key are the nonce! */
1366	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1367	/* exclude the nonce here */
1368	keylen = len - CTR_RFC3686_NONCE_SIZE;
1369	ret = aes_expandkey(&aes, key, keylen);
1370	if (ret)
1371		return ret;
1372
1373	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1374		for (i = 0; i < keylen / sizeof(u32); i++) {
1375			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1376				ctx->base.needs_inv = true;
1377				break;
1378			}
1379		}
1380	}
1381
1382	for (i = 0; i < keylen / sizeof(u32); i++)
1383		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1384
1385	ctx->key_len = keylen;
1386
1387	memzero_explicit(&aes, sizeof(aes));
1388	return 0;
1389}
1390
1391static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1392{
1393	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395	safexcel_skcipher_cra_init(tfm);
1396	ctx->alg  = SAFEXCEL_AES;
1397	ctx->blocksz = AES_BLOCK_SIZE;
1398	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1399	return 0;
1400}
1401
1402struct safexcel_alg_template safexcel_alg_ctr_aes = {
1403	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1404	.algo_mask = SAFEXCEL_ALG_AES,
1405	.alg.skcipher = {
1406		.setkey = safexcel_skcipher_aesctr_setkey,
1407		.encrypt = safexcel_encrypt,
1408		.decrypt = safexcel_decrypt,
1409		/* Add nonce size */
1410		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1411		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1412		.ivsize = CTR_RFC3686_IV_SIZE,
1413		.base = {
1414			.cra_name = "rfc3686(ctr(aes))",
1415			.cra_driver_name = "safexcel-ctr-aes",
1416			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1417			.cra_flags = CRYPTO_ALG_ASYNC |
1418				     CRYPTO_ALG_ALLOCATES_MEMORY |
1419				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1420			.cra_blocksize = 1,
1421			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1422			.cra_alignmask = 0,
1423			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
1424			.cra_exit = safexcel_skcipher_cra_exit,
1425			.cra_module = THIS_MODULE,
1426		},
1427	},
1428};
1429
1430static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1431			       unsigned int len)
1432{
1433	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1434	struct safexcel_crypto_priv *priv = ctx->base.priv;
1435	int ret;
1436
1437	ret = verify_skcipher_des_key(ctfm, key);
1438	if (ret)
1439		return ret;
1440
1441	/* if context exists and key changed, need to invalidate it */
1442	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1443		if (memcmp(ctx->key, key, len))
1444			ctx->base.needs_inv = true;
1445
1446	memcpy(ctx->key, key, len);
1447	ctx->key_len = len;
1448
1449	return 0;
1450}
1451
1452static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1453{
1454	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1455
1456	safexcel_skcipher_cra_init(tfm);
1457	ctx->alg  = SAFEXCEL_DES;
1458	ctx->blocksz = DES_BLOCK_SIZE;
1459	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1460	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1461	return 0;
1462}
1463
1464struct safexcel_alg_template safexcel_alg_cbc_des = {
1465	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1466	.algo_mask = SAFEXCEL_ALG_DES,
1467	.alg.skcipher = {
1468		.setkey = safexcel_des_setkey,
1469		.encrypt = safexcel_encrypt,
1470		.decrypt = safexcel_decrypt,
1471		.min_keysize = DES_KEY_SIZE,
1472		.max_keysize = DES_KEY_SIZE,
1473		.ivsize = DES_BLOCK_SIZE,
1474		.base = {
1475			.cra_name = "cbc(des)",
1476			.cra_driver_name = "safexcel-cbc-des",
1477			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1478			.cra_flags = CRYPTO_ALG_ASYNC |
1479				     CRYPTO_ALG_ALLOCATES_MEMORY |
1480				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1481			.cra_blocksize = DES_BLOCK_SIZE,
1482			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1483			.cra_alignmask = 0,
1484			.cra_init = safexcel_skcipher_des_cbc_cra_init,
1485			.cra_exit = safexcel_skcipher_cra_exit,
1486			.cra_module = THIS_MODULE,
1487		},
1488	},
1489};
1490
1491static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1492{
1493	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1494
1495	safexcel_skcipher_cra_init(tfm);
1496	ctx->alg  = SAFEXCEL_DES;
1497	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1498	ctx->blocksz = 0;
1499	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1500	return 0;
1501}
1502
1503struct safexcel_alg_template safexcel_alg_ecb_des = {
1504	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1505	.algo_mask = SAFEXCEL_ALG_DES,
1506	.alg.skcipher = {
1507		.setkey = safexcel_des_setkey,
1508		.encrypt = safexcel_encrypt,
1509		.decrypt = safexcel_decrypt,
1510		.min_keysize = DES_KEY_SIZE,
1511		.max_keysize = DES_KEY_SIZE,
1512		.base = {
1513			.cra_name = "ecb(des)",
1514			.cra_driver_name = "safexcel-ecb-des",
1515			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1516			.cra_flags = CRYPTO_ALG_ASYNC |
1517				     CRYPTO_ALG_ALLOCATES_MEMORY |
1518				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1519			.cra_blocksize = DES_BLOCK_SIZE,
1520			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1521			.cra_alignmask = 0,
1522			.cra_init = safexcel_skcipher_des_ecb_cra_init,
1523			.cra_exit = safexcel_skcipher_cra_exit,
1524			.cra_module = THIS_MODULE,
1525		},
1526	},
1527};
1528
1529static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1530				   const u8 *key, unsigned int len)
1531{
1532	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1533	struct safexcel_crypto_priv *priv = ctx->base.priv;
1534	int err;
1535
1536	err = verify_skcipher_des3_key(ctfm, key);
1537	if (err)
1538		return err;
1539
1540	/* if context exists and key changed, need to invalidate it */
1541	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1542		if (memcmp(ctx->key, key, len))
1543			ctx->base.needs_inv = true;
1544
1545	memcpy(ctx->key, key, len);
1546	ctx->key_len = len;
1547
1548	return 0;
1549}
1550
1551static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1552{
1553	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1554
1555	safexcel_skcipher_cra_init(tfm);
1556	ctx->alg  = SAFEXCEL_3DES;
1557	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1558	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1559	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1560	return 0;
1561}
1562
1563struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1564	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1565	.algo_mask = SAFEXCEL_ALG_DES,
1566	.alg.skcipher = {
1567		.setkey = safexcel_des3_ede_setkey,
1568		.encrypt = safexcel_encrypt,
1569		.decrypt = safexcel_decrypt,
1570		.min_keysize = DES3_EDE_KEY_SIZE,
1571		.max_keysize = DES3_EDE_KEY_SIZE,
1572		.ivsize = DES3_EDE_BLOCK_SIZE,
1573		.base = {
1574			.cra_name = "cbc(des3_ede)",
1575			.cra_driver_name = "safexcel-cbc-des3_ede",
1576			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1577			.cra_flags = CRYPTO_ALG_ASYNC |
1578				     CRYPTO_ALG_ALLOCATES_MEMORY |
1579				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1580			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1581			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1582			.cra_alignmask = 0,
1583			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
1584			.cra_exit = safexcel_skcipher_cra_exit,
1585			.cra_module = THIS_MODULE,
1586		},
1587	},
1588};
1589
1590static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1591{
1592	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1593
1594	safexcel_skcipher_cra_init(tfm);
1595	ctx->alg  = SAFEXCEL_3DES;
1596	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1597	ctx->blocksz = 0;
1598	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1599	return 0;
1600}
1601
1602struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1603	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1604	.algo_mask = SAFEXCEL_ALG_DES,
1605	.alg.skcipher = {
1606		.setkey = safexcel_des3_ede_setkey,
1607		.encrypt = safexcel_encrypt,
1608		.decrypt = safexcel_decrypt,
1609		.min_keysize = DES3_EDE_KEY_SIZE,
1610		.max_keysize = DES3_EDE_KEY_SIZE,
1611		.base = {
1612			.cra_name = "ecb(des3_ede)",
1613			.cra_driver_name = "safexcel-ecb-des3_ede",
1614			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1615			.cra_flags = CRYPTO_ALG_ASYNC |
1616				     CRYPTO_ALG_ALLOCATES_MEMORY |
1617				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1618			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1619			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1620			.cra_alignmask = 0,
1621			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
1622			.cra_exit = safexcel_skcipher_cra_exit,
1623			.cra_module = THIS_MODULE,
1624		},
1625	},
1626};
1627
1628static int safexcel_aead_encrypt(struct aead_request *req)
1629{
1630	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1631
1632	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1633}
1634
1635static int safexcel_aead_decrypt(struct aead_request *req)
1636{
1637	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1638
1639	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1640}
1641
1642static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1643{
1644	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1645	struct safexcel_alg_template *tmpl =
1646		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1647			     alg.aead.base);
1648
1649	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1650				sizeof(struct safexcel_cipher_req));
1651
1652	ctx->base.priv = tmpl->priv;
1653
1654	ctx->alg  = SAFEXCEL_AES; /* default */
1655	ctx->blocksz = AES_BLOCK_SIZE;
1656	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1657	ctx->ctrinit = 1;
1658	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1659	ctx->aead = true;
1660	ctx->base.send = safexcel_aead_send;
1661	ctx->base.handle_result = safexcel_aead_handle_result;
1662	return 0;
1663}
1664
1665static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1666{
1667	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1668
1669	safexcel_aead_cra_init(tfm);
1670	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1671	ctx->state_sz = SHA1_DIGEST_SIZE;
1672	return 0;
1673}
1674
1675struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1676	.type = SAFEXCEL_ALG_TYPE_AEAD,
1677	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1678	.alg.aead = {
1679		.setkey = safexcel_aead_setkey,
1680		.encrypt = safexcel_aead_encrypt,
1681		.decrypt = safexcel_aead_decrypt,
1682		.ivsize = AES_BLOCK_SIZE,
1683		.maxauthsize = SHA1_DIGEST_SIZE,
1684		.base = {
1685			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1686			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1687			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1688			.cra_flags = CRYPTO_ALG_ASYNC |
1689				     CRYPTO_ALG_ALLOCATES_MEMORY |
1690				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1691			.cra_blocksize = AES_BLOCK_SIZE,
1692			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1693			.cra_alignmask = 0,
1694			.cra_init = safexcel_aead_sha1_cra_init,
1695			.cra_exit = safexcel_aead_cra_exit,
1696			.cra_module = THIS_MODULE,
1697		},
1698	},
1699};
1700
1701static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1702{
1703	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1704
1705	safexcel_aead_cra_init(tfm);
1706	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1707	ctx->state_sz = SHA256_DIGEST_SIZE;
1708	return 0;
1709}
1710
1711struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1712	.type = SAFEXCEL_ALG_TYPE_AEAD,
1713	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1714	.alg.aead = {
1715		.setkey = safexcel_aead_setkey,
1716		.encrypt = safexcel_aead_encrypt,
1717		.decrypt = safexcel_aead_decrypt,
1718		.ivsize = AES_BLOCK_SIZE,
1719		.maxauthsize = SHA256_DIGEST_SIZE,
1720		.base = {
1721			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1722			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1723			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1724			.cra_flags = CRYPTO_ALG_ASYNC |
1725				     CRYPTO_ALG_ALLOCATES_MEMORY |
1726				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1727			.cra_blocksize = AES_BLOCK_SIZE,
1728			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1729			.cra_alignmask = 0,
1730			.cra_init = safexcel_aead_sha256_cra_init,
1731			.cra_exit = safexcel_aead_cra_exit,
1732			.cra_module = THIS_MODULE,
1733		},
1734	},
1735};
1736
1737static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1738{
1739	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1740
1741	safexcel_aead_cra_init(tfm);
1742	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1743	ctx->state_sz = SHA256_DIGEST_SIZE;
1744	return 0;
1745}
1746
1747struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1748	.type = SAFEXCEL_ALG_TYPE_AEAD,
1749	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1750	.alg.aead = {
1751		.setkey = safexcel_aead_setkey,
1752		.encrypt = safexcel_aead_encrypt,
1753		.decrypt = safexcel_aead_decrypt,
1754		.ivsize = AES_BLOCK_SIZE,
1755		.maxauthsize = SHA224_DIGEST_SIZE,
1756		.base = {
1757			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1758			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1759			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1760			.cra_flags = CRYPTO_ALG_ASYNC |
1761				     CRYPTO_ALG_ALLOCATES_MEMORY |
1762				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1763			.cra_blocksize = AES_BLOCK_SIZE,
1764			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1765			.cra_alignmask = 0,
1766			.cra_init = safexcel_aead_sha224_cra_init,
1767			.cra_exit = safexcel_aead_cra_exit,
1768			.cra_module = THIS_MODULE,
1769		},
1770	},
1771};
1772
1773static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1774{
1775	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1776
1777	safexcel_aead_cra_init(tfm);
1778	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1779	ctx->state_sz = SHA512_DIGEST_SIZE;
1780	return 0;
1781}
1782
1783struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1784	.type = SAFEXCEL_ALG_TYPE_AEAD,
1785	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1786	.alg.aead = {
1787		.setkey = safexcel_aead_setkey,
1788		.encrypt = safexcel_aead_encrypt,
1789		.decrypt = safexcel_aead_decrypt,
1790		.ivsize = AES_BLOCK_SIZE,
1791		.maxauthsize = SHA512_DIGEST_SIZE,
1792		.base = {
1793			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1794			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1795			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1796			.cra_flags = CRYPTO_ALG_ASYNC |
1797				     CRYPTO_ALG_ALLOCATES_MEMORY |
1798				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1799			.cra_blocksize = AES_BLOCK_SIZE,
1800			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1801			.cra_alignmask = 0,
1802			.cra_init = safexcel_aead_sha512_cra_init,
1803			.cra_exit = safexcel_aead_cra_exit,
1804			.cra_module = THIS_MODULE,
1805		},
1806	},
1807};
1808
1809static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1810{
1811	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1812
1813	safexcel_aead_cra_init(tfm);
1814	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1815	ctx->state_sz = SHA512_DIGEST_SIZE;
1816	return 0;
1817}
1818
1819struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1820	.type = SAFEXCEL_ALG_TYPE_AEAD,
1821	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1822	.alg.aead = {
1823		.setkey = safexcel_aead_setkey,
1824		.encrypt = safexcel_aead_encrypt,
1825		.decrypt = safexcel_aead_decrypt,
1826		.ivsize = AES_BLOCK_SIZE,
1827		.maxauthsize = SHA384_DIGEST_SIZE,
1828		.base = {
1829			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1830			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1831			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1832			.cra_flags = CRYPTO_ALG_ASYNC |
1833				     CRYPTO_ALG_ALLOCATES_MEMORY |
1834				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1835			.cra_blocksize = AES_BLOCK_SIZE,
1836			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1837			.cra_alignmask = 0,
1838			.cra_init = safexcel_aead_sha384_cra_init,
1839			.cra_exit = safexcel_aead_cra_exit,
1840			.cra_module = THIS_MODULE,
1841		},
1842	},
1843};
1844
1845static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1846{
1847	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1848
1849	safexcel_aead_sha1_cra_init(tfm);
1850	ctx->alg = SAFEXCEL_3DES; /* override default */
1851	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1852	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1853	return 0;
1854}
1855
1856struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1857	.type = SAFEXCEL_ALG_TYPE_AEAD,
1858	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1859	.alg.aead = {
1860		.setkey = safexcel_aead_setkey,
1861		.encrypt = safexcel_aead_encrypt,
1862		.decrypt = safexcel_aead_decrypt,
1863		.ivsize = DES3_EDE_BLOCK_SIZE,
1864		.maxauthsize = SHA1_DIGEST_SIZE,
1865		.base = {
1866			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1867			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1868			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1869			.cra_flags = CRYPTO_ALG_ASYNC |
1870				     CRYPTO_ALG_ALLOCATES_MEMORY |
1871				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1872			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1873			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1874			.cra_alignmask = 0,
1875			.cra_init = safexcel_aead_sha1_des3_cra_init,
1876			.cra_exit = safexcel_aead_cra_exit,
1877			.cra_module = THIS_MODULE,
1878		},
1879	},
1880};
1881
1882static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1883{
1884	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1885
1886	safexcel_aead_sha256_cra_init(tfm);
1887	ctx->alg = SAFEXCEL_3DES; /* override default */
1888	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1889	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1890	return 0;
1891}
1892
1893struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
1894	.type = SAFEXCEL_ALG_TYPE_AEAD,
1895	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1896	.alg.aead = {
1897		.setkey = safexcel_aead_setkey,
1898		.encrypt = safexcel_aead_encrypt,
1899		.decrypt = safexcel_aead_decrypt,
1900		.ivsize = DES3_EDE_BLOCK_SIZE,
1901		.maxauthsize = SHA256_DIGEST_SIZE,
1902		.base = {
1903			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
1904			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
1905			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1906			.cra_flags = CRYPTO_ALG_ASYNC |
1907				     CRYPTO_ALG_ALLOCATES_MEMORY |
1908				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1909			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1910			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1911			.cra_alignmask = 0,
1912			.cra_init = safexcel_aead_sha256_des3_cra_init,
1913			.cra_exit = safexcel_aead_cra_exit,
1914			.cra_module = THIS_MODULE,
1915		},
1916	},
1917};
1918
1919static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1920{
1921	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1922
1923	safexcel_aead_sha224_cra_init(tfm);
1924	ctx->alg = SAFEXCEL_3DES; /* override default */
1925	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1926	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1927	return 0;
1928}
1929
1930struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
1931	.type = SAFEXCEL_ALG_TYPE_AEAD,
1932	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
1933	.alg.aead = {
1934		.setkey = safexcel_aead_setkey,
1935		.encrypt = safexcel_aead_encrypt,
1936		.decrypt = safexcel_aead_decrypt,
1937		.ivsize = DES3_EDE_BLOCK_SIZE,
1938		.maxauthsize = SHA224_DIGEST_SIZE,
1939		.base = {
1940			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
1941			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
1942			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1943			.cra_flags = CRYPTO_ALG_ASYNC |
1944				     CRYPTO_ALG_ALLOCATES_MEMORY |
1945				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1946			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1947			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1948			.cra_alignmask = 0,
1949			.cra_init = safexcel_aead_sha224_des3_cra_init,
1950			.cra_exit = safexcel_aead_cra_exit,
1951			.cra_module = THIS_MODULE,
1952		},
1953	},
1954};
1955
1956static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
1957{
1958	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1959
1960	safexcel_aead_sha512_cra_init(tfm);
1961	ctx->alg = SAFEXCEL_3DES; /* override default */
1962	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1963	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1964	return 0;
1965}
1966
1967struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
1968	.type = SAFEXCEL_ALG_TYPE_AEAD,
1969	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1970	.alg.aead = {
1971		.setkey = safexcel_aead_setkey,
1972		.encrypt = safexcel_aead_encrypt,
1973		.decrypt = safexcel_aead_decrypt,
1974		.ivsize = DES3_EDE_BLOCK_SIZE,
1975		.maxauthsize = SHA512_DIGEST_SIZE,
1976		.base = {
1977			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
1978			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
1979			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1980			.cra_flags = CRYPTO_ALG_ASYNC |
1981				     CRYPTO_ALG_ALLOCATES_MEMORY |
1982				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1983			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1984			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1985			.cra_alignmask = 0,
1986			.cra_init = safexcel_aead_sha512_des3_cra_init,
1987			.cra_exit = safexcel_aead_cra_exit,
1988			.cra_module = THIS_MODULE,
1989		},
1990	},
1991};
1992
1993static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
1994{
1995	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1996
1997	safexcel_aead_sha384_cra_init(tfm);
1998	ctx->alg = SAFEXCEL_3DES; /* override default */
1999	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2000	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2001	return 0;
2002}
2003
2004struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2005	.type = SAFEXCEL_ALG_TYPE_AEAD,
2006	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2007	.alg.aead = {
2008		.setkey = safexcel_aead_setkey,
2009		.encrypt = safexcel_aead_encrypt,
2010		.decrypt = safexcel_aead_decrypt,
2011		.ivsize = DES3_EDE_BLOCK_SIZE,
2012		.maxauthsize = SHA384_DIGEST_SIZE,
2013		.base = {
2014			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2015			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2016			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2017			.cra_flags = CRYPTO_ALG_ASYNC |
2018				     CRYPTO_ALG_ALLOCATES_MEMORY |
2019				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2020			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
2021			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2022			.cra_alignmask = 0,
2023			.cra_init = safexcel_aead_sha384_des3_cra_init,
2024			.cra_exit = safexcel_aead_cra_exit,
2025			.cra_module = THIS_MODULE,
2026		},
2027	},
2028};
2029
2030static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2031{
2032	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2033
2034	safexcel_aead_sha1_cra_init(tfm);
2035	ctx->alg = SAFEXCEL_DES; /* override default */
2036	ctx->blocksz = DES_BLOCK_SIZE;
2037	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2038	return 0;
2039}
2040
2041struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2042	.type = SAFEXCEL_ALG_TYPE_AEAD,
2043	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2044	.alg.aead = {
2045		.setkey = safexcel_aead_setkey,
2046		.encrypt = safexcel_aead_encrypt,
2047		.decrypt = safexcel_aead_decrypt,
2048		.ivsize = DES_BLOCK_SIZE,
2049		.maxauthsize = SHA1_DIGEST_SIZE,
2050		.base = {
2051			.cra_name = "authenc(hmac(sha1),cbc(des))",
2052			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2053			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2054			.cra_flags = CRYPTO_ALG_ASYNC |
2055				     CRYPTO_ALG_ALLOCATES_MEMORY |
2056				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2057			.cra_blocksize = DES_BLOCK_SIZE,
2058			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2059			.cra_alignmask = 0,
2060			.cra_init = safexcel_aead_sha1_des_cra_init,
2061			.cra_exit = safexcel_aead_cra_exit,
2062			.cra_module = THIS_MODULE,
2063		},
2064	},
2065};
2066
2067static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2068{
2069	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071	safexcel_aead_sha256_cra_init(tfm);
2072	ctx->alg = SAFEXCEL_DES; /* override default */
2073	ctx->blocksz = DES_BLOCK_SIZE;
2074	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2075	return 0;
2076}
2077
2078struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2079	.type = SAFEXCEL_ALG_TYPE_AEAD,
2080	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2081	.alg.aead = {
2082		.setkey = safexcel_aead_setkey,
2083		.encrypt = safexcel_aead_encrypt,
2084		.decrypt = safexcel_aead_decrypt,
2085		.ivsize = DES_BLOCK_SIZE,
2086		.maxauthsize = SHA256_DIGEST_SIZE,
2087		.base = {
2088			.cra_name = "authenc(hmac(sha256),cbc(des))",
2089			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2090			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2091			.cra_flags = CRYPTO_ALG_ASYNC |
2092				     CRYPTO_ALG_ALLOCATES_MEMORY |
2093				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2094			.cra_blocksize = DES_BLOCK_SIZE,
2095			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2096			.cra_alignmask = 0,
2097			.cra_init = safexcel_aead_sha256_des_cra_init,
2098			.cra_exit = safexcel_aead_cra_exit,
2099			.cra_module = THIS_MODULE,
2100		},
2101	},
2102};
2103
2104static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2105{
2106	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2107
2108	safexcel_aead_sha224_cra_init(tfm);
2109	ctx->alg = SAFEXCEL_DES; /* override default */
2110	ctx->blocksz = DES_BLOCK_SIZE;
2111	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2112	return 0;
2113}
2114
2115struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2116	.type = SAFEXCEL_ALG_TYPE_AEAD,
2117	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2118	.alg.aead = {
2119		.setkey = safexcel_aead_setkey,
2120		.encrypt = safexcel_aead_encrypt,
2121		.decrypt = safexcel_aead_decrypt,
2122		.ivsize = DES_BLOCK_SIZE,
2123		.maxauthsize = SHA224_DIGEST_SIZE,
2124		.base = {
2125			.cra_name = "authenc(hmac(sha224),cbc(des))",
2126			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2127			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2128			.cra_flags = CRYPTO_ALG_ASYNC |
2129				     CRYPTO_ALG_ALLOCATES_MEMORY |
2130				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2131			.cra_blocksize = DES_BLOCK_SIZE,
2132			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2133			.cra_alignmask = 0,
2134			.cra_init = safexcel_aead_sha224_des_cra_init,
2135			.cra_exit = safexcel_aead_cra_exit,
2136			.cra_module = THIS_MODULE,
2137		},
2138	},
2139};
2140
2141static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2142{
2143	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2144
2145	safexcel_aead_sha512_cra_init(tfm);
2146	ctx->alg = SAFEXCEL_DES; /* override default */
2147	ctx->blocksz = DES_BLOCK_SIZE;
2148	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2149	return 0;
2150}
2151
2152struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2153	.type = SAFEXCEL_ALG_TYPE_AEAD,
2154	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2155	.alg.aead = {
2156		.setkey = safexcel_aead_setkey,
2157		.encrypt = safexcel_aead_encrypt,
2158		.decrypt = safexcel_aead_decrypt,
2159		.ivsize = DES_BLOCK_SIZE,
2160		.maxauthsize = SHA512_DIGEST_SIZE,
2161		.base = {
2162			.cra_name = "authenc(hmac(sha512),cbc(des))",
2163			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2164			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2165			.cra_flags = CRYPTO_ALG_ASYNC |
2166				     CRYPTO_ALG_ALLOCATES_MEMORY |
2167				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2168			.cra_blocksize = DES_BLOCK_SIZE,
2169			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2170			.cra_alignmask = 0,
2171			.cra_init = safexcel_aead_sha512_des_cra_init,
2172			.cra_exit = safexcel_aead_cra_exit,
2173			.cra_module = THIS_MODULE,
2174		},
2175	},
2176};
2177
2178static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2179{
2180	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2181
2182	safexcel_aead_sha384_cra_init(tfm);
2183	ctx->alg = SAFEXCEL_DES; /* override default */
2184	ctx->blocksz = DES_BLOCK_SIZE;
2185	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2186	return 0;
2187}
2188
2189struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2190	.type = SAFEXCEL_ALG_TYPE_AEAD,
2191	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2192	.alg.aead = {
2193		.setkey = safexcel_aead_setkey,
2194		.encrypt = safexcel_aead_encrypt,
2195		.decrypt = safexcel_aead_decrypt,
2196		.ivsize = DES_BLOCK_SIZE,
2197		.maxauthsize = SHA384_DIGEST_SIZE,
2198		.base = {
2199			.cra_name = "authenc(hmac(sha384),cbc(des))",
2200			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2201			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2202			.cra_flags = CRYPTO_ALG_ASYNC |
2203				     CRYPTO_ALG_ALLOCATES_MEMORY |
2204				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2205			.cra_blocksize = DES_BLOCK_SIZE,
2206			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2207			.cra_alignmask = 0,
2208			.cra_init = safexcel_aead_sha384_des_cra_init,
2209			.cra_exit = safexcel_aead_cra_exit,
2210			.cra_module = THIS_MODULE,
2211		},
2212	},
2213};
2214
2215static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2216{
2217	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2218
2219	safexcel_aead_sha1_cra_init(tfm);
2220	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2221	return 0;
2222}
2223
2224struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2225	.type = SAFEXCEL_ALG_TYPE_AEAD,
2226	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2227	.alg.aead = {
2228		.setkey = safexcel_aead_setkey,
2229		.encrypt = safexcel_aead_encrypt,
2230		.decrypt = safexcel_aead_decrypt,
2231		.ivsize = CTR_RFC3686_IV_SIZE,
2232		.maxauthsize = SHA1_DIGEST_SIZE,
2233		.base = {
2234			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2235			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2236			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2237			.cra_flags = CRYPTO_ALG_ASYNC |
2238				     CRYPTO_ALG_ALLOCATES_MEMORY |
2239				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2240			.cra_blocksize = 1,
2241			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2242			.cra_alignmask = 0,
2243			.cra_init = safexcel_aead_sha1_ctr_cra_init,
2244			.cra_exit = safexcel_aead_cra_exit,
2245			.cra_module = THIS_MODULE,
2246		},
2247	},
2248};
2249
2250static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2251{
2252	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2253
2254	safexcel_aead_sha256_cra_init(tfm);
2255	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2256	return 0;
2257}
2258
2259struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2260	.type = SAFEXCEL_ALG_TYPE_AEAD,
2261	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2262	.alg.aead = {
2263		.setkey = safexcel_aead_setkey,
2264		.encrypt = safexcel_aead_encrypt,
2265		.decrypt = safexcel_aead_decrypt,
2266		.ivsize = CTR_RFC3686_IV_SIZE,
2267		.maxauthsize = SHA256_DIGEST_SIZE,
2268		.base = {
2269			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2270			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2271			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2272			.cra_flags = CRYPTO_ALG_ASYNC |
2273				     CRYPTO_ALG_ALLOCATES_MEMORY |
2274				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2275			.cra_blocksize = 1,
2276			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2277			.cra_alignmask = 0,
2278			.cra_init = safexcel_aead_sha256_ctr_cra_init,
2279			.cra_exit = safexcel_aead_cra_exit,
2280			.cra_module = THIS_MODULE,
2281		},
2282	},
2283};
2284
2285static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2286{
2287	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2288
2289	safexcel_aead_sha224_cra_init(tfm);
2290	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2291	return 0;
2292}
2293
2294struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2295	.type = SAFEXCEL_ALG_TYPE_AEAD,
2296	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2297	.alg.aead = {
2298		.setkey = safexcel_aead_setkey,
2299		.encrypt = safexcel_aead_encrypt,
2300		.decrypt = safexcel_aead_decrypt,
2301		.ivsize = CTR_RFC3686_IV_SIZE,
2302		.maxauthsize = SHA224_DIGEST_SIZE,
2303		.base = {
2304			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2305			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2306			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2307			.cra_flags = CRYPTO_ALG_ASYNC |
2308				     CRYPTO_ALG_ALLOCATES_MEMORY |
2309				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2310			.cra_blocksize = 1,
2311			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2312			.cra_alignmask = 0,
2313			.cra_init = safexcel_aead_sha224_ctr_cra_init,
2314			.cra_exit = safexcel_aead_cra_exit,
2315			.cra_module = THIS_MODULE,
2316		},
2317	},
2318};
2319
2320static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2321{
2322	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2323
2324	safexcel_aead_sha512_cra_init(tfm);
2325	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2326	return 0;
2327}
2328
2329struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2330	.type = SAFEXCEL_ALG_TYPE_AEAD,
2331	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2332	.alg.aead = {
2333		.setkey = safexcel_aead_setkey,
2334		.encrypt = safexcel_aead_encrypt,
2335		.decrypt = safexcel_aead_decrypt,
2336		.ivsize = CTR_RFC3686_IV_SIZE,
2337		.maxauthsize = SHA512_DIGEST_SIZE,
2338		.base = {
2339			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2340			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2341			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2342			.cra_flags = CRYPTO_ALG_ASYNC |
2343				     CRYPTO_ALG_ALLOCATES_MEMORY |
2344				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2345			.cra_blocksize = 1,
2346			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2347			.cra_alignmask = 0,
2348			.cra_init = safexcel_aead_sha512_ctr_cra_init,
2349			.cra_exit = safexcel_aead_cra_exit,
2350			.cra_module = THIS_MODULE,
2351		},
2352	},
2353};
2354
2355static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2356{
2357	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2358
2359	safexcel_aead_sha384_cra_init(tfm);
2360	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2361	return 0;
2362}
2363
2364struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2365	.type = SAFEXCEL_ALG_TYPE_AEAD,
2366	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2367	.alg.aead = {
2368		.setkey = safexcel_aead_setkey,
2369		.encrypt = safexcel_aead_encrypt,
2370		.decrypt = safexcel_aead_decrypt,
2371		.ivsize = CTR_RFC3686_IV_SIZE,
2372		.maxauthsize = SHA384_DIGEST_SIZE,
2373		.base = {
2374			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2375			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2376			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2377			.cra_flags = CRYPTO_ALG_ASYNC |
2378				     CRYPTO_ALG_ALLOCATES_MEMORY |
2379				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2380			.cra_blocksize = 1,
2381			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2382			.cra_alignmask = 0,
2383			.cra_init = safexcel_aead_sha384_ctr_cra_init,
2384			.cra_exit = safexcel_aead_cra_exit,
2385			.cra_module = THIS_MODULE,
2386		},
2387	},
2388};
2389
2390static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2391					   const u8 *key, unsigned int len)
2392{
2393	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2394	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2395	struct safexcel_crypto_priv *priv = ctx->base.priv;
2396	struct crypto_aes_ctx aes;
2397	int ret, i;
2398	unsigned int keylen;
2399
2400	/* Check for illegal XTS keys */
2401	ret = xts_verify_key(ctfm, key, len);
2402	if (ret)
2403		return ret;
2404
2405	/* Only half of the key data is cipher key */
2406	keylen = (len >> 1);
2407	ret = aes_expandkey(&aes, key, keylen);
2408	if (ret)
2409		return ret;
2410
2411	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2412		for (i = 0; i < keylen / sizeof(u32); i++) {
2413			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2414				ctx->base.needs_inv = true;
2415				break;
2416			}
2417		}
2418	}
2419
2420	for (i = 0; i < keylen / sizeof(u32); i++)
2421		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2422
2423	/* The other half is the tweak key */
2424	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2425	if (ret)
2426		return ret;
2427
2428	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2429		for (i = 0; i < keylen / sizeof(u32); i++) {
2430			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2431			    aes.key_enc[i]) {
2432				ctx->base.needs_inv = true;
2433				break;
2434			}
2435		}
2436	}
2437
2438	for (i = 0; i < keylen / sizeof(u32); i++)
2439		ctx->key[i + keylen / sizeof(u32)] =
2440			cpu_to_le32(aes.key_enc[i]);
2441
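	/* Context key length covers the cipher key plus the tweak key */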
2442	ctx->key_len = keylen << 1;
2443
2444	memzero_explicit(&aes, sizeof(aes));
2445	return 0;
2446}
2447
2448static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2449{
2450	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2451
2452	safexcel_skcipher_cra_init(tfm);
2453	ctx->alg  = SAFEXCEL_AES;
2454	ctx->blocksz = AES_BLOCK_SIZE;
2455	ctx->xts  = 1;
2456	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2457	return 0;
2458}
2459
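/*
 * XTS needs at least one full AES block of payload; shorter requests
 * cannot be processed (even ciphertext stealing requires one whole
 * block), so both directions reject them up front.
 */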
2460static int safexcel_encrypt_xts(struct skcipher_request *req)
2461{
2462	if (req->cryptlen < XTS_BLOCK_SIZE)
2463		return -EINVAL;
2464	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2465				  SAFEXCEL_ENCRYPT);
2466}
2467
2468static int safexcel_decrypt_xts(struct skcipher_request *req)
2469{
2470	if (req->cryptlen < XTS_BLOCK_SIZE)
2471		return -EINVAL;
2472	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2473				  SAFEXCEL_DECRYPT);
2474}
2475
2476struct safexcel_alg_template safexcel_alg_xts_aes = {
2477	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2478	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2479	.alg.skcipher = {
2480		.setkey = safexcel_skcipher_aesxts_setkey,
2481		.encrypt = safexcel_encrypt_xts,
2482		.decrypt = safexcel_decrypt_xts,
2483		/* XTS actually uses 2 AES keys glued together */
2484		.min_keysize = AES_MIN_KEY_SIZE * 2,
2485		.max_keysize = AES_MAX_KEY_SIZE * 2,
2486		.ivsize = XTS_BLOCK_SIZE,
2487		.base = {
2488			.cra_name = "xts(aes)",
2489			.cra_driver_name = "safexcel-xts-aes",
2490			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2491			.cra_flags = CRYPTO_ALG_ASYNC |
2492				     CRYPTO_ALG_ALLOCATES_MEMORY |
2493				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2494			.cra_blocksize = XTS_BLOCK_SIZE,
2495			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2496			.cra_alignmask = 0,
2497			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2498			.cra_exit = safexcel_skcipher_cra_exit,
2499			.cra_module = THIS_MODULE,
2500		},
2501	},
2502};
2503
2504static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2505				    unsigned int len)
2506{
2507	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2508	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2509	struct safexcel_crypto_priv *priv = ctx->base.priv;
2510	struct crypto_aes_ctx aes;
2511	u32 hashkey[AES_BLOCK_SIZE >> 2];
2512	int ret, i;
2513
2514	ret = aes_expandkey(&aes, key, len);
2515	if (ret) {
2516		memzero_explicit(&aes, sizeof(aes));
2517		return ret;
2518	}
2519
2520	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2521		for (i = 0; i < len / sizeof(u32); i++) {
2522			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2523				ctx->base.needs_inv = true;
2524				break;
2525			}
2526		}
2527	}
2528
2529	for (i = 0; i < len / sizeof(u32); i++)
2530		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2531
2532	ctx->key_len = len;
2533
2534	/* Compute hash key by encrypting zeroes with cipher key */
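	/* (this yields the GHASH subkey H = E_K(0^128) defined by GCM) */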
2535	memset(hashkey, 0, AES_BLOCK_SIZE);
2536	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);
2537
2538	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2539		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2540			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
2541				ctx->base.needs_inv = true;
2542				break;
2543			}
2544		}
2545	}
2546
2547	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2548		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);
2549
2550	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2551	memzero_explicit(&aes, sizeof(aes));
2552	return 0;
2553}
2554
2555static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2556{
2557	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2558
2559	safexcel_aead_cra_init(tfm);
2560	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
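	/* Hash state only carries the 16-byte GHASH subkey H from setkey */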
2561	ctx->state_sz = GHASH_BLOCK_SIZE;
2562	ctx->xcm = EIP197_XCM_MODE_GCM;
2563	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2564
2565	return 0;
2566}
2567
2568static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2569{
2570	safexcel_aead_cra_exit(tfm);
2571}
2572
2573static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2574					 unsigned int authsize)
2575{
2576	return crypto_gcm_check_authsize(authsize);
2577}
2578
2579struct safexcel_alg_template safexcel_alg_gcm = {
2580	.type = SAFEXCEL_ALG_TYPE_AEAD,
2581	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2582	.alg.aead = {
2583		.setkey = safexcel_aead_gcm_setkey,
2584		.setauthsize = safexcel_aead_gcm_setauthsize,
2585		.encrypt = safexcel_aead_encrypt,
2586		.decrypt = safexcel_aead_decrypt,
2587		.ivsize = GCM_AES_IV_SIZE,
2588		.maxauthsize = GHASH_DIGEST_SIZE,
2589		.base = {
2590			.cra_name = "gcm(aes)",
2591			.cra_driver_name = "safexcel-gcm-aes",
2592			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2593			.cra_flags = CRYPTO_ALG_ASYNC |
2594				     CRYPTO_ALG_ALLOCATES_MEMORY |
2595				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2596			.cra_blocksize = 1,
2597			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2598			.cra_alignmask = 0,
2599			.cra_init = safexcel_aead_gcm_cra_init,
2600			.cra_exit = safexcel_aead_gcm_cra_exit,
2601			.cra_module = THIS_MODULE,
2602		},
2603	},
2604};
2605
2606static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2607				    unsigned int len)
2608{
2609	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2610	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2611	struct safexcel_crypto_priv *priv = ctx->base.priv;
2612	struct crypto_aes_ctx aes;
2613	int ret, i;
2614
2615	ret = aes_expandkey(&aes, key, len);
2616	if (ret) {
2617		memzero_explicit(&aes, sizeof(aes));
2618		return ret;
2619	}
2620
2621	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2622		for (i = 0; i < len / sizeof(u32); i++) {
2623			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2624				ctx->base.needs_inv = true;
2625				break;
2626			}
2627		}
2628	}
2629
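	/*
	 * Keep the key twice: as the regular cipher key and, inside the
	 * hash state, offset by two AES blocks of (X)CBC-MAC state -
	 * hence state_sz = 2 * AES_BLOCK_SIZE + len below.
	 */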
2630	for (i = 0; i < len / sizeof(u32); i++) {
2631		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2632		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2633			cpu_to_be32(aes.key_enc[i]);
2634	}
2635
2636	ctx->key_len = len;
2637	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2638
2639	if (len == AES_KEYSIZE_192)
2640		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2641	else if (len == AES_KEYSIZE_256)
2642		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2643	else
2644		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2645
2646	memzero_explicit(&aes, sizeof(aes));
2647	return 0;
2648}
2649
2650static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2651{
2652	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2653
2654	safexcel_aead_cra_init(tfm);
2655	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2656	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2657	ctx->xcm = EIP197_XCM_MODE_CCM;
2658	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2659	ctx->ctrinit = 0;
2660	return 0;
2661}
2662
2663static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2664					 unsigned int authsize)
2665{
2666	/* Borrowed from crypto/ccm.c */
2667	switch (authsize) {
2668	case 4:
2669	case 6:
2670	case 8:
2671	case 10:
2672	case 12:
2673	case 14:
2674	case 16:
2675		break;
2676	default:
2677		return -EINVAL;
2678	}
2679
2680	return 0;
2681}
2682
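/*
 * For CCM, req->iv[0] holds L' = L - 1, the size of the length field
 * minus one; RFC 3610 only permits 2 <= L <= 8, hence the range checks
 * below in both directions.
 */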
2683static int safexcel_ccm_encrypt(struct aead_request *req)
2684{
2685	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2686
2687	if (req->iv[0] < 1 || req->iv[0] > 7)
2688		return -EINVAL;
2689
2690	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2691}
2692
2693static int safexcel_ccm_decrypt(struct aead_request *req)
2694{
2695	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2696
2697	if (req->iv[0] < 1 || req->iv[0] > 7)
2698		return -EINVAL;
2699
2700	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2701}
2702
2703struct safexcel_alg_template safexcel_alg_ccm = {
2704	.type = SAFEXCEL_ALG_TYPE_AEAD,
2705	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2706	.alg.aead = {
2707		.setkey = safexcel_aead_ccm_setkey,
2708		.setauthsize = safexcel_aead_ccm_setauthsize,
2709		.encrypt = safexcel_ccm_encrypt,
2710		.decrypt = safexcel_ccm_decrypt,
2711		.ivsize = AES_BLOCK_SIZE,
2712		.maxauthsize = AES_BLOCK_SIZE,
2713		.base = {
2714			.cra_name = "ccm(aes)",
2715			.cra_driver_name = "safexcel-ccm-aes",
2716			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2717			.cra_flags = CRYPTO_ALG_ASYNC |
2718				     CRYPTO_ALG_ALLOCATES_MEMORY |
2719				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2720			.cra_blocksize = 1,
2721			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2722			.cra_alignmask = 0,
2723			.cra_init = safexcel_aead_ccm_cra_init,
2724			.cra_exit = safexcel_aead_cra_exit,
2725			.cra_module = THIS_MODULE,
2726		},
2727	},
2728};
2729
2730static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2731				     const u8 *key)
2732{
2733	struct safexcel_crypto_priv *priv = ctx->base.priv;
2734
2735	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2736		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2737			ctx->base.needs_inv = true;
2738
2739	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2740	ctx->key_len = CHACHA_KEY_SIZE;
2741}
2742
2743static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2744					     const u8 *key, unsigned int len)
2745{
2746	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2747
2748	if (len != CHACHA_KEY_SIZE)
2749		return -EINVAL;
2750
2751	safexcel_chacha20_setkey(ctx, key);
2752
2753	return 0;
2754}
2755
2756static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2757{
2758	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2759
2760	safexcel_skcipher_cra_init(tfm);
2761	ctx->alg  = SAFEXCEL_CHACHA20;
2762	ctx->ctrinit = 0;
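	/* Mode name suggests 256-bit key / 32-bit counter, i.e. plain chacha20 */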
2763	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2764	return 0;
2765}
2766
2767struct safexcel_alg_template safexcel_alg_chacha20 = {
2768	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2769	.algo_mask = SAFEXCEL_ALG_CHACHA20,
2770	.alg.skcipher = {
2771		.setkey = safexcel_skcipher_chacha20_setkey,
2772		.encrypt = safexcel_encrypt,
2773		.decrypt = safexcel_decrypt,
2774		.min_keysize = CHACHA_KEY_SIZE,
2775		.max_keysize = CHACHA_KEY_SIZE,
2776		.ivsize = CHACHA_IV_SIZE,
2777		.base = {
2778			.cra_name = "chacha20",
2779			.cra_driver_name = "safexcel-chacha20",
2780			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2781			.cra_flags = CRYPTO_ALG_ASYNC |
2782				     CRYPTO_ALG_ALLOCATES_MEMORY |
2783				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2784			.cra_blocksize = 1,
2785			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2786			.cra_alignmask = 0,
2787			.cra_init = safexcel_skcipher_chacha20_cra_init,
2788			.cra_exit = safexcel_skcipher_cra_exit,
2789			.cra_module = THIS_MODULE,
2790		},
2791	},
2792};
2793
2794static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2795				    const u8 *key, unsigned int len)
2796{
2797	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2798
2799	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2800	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2801		/* ESP variant has nonce appended to key */
2802		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2803		ctx->nonce = *(u32 *)(key + len);
2804	}
2805	if (len != CHACHA_KEY_SIZE)
2806		return -EINVAL;
2807
2808	safexcel_chacha20_setkey(ctx, key);
2809
2810	return 0;
2811}
2812
2813static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2814					 unsigned int authsize)
2815{
2816	if (authsize != POLY1305_DIGEST_SIZE)
2817		return -EINVAL;
2818	return 0;
2819}
2820
2821static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
2822					  enum safexcel_cipher_direction dir)
2823{
2824	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2825	struct crypto_aead *aead = crypto_aead_reqtfm(req);
2826	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
2827	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2828	struct aead_request *subreq = aead_request_ctx(req);
2829	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
2830	int ret = 0;
2831
2832	/*
2833	 * Instead of wasting time detecting umpteen silly corner cases,
2834	 * just dump all "small" requests to the fallback implementation.
2835	 * HW would not be faster on such small requests anyway.
2836	 */
2837	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
2838		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
2839		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
2840		return safexcel_queue_req(&req->base, creq, dir);
2841	}
2842
2843	/* HW cannot do full (AAD+payload) zero length, use fallback */
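	/* (the spare word in key[] leaves room for the ESP nonce appended below) */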
2844	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
2845	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
2846		/* ESP variant has nonce appended to the key */
2847		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
2848		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2849					 CHACHA_KEY_SIZE +
2850					 EIP197_AEAD_IPSEC_NONCE_SIZE);
2851	} else {
2852		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
2853					 CHACHA_KEY_SIZE);
2854	}
2855	if (ret) {
2856		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
2857		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
2858					    CRYPTO_TFM_REQ_MASK);
2859		return ret;
2860	}
2861
2862	aead_request_set_tfm(subreq, ctx->fback);
2863	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
2864				  req->base.data);
2865	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
2866			       req->iv);
2867	aead_request_set_ad(subreq, req->assoclen);
2868
2869	return (dir ==  SAFEXCEL_ENCRYPT) ?
2870		crypto_aead_encrypt(subreq) :
2871		crypto_aead_decrypt(subreq);
2872}
2873
2874static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
2875{
2876	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
2877}
2878
2879static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
2880{
2881	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
2882}
2883
2884static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
2885{
2886	struct crypto_aead *aead = __crypto_aead_cast(tfm);
2887	struct aead_alg *alg = crypto_aead_alg(aead);
2888	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2889
2890	safexcel_aead_cra_init(tfm);
2891
2892	/* Allocate fallback implementation */
2893	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
2894				       CRYPTO_ALG_ASYNC |
2895				       CRYPTO_ALG_NEED_FALLBACK);
2896	if (IS_ERR(ctx->fback))
2897		return PTR_ERR(ctx->fback);
2898
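	/*
	 * The request context must be large enough for either the HW
	 * request state or a complete fallback subrequest, whichever
	 * is bigger.
	 */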
2899	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
2900					  sizeof(struct aead_request) +
2901					  crypto_aead_reqsize(ctx->fback)));
2902
2903	return 0;
2904}
2905
2906static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2907{
2908	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2909
2910	safexcel_aead_fallback_cra_init(tfm);
2911	ctx->alg  = SAFEXCEL_CHACHA20;
2912	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2913		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2914	ctx->ctrinit = 0;
2915	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2916	ctx->state_sz = 0; /* Precomputed by HW */
2917	return 0;
2918}
2919
2920static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
2921{
2922	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2923
2924	crypto_free_aead(ctx->fback);
2925	safexcel_aead_cra_exit(tfm);
2926}
2927
2928struct safexcel_alg_template safexcel_alg_chachapoly = {
2929	.type = SAFEXCEL_ALG_TYPE_AEAD,
2930	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2931	.alg.aead = {
2932		.setkey = safexcel_aead_chachapoly_setkey,
2933		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2934		.encrypt = safexcel_aead_chachapoly_encrypt,
2935		.decrypt = safexcel_aead_chachapoly_decrypt,
2936		.ivsize = CHACHAPOLY_IV_SIZE,
2937		.maxauthsize = POLY1305_DIGEST_SIZE,
2938		.base = {
2939			.cra_name = "rfc7539(chacha20,poly1305)",
2940			.cra_driver_name = "safexcel-chacha20-poly1305",
2941			/* +1 to put it above HW chacha + SW poly */
2942			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2943			.cra_flags = CRYPTO_ALG_ASYNC |
2944				     CRYPTO_ALG_ALLOCATES_MEMORY |
2945				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2946				     CRYPTO_ALG_NEED_FALLBACK,
2947			.cra_blocksize = 1,
2948			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2949			.cra_alignmask = 0,
2950			.cra_init = safexcel_aead_chachapoly_cra_init,
2951			.cra_exit = safexcel_aead_fallback_cra_exit,
2952			.cra_module = THIS_MODULE,
2953		},
2954	},
2955};
2956
2957static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
2958{
2959	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2960	int ret;
2961
2962	ret = safexcel_aead_chachapoly_cra_init(tfm);
2963	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
2964	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
2965	return ret;
2966}
2967
2968struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
2969	.type = SAFEXCEL_ALG_TYPE_AEAD,
2970	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
2971	.alg.aead = {
2972		.setkey = safexcel_aead_chachapoly_setkey,
2973		.setauthsize = safexcel_aead_chachapoly_setauthsize,
2974		.encrypt = safexcel_aead_chachapoly_encrypt,
2975		.decrypt = safexcel_aead_chachapoly_decrypt,
2976		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
2977		.maxauthsize = POLY1305_DIGEST_SIZE,
2978		.base = {
2979			.cra_name = "rfc7539esp(chacha20,poly1305)",
2980			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
2981			/* +1 to put it above HW chacha + SW poly */
2982			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
2983			.cra_flags = CRYPTO_ALG_ASYNC |
2984				     CRYPTO_ALG_ALLOCATES_MEMORY |
2985				     CRYPTO_ALG_KERN_DRIVER_ONLY |
2986				     CRYPTO_ALG_NEED_FALLBACK,
2987			.cra_blocksize = 1,
2988			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2989			.cra_alignmask = 0,
2990			.cra_init = safexcel_aead_chachapolyesp_cra_init,
2991			.cra_exit = safexcel_aead_fallback_cra_exit,
2992			.cra_module = THIS_MODULE,
2993		},
2994	},
2995};
2996
2997static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2998					const u8 *key, unsigned int len)
2999{
3000	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3001	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3002	struct safexcel_crypto_priv *priv = ctx->base.priv;
3003
3004	if (len != SM4_KEY_SIZE)
3005		return -EINVAL;
3006
3007	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3008		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3009			ctx->base.needs_inv = true;
3010
3011	memcpy(ctx->key, key, SM4_KEY_SIZE);
3012	ctx->key_len = SM4_KEY_SIZE;
3013
3014	return 0;
3015}
3016
3017static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3018{
3019	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3020	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3021		return -EINVAL;
3022	else
3023		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3024					  SAFEXCEL_ENCRYPT);
3025}
3026
3027static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3028{
3029	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3030	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3031		return -EINVAL;
3032	else
3033		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3034					  SAFEXCEL_DECRYPT);
3035}
3036
3037static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3038{
3039	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3040
3041	safexcel_skcipher_cra_init(tfm);
3042	ctx->alg  = SAFEXCEL_SM4;
3043	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3044	ctx->blocksz = 0;
3045	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3046	return 0;
3047}
3048
3049struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
3050	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3051	.algo_mask = SAFEXCEL_ALG_SM4,
3052	.alg.skcipher = {
3053		.setkey = safexcel_skcipher_sm4_setkey,
3054		.encrypt = safexcel_sm4_blk_encrypt,
3055		.decrypt = safexcel_sm4_blk_decrypt,
3056		.min_keysize = SM4_KEY_SIZE,
3057		.max_keysize = SM4_KEY_SIZE,
3058		.base = {
3059			.cra_name = "ecb(sm4)",
3060			.cra_driver_name = "safexcel-ecb-sm4",
3061			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3062			.cra_flags = CRYPTO_ALG_ASYNC |
3063				     CRYPTO_ALG_ALLOCATES_MEMORY |
3064				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3065			.cra_blocksize = SM4_BLOCK_SIZE,
3066			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3067			.cra_alignmask = 0,
3068			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
3069			.cra_exit = safexcel_skcipher_cra_exit,
3070			.cra_module = THIS_MODULE,
3071		},
3072	},
3073};
3074
3075static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3076{
3077	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3078
3079	safexcel_skcipher_cra_init(tfm);
3080	ctx->alg  = SAFEXCEL_SM4;
3081	ctx->blocksz = SM4_BLOCK_SIZE;
3082	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3083	return 0;
3084}
3085
3086struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3087	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3088	.algo_mask = SAFEXCEL_ALG_SM4,
3089	.alg.skcipher = {
3090		.setkey = safexcel_skcipher_sm4_setkey,
3091		.encrypt = safexcel_sm4_blk_encrypt,
3092		.decrypt = safexcel_sm4_blk_decrypt,
3093		.min_keysize = SM4_KEY_SIZE,
3094		.max_keysize = SM4_KEY_SIZE,
3095		.ivsize = SM4_BLOCK_SIZE,
3096		.base = {
3097			.cra_name = "cbc(sm4)",
3098			.cra_driver_name = "safexcel-cbc-sm4",
3099			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3100			.cra_flags = CRYPTO_ALG_ASYNC |
3101				     CRYPTO_ALG_ALLOCATES_MEMORY |
3102				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3103			.cra_blocksize = SM4_BLOCK_SIZE,
3104			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3105			.cra_alignmask = 0,
3106			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
3107			.cra_exit = safexcel_skcipher_cra_exit,
3108			.cra_module = THIS_MODULE,
3109		},
3110	},
3111};
3112
3113static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3114					   const u8 *key, unsigned int len)
3115{
3116	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3117	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3118
3119	/* last 4 bytes of key are the nonce! */
3120	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3121	/* exclude the nonce here */
3122	len -= CTR_RFC3686_NONCE_SIZE;
3123
3124	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3125}
3126
3127static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3128{
3129	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3130
3131	safexcel_skcipher_cra_init(tfm);
3132	ctx->alg  = SAFEXCEL_SM4;
3133	ctx->blocksz = SM4_BLOCK_SIZE;
3134	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3135	return 0;
3136}
3137
3138struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
3139	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3140	.algo_mask = SAFEXCEL_ALG_SM4,
3141	.alg.skcipher = {
3142		.setkey = safexcel_skcipher_sm4ctr_setkey,
3143		.encrypt = safexcel_encrypt,
3144		.decrypt = safexcel_decrypt,
3145		/* Add nonce size */
3146		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3147		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
3148		.ivsize = CTR_RFC3686_IV_SIZE,
3149		.base = {
3150			.cra_name = "rfc3686(ctr(sm4))",
3151			.cra_driver_name = "safexcel-ctr-sm4",
3152			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3153			.cra_flags = CRYPTO_ALG_ASYNC |
3154				     CRYPTO_ALG_ALLOCATES_MEMORY |
3155				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3156			.cra_blocksize = 1,
3157			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3158			.cra_alignmask = 0,
3159			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
3160			.cra_exit = safexcel_skcipher_cra_exit,
3161			.cra_module = THIS_MODULE,
3162		},
3163	},
3164};
3165
3166static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3167{
3168	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3169	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3170		return -EINVAL;
3171
3172	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3173				  SAFEXCEL_ENCRYPT);
3174}
3175
3176static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
3177{
3178	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3179
3180	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3181	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3182		return -EINVAL;
3183
3184	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3185				  SAFEXCEL_DECRYPT);
3186}
3187
3188static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3189{
3190	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3191
3192	safexcel_aead_cra_init(tfm);
3193	ctx->alg = SAFEXCEL_SM4;
3194	ctx->blocksz = SM4_BLOCK_SIZE;
3195	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3196	ctx->state_sz = SHA1_DIGEST_SIZE;
3197	return 0;
3198}
3199
3200struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
3201	.type = SAFEXCEL_ALG_TYPE_AEAD,
3202	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3203	.alg.aead = {
3204		.setkey = safexcel_aead_setkey,
3205		.encrypt = safexcel_aead_sm4_blk_encrypt,
3206		.decrypt = safexcel_aead_sm4_blk_decrypt,
3207		.ivsize = SM4_BLOCK_SIZE,
3208		.maxauthsize = SHA1_DIGEST_SIZE,
3209		.base = {
3210			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
3211			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
3212			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3213			.cra_flags = CRYPTO_ALG_ASYNC |
3214				     CRYPTO_ALG_ALLOCATES_MEMORY |
3215				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3216			.cra_blocksize = SM4_BLOCK_SIZE,
3217			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3218			.cra_alignmask = 0,
3219			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
3220			.cra_exit = safexcel_aead_cra_exit,
3221			.cra_module = THIS_MODULE,
3222		},
3223	},
3224};
3225
3226static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3227					 const u8 *key, unsigned int len)
3228{
3229	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3230	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3231
3232	/* Keep fallback cipher synchronized */
3233	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3234	       safexcel_aead_setkey(ctfm, key, len);
3235}
3236
3237static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3238					      unsigned int authsize)
3239{
3240	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3241	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3242
3243	/* Keep fallback cipher synchronized */
3244	return crypto_aead_setauthsize(ctx->fback, authsize);
3245}
3246
3247static int safexcel_aead_fallback_crypt(struct aead_request *req,
3248					enum safexcel_cipher_direction dir)
3249{
3250	struct crypto_aead *aead = crypto_aead_reqtfm(req);
3251	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3252	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3253	struct aead_request *subreq = aead_request_ctx(req);
3254
3255	aead_request_set_tfm(subreq, ctx->fback);
3256	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3257				  req->base.data);
3258	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3259			       req->iv);
3260	aead_request_set_ad(subreq, req->assoclen);
3261
3262	return (dir ==  SAFEXCEL_ENCRYPT) ?
3263		crypto_aead_encrypt(subreq) :
3264		crypto_aead_decrypt(subreq);
3265}
3266
3267static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3268{
3269	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3270
3271	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3272	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3273		return -EINVAL;
3274	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3275		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3276
3277	/* HW cannot do full (AAD+payload) zero length, use fallback */
3278	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3279}
3280
3281static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3282{
3283	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3284	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3285
3286	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3287	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3288		return -EINVAL;
3289	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3290		/* If input length > 0 only */
3291		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3292
3293	/* HW cannot do full (AAD+payload) zero length, use fallback */
3294	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3295}
3296
3297static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3298{
3299	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3300
3301	safexcel_aead_fallback_cra_init(tfm);
3302	ctx->alg = SAFEXCEL_SM4;
3303	ctx->blocksz = SM4_BLOCK_SIZE;
3304	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3305	ctx->state_sz = SM3_DIGEST_SIZE;
3306	return 0;
3307}
3308
3309struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3310	.type = SAFEXCEL_ALG_TYPE_AEAD,
3311	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3312	.alg.aead = {
3313		.setkey = safexcel_aead_fallback_setkey,
3314		.setauthsize = safexcel_aead_fallback_setauthsize,
3315		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3316		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3317		.ivsize = SM4_BLOCK_SIZE,
3318		.maxauthsize = SM3_DIGEST_SIZE,
3319		.base = {
3320			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
3321			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3322			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3323			.cra_flags = CRYPTO_ALG_ASYNC |
3324				     CRYPTO_ALG_ALLOCATES_MEMORY |
3325				     CRYPTO_ALG_KERN_DRIVER_ONLY |
3326				     CRYPTO_ALG_NEED_FALLBACK,
3327			.cra_blocksize = SM4_BLOCK_SIZE,
3328			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3329			.cra_alignmask = 0,
3330			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3331			.cra_exit = safexcel_aead_fallback_cra_exit,
3332			.cra_module = THIS_MODULE,
3333		},
3334	},
3335};
3336
3337static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3338{
3339	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3340
3341	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3342	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3343	return 0;
3344}
3345
3346struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
3347	.type = SAFEXCEL_ALG_TYPE_AEAD,
3348	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
3349	.alg.aead = {
3350		.setkey = safexcel_aead_setkey,
3351		.encrypt = safexcel_aead_encrypt,
3352		.decrypt = safexcel_aead_decrypt,
3353		.ivsize = CTR_RFC3686_IV_SIZE,
3354		.maxauthsize = SHA1_DIGEST_SIZE,
3355		.base = {
3356			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
3357			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
3358			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3359			.cra_flags = CRYPTO_ALG_ASYNC |
3360				     CRYPTO_ALG_ALLOCATES_MEMORY |
3361				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3362			.cra_blocksize = 1,
3363			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3364			.cra_alignmask = 0,
3365			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
3366			.cra_exit = safexcel_aead_cra_exit,
3367			.cra_module = THIS_MODULE,
3368		},
3369	},
3370};
3371
3372static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3373{
3374	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3375
3376	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3377	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3378	return 0;
3379}
3380
3381struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
3382	.type = SAFEXCEL_ALG_TYPE_AEAD,
3383	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3384	.alg.aead = {
3385		.setkey = safexcel_aead_setkey,
3386		.encrypt = safexcel_aead_encrypt,
3387		.decrypt = safexcel_aead_decrypt,
3388		.ivsize = CTR_RFC3686_IV_SIZE,
3389		.maxauthsize = SM3_DIGEST_SIZE,
3390		.base = {
3391			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
3392			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
3393			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3394			.cra_flags = CRYPTO_ALG_ASYNC |
3395				     CRYPTO_ALG_ALLOCATES_MEMORY |
3396				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3397			.cra_blocksize = 1,
3398			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3399			.cra_alignmask = 0,
3400			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
3401			.cra_exit = safexcel_aead_cra_exit,
3402			.cra_module = THIS_MODULE,
3403		},
3404	},
3405};
3406
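/*
 * RFC4106 (GCM for IPsec ESP): the key material passed in ends with a
 * 4-byte salt. The setkey below stores that salt as the implicit nonce
 * and forwards only the remaining bytes to the regular GCM setkey.
 */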
3407static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3408				       unsigned int len)
3409{
3410	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3411	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3412
3413	/* last 4 bytes of key are the nonce! */
3414	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3415
3416	len -= CTR_RFC3686_NONCE_SIZE;
3417	return safexcel_aead_gcm_setkey(ctfm, key, len);
3418}
3419
3420static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
3421					    unsigned int authsize)
3422{
3423	return crypto_rfc4106_check_authsize(authsize);
3424}
3425
3426static int safexcel_rfc4106_encrypt(struct aead_request *req)
3427{
3428	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3429	       safexcel_aead_encrypt(req);
3430}
3431
3432static int safexcel_rfc4106_decrypt(struct aead_request *req)
3433{
3434	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3435	       safexcel_aead_decrypt(req);
3436}
3437
3438static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3439{
3440	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3441	int ret;
3442
3443	ret = safexcel_aead_gcm_cra_init(tfm);
3444	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3445	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3446	return ret;
3447}
3448
3449struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3450	.type = SAFEXCEL_ALG_TYPE_AEAD,
3451	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3452	.alg.aead = {
3453		.setkey = safexcel_rfc4106_gcm_setkey,
3454		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3455		.encrypt = safexcel_rfc4106_encrypt,
3456		.decrypt = safexcel_rfc4106_decrypt,
3457		.ivsize = GCM_RFC4106_IV_SIZE,
3458		.maxauthsize = GHASH_DIGEST_SIZE,
3459		.base = {
3460			.cra_name = "rfc4106(gcm(aes))",
3461			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3462			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3463			.cra_flags = CRYPTO_ALG_ASYNC |
3464				     CRYPTO_ALG_ALLOCATES_MEMORY |
3465				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3466			.cra_blocksize = 1,
3467			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3468			.cra_alignmask = 0,
3469			.cra_init = safexcel_rfc4106_gcm_cra_init,
3470			.cra_exit = safexcel_aead_gcm_cra_exit,
3471		},
3472	},
3473};
3474
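/*
 * RFC4543 (GMAC for IPsec ESP) reuses the RFC4106 setkey and encrypt paths
 * but authenticates the whole packet without encrypting it; it always
 * produces the full 16-byte GHASH tag, which is why the setauthsize below
 * rejects anything else.
 */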
3475static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3476					    unsigned int authsize)
3477{
3478	if (authsize != GHASH_DIGEST_SIZE)
3479		return -EINVAL;
3480
3481	return 0;
3482}
3483
3484static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3485{
3486	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3487	int ret;
3488
3489	ret = safexcel_aead_gcm_cra_init(tfm);
3490	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3491	return ret;
3492}
3493
3494struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3495	.type = SAFEXCEL_ALG_TYPE_AEAD,
3496	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3497	.alg.aead = {
3498		.setkey = safexcel_rfc4106_gcm_setkey,
3499		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
3500		.encrypt = safexcel_rfc4106_encrypt,
3501		.decrypt = safexcel_rfc4106_decrypt,
3502		.ivsize = GCM_RFC4543_IV_SIZE,
3503		.maxauthsize = GHASH_DIGEST_SIZE,
3504		.base = {
3505			.cra_name = "rfc4543(gcm(aes))",
3506			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
3507			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3508			.cra_flags = CRYPTO_ALG_ASYNC |
3509				     CRYPTO_ALG_ALLOCATES_MEMORY |
3510				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3511			.cra_blocksize = 1,
3512			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3513			.cra_alignmask = 0,
3514			.cra_init = safexcel_rfc4543_gcm_cra_init,
3515			.cra_exit = safexcel_aead_gcm_cra_exit,
3516		},
3517	},
3518};
3519
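/*
 * RFC4309 (CCM for IPsec ESP): the key blob carries a 3-byte salt at the
 * end. The setkey below builds the nonce prefix by placing the CCM
 * counter-length value (3, for the engine's 4-byte counter) in the first
 * byte and the salt in the following three, then strips the salt from the
 * key before handing it to the plain CCM setkey.
 */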
3520static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3521				       unsigned int len)
3522{
3523	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3524	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3525
3526	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3527	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3528	/* last 3 bytes of key are the nonce! */
3529	memcpy((u8 *)&ctx->nonce + 1, key + len -
3530	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3531	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3532
3533	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3534	return safexcel_aead_ccm_setkey(ctfm, key, len);
3535}
3536
3537static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3538					    unsigned int authsize)
3539{
3540	/* Borrowed from crypto/ccm.c */
3541	switch (authsize) {
3542	case 8:
3543	case 12:
3544	case 16:
3545		break;
3546	default:
3547		return -EINVAL;
3548	}
3549
3550	return 0;
3551}
3552
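/*
 * For RFC4309 only two associated-data lengths are valid: 16 bytes
 * (SPI + 32-bit sequence number + 8-byte IV) or 20 bytes when 64-bit
 * extended sequence numbers are used. The two helpers below enforce this,
 * mirroring the check in crypto/ccm.c.
 */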
3553static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3554{
3555	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3556
3557	/* Borrowed from crypto/ccm.c */
3558	if (req->assoclen != 16 && req->assoclen != 20)
3559		return -EINVAL;
3560
3561	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3562}
3563
3564static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3565{
3566	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3567
3568	/* Borrowed from crypto/ccm.c */
3569	if (req->assoclen != 16 && req->assoclen != 20)
3570		return -EINVAL;
3571
3572	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3573}
3574
3575static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3576{
3577	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3578	int ret;
3579
3580	ret = safexcel_aead_ccm_cra_init(tfm);
3581	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3582	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3583	return ret;
3584}
3585
3586struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
3587	.type = SAFEXCEL_ALG_TYPE_AEAD,
3588	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
3589	.alg.aead = {
3590		.setkey = safexcel_rfc4309_ccm_setkey,
3591		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
3592		.encrypt = safexcel_rfc4309_ccm_encrypt,
3593		.decrypt = safexcel_rfc4309_ccm_decrypt,
3594		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
3595		.maxauthsize = AES_BLOCK_SIZE,
3596		.base = {
3597			.cra_name = "rfc4309(ccm(aes))",
3598			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
3599			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3600			.cra_flags = CRYPTO_ALG_ASYNC |
3601				     CRYPTO_ALG_ALLOCATES_MEMORY |
3602				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3603			.cra_blocksize = 1,
3604			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3605			.cra_alignmask = 0,
3606			.cra_init = safexcel_rfc4309_ccm_cra_init,
3607			.cra_exit = safexcel_aead_cra_exit,
3608			.cra_module = THIS_MODULE,
3609		},
3610	},
3611};
v5.4
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) 2017 Marvell
   4 *
   5 * Antoine Tenart <antoine.tenart@free-electrons.com>
   6 */
   7
 
   8#include <linux/device.h>
   9#include <linux/dma-mapping.h>
  10#include <linux/dmapool.h>
  11
  12#include <crypto/aead.h>
  13#include <crypto/aes.h>
  14#include <crypto/authenc.h>
 
  15#include <crypto/ctr.h>
  16#include <crypto/internal/des.h>
  17#include <crypto/gcm.h>
  18#include <crypto/ghash.h>
  19#include <crypto/sha.h>
  20#include <crypto/xts.h>
  21#include <crypto/skcipher.h>
  22#include <crypto/internal/aead.h>
  23#include <crypto/internal/skcipher.h>
  24
  25#include "safexcel.h"
  26
  27enum safexcel_cipher_direction {
  28	SAFEXCEL_ENCRYPT,
  29	SAFEXCEL_DECRYPT,
  30};
  31
  32enum safexcel_cipher_alg {
  33	SAFEXCEL_DES,
  34	SAFEXCEL_3DES,
  35	SAFEXCEL_AES,
 
 
  36};
  37
  38struct safexcel_cipher_ctx {
  39	struct safexcel_context base;
  40	struct safexcel_crypto_priv *priv;
  41
  42	u32 mode;
  43	enum safexcel_cipher_alg alg;
  44	bool aead;
  45	int  xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
  46
  47	__le32 key[16];
  48	u32 nonce;
  49	unsigned int key_len, xts;
  50
  51	/* All the below is AEAD specific */
  52	u32 hash_alg;
  53	u32 state_sz;
  54	u32 ipad[SHA512_DIGEST_SIZE / sizeof(u32)];
  55	u32 opad[SHA512_DIGEST_SIZE / sizeof(u32)];
  56
  57	struct crypto_cipher *hkaes;
  58};
  59
  60struct safexcel_cipher_req {
  61	enum safexcel_cipher_direction direction;
  62	/* Number of result descriptors associated to the request */
  63	unsigned int rdescs;
  64	bool needs_inv;
  65	int  nr_src, nr_dst;
  66};
  67
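/*
 * safexcel_cipher_token() loads the per-request IV/nonce into the command
 * descriptor's token area. CTR mode packs nonce + 64-bit IV + a big-endian
 * counter starting at 1, GCM packs the 96-bit IV plus counter, CCM copies
 * the variable-length nonce and zeroes the counter, and all other modes
 * simply copy a block-sized IV (ECB needs none at all).
 */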
  68static void safexcel_cipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
  69				  struct safexcel_command_desc *cdesc)
  70{
  71	u32 block_sz = 0;
  72
  73	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
  74		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
  75
  76		/* 32 bit nonce */
  77		cdesc->control_data.token[0] = ctx->nonce;
  78		/* 64 bit IV part */
  79		memcpy(&cdesc->control_data.token[1], iv, 8);
  80		/* 32 bit counter, start at 1 (big endian!) */
  81		cdesc->control_data.token[3] = cpu_to_be32(1);
  82
  83		return;
  84	} else if (ctx->xcm == EIP197_XCM_MODE_GCM) {
 
  85		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
  86
  87		/* 96 bit IV part */
  88		memcpy(&cdesc->control_data.token[0], iv, 12);
  89		/* 32 bit counter, start at 1 (big endian!) */
  90		cdesc->control_data.token[3] = cpu_to_be32(1);
  91
  92		return;
  93	} else if (ctx->xcm == EIP197_XCM_MODE_CCM) {
  94		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
  95
  96		/* Variable length IV part */
  97		memcpy(&cdesc->control_data.token[0], iv, 15 - iv[0]);
  98		/* Start variable length counter at 0 */
  99		memset((u8 *)&cdesc->control_data.token[0] + 15 - iv[0],
 100		       0, iv[0] + 1);
 101
 102		return;
 103	}
 104
 105	if (ctx->mode != CONTEXT_CONTROL_CRYPTO_MODE_ECB) {
 106		switch (ctx->alg) {
 107		case SAFEXCEL_DES:
 108			block_sz = DES_BLOCK_SIZE;
 109			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
 110			break;
 111		case SAFEXCEL_3DES:
 112			block_sz = DES3_EDE_BLOCK_SIZE;
 113			cdesc->control_data.options |= EIP197_OPTION_2_TOKEN_IV_CMD;
 114			break;
 115		case SAFEXCEL_AES:
 116			block_sz = AES_BLOCK_SIZE;
 117			cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
 118			break;
 119		}
 120		memcpy(cdesc->control_data.token, iv, block_sz);
 121	}
 122}
 123
 124static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 125				    struct safexcel_command_desc *cdesc,
 
 126				    u32 length)
 127{
 128	struct safexcel_token *token;
 
 129
 130	safexcel_cipher_token(ctx, iv, cdesc);
 131
 132	/* skip over worst case IV of 4 dwords, no need to be exact */
 133	token = (struct safexcel_token *)(cdesc->control_data.token + 4);
 134
 135	token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 136	token[0].packet_length = length;
 137	token[0].stat = EIP197_TOKEN_STAT_LAST_PACKET |
 138			EIP197_TOKEN_STAT_LAST_HASH;
 139	token[0].instructions = EIP197_TOKEN_INS_LAST |
 140				EIP197_TOKEN_INS_TYPE_CRYPTO |
 141				EIP197_TOKEN_INS_TYPE_OUTPUT;
 142}
 143
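/*
 * safexcel_aead_token() builds the instruction sequence for AEAD requests:
 * token[6] hashes the associated data, token[10] processes the payload,
 * and the tail of the token either inserts the computed digest (encrypt)
 * or retrieves and verifies it (decrypt). For CCM the routine additionally
 * constructs the B0 block and the encoded AAD length in-line, and pads both
 * AAD and payload to 16-byte boundaries towards the hash engine.
 */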
 144static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 145				struct safexcel_command_desc *cdesc,
 
 146				enum safexcel_cipher_direction direction,
 147				u32 cryptlen, u32 assoclen, u32 digestsize)
 148{
 149	struct safexcel_token *token;
 
 
 150
 151	safexcel_cipher_token(ctx, iv, cdesc);
 
 152
 153	if (direction == SAFEXCEL_ENCRYPT) {
 154		/* align end of instruction sequence to end of token */
 155		token = (struct safexcel_token *)(cdesc->control_data.token +
 156			 EIP197_MAX_TOKENS - 13);
 157
 158		token[12].opcode = EIP197_TOKEN_OPCODE_INSERT;
 159		token[12].packet_length = digestsize;
 160		token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
 161				 EIP197_TOKEN_STAT_LAST_PACKET;
 162		token[12].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 163					 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 164	} else {
 165		cryptlen -= digestsize;
 166
 167		/* align end of instruction sequence to end of token */
 168		token = (struct safexcel_token *)(cdesc->control_data.token +
 169			 EIP197_MAX_TOKENS - 14);
 170
 171		token[12].opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
 172		token[12].packet_length = digestsize;
 173		token[12].stat = EIP197_TOKEN_STAT_LAST_HASH |
 174				 EIP197_TOKEN_STAT_LAST_PACKET;
 175		token[12].instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 176
 177		token[13].opcode = EIP197_TOKEN_OPCODE_VERIFY;
 178		token[13].packet_length = digestsize |
 179					  EIP197_TOKEN_HASH_RESULT_VERIFY;
 180		token[13].stat = EIP197_TOKEN_STAT_LAST_HASH |
 181				 EIP197_TOKEN_STAT_LAST_PACKET;
 182		token[13].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
 183	}
 184
 185	token[6].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 186	token[6].packet_length = assoclen;
 187
 188	if (likely(cryptlen)) {
 189		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
 190
 191		token[10].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 192		token[10].packet_length = cryptlen;
 193		token[10].stat = EIP197_TOKEN_STAT_LAST_HASH;
 194		token[10].instructions = EIP197_TOKEN_INS_LAST |
 195					 EIP197_TOKEN_INS_TYPE_CRYPTO |
 196					 EIP197_TOKEN_INS_TYPE_HASH |
 197					 EIP197_TOKEN_INS_TYPE_OUTPUT;
 198	} else if (ctx->xcm != EIP197_XCM_MODE_CCM) {
 199		token[6].stat = EIP197_TOKEN_STAT_LAST_HASH;
 200		token[6].instructions = EIP197_TOKEN_INS_LAST |
 201					EIP197_TOKEN_INS_TYPE_HASH;
 202	}
 203
 204	if (!ctx->xcm)
 205		return;
 206
 207	token[8].opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
 208	token[8].packet_length = 0;
 209	token[8].instructions = AES_BLOCK_SIZE;
 210
 211	token[9].opcode = EIP197_TOKEN_OPCODE_INSERT;
 212	token[9].packet_length = AES_BLOCK_SIZE;
 213	token[9].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 214				EIP197_TOKEN_INS_TYPE_CRYPTO;
 215
 216	if (ctx->xcm == EIP197_XCM_MODE_GCM) {
 217		token[6].instructions = EIP197_TOKEN_INS_LAST |
 218					EIP197_TOKEN_INS_TYPE_HASH;
 219	} else {
 220		u8 *cbcmaciv = (u8 *)&token[1];
 221		u32 *aadlen = (u32 *)&token[5];
 222
 223		/* Construct IV block B0 for the CBC-MAC */
 224		token[0].opcode = EIP197_TOKEN_OPCODE_INSERT;
 225		token[0].packet_length = AES_BLOCK_SIZE +
 226					 ((assoclen > 0) << 1);
 227		token[0].instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
 228					EIP197_TOKEN_INS_TYPE_HASH;
 229		/* Variable length IV part */
 230		memcpy(cbcmaciv, iv, 15 - iv[0]);
 231		/* fixup flags byte */
 232		cbcmaciv[0] |= ((assoclen > 0) << 6) | ((digestsize - 2) << 2);
 233		/* Clear upper bytes of variable message length to 0 */
 234		memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
 235		/* insert lower 2 bytes of message length */
 236		cbcmaciv[14] = cryptlen >> 8;
 237		cbcmaciv[15] = cryptlen & 255;
 238
 239		if (assoclen) {
 240			*aadlen = cpu_to_le32(cpu_to_be16(assoclen));
 241			assoclen += 2;
 242		}
 243
 244		token[6].instructions = EIP197_TOKEN_INS_TYPE_HASH;
 245
 246		/* Align AAD data towards hash engine */
 247		token[7].opcode = EIP197_TOKEN_OPCODE_INSERT;
 248		assoclen &= 15;
 249		token[7].packet_length = assoclen ? 16 - assoclen : 0;
 250
 251		if (likely(cryptlen)) {
 252			token[7].instructions = EIP197_TOKEN_INS_TYPE_HASH;
 253
 254			/* Align crypto data towards hash engine */
 255			token[10].stat = 0;
 256
 257			token[11].opcode = EIP197_TOKEN_OPCODE_INSERT;
 258			cryptlen &= 15;
 259			token[11].packet_length = cryptlen ? 16 - cryptlen : 0;
 260			token[11].stat = EIP197_TOKEN_STAT_LAST_HASH;
 261			token[11].instructions = EIP197_TOKEN_INS_TYPE_HASH;
 262		} else {
 263			token[7].stat = EIP197_TOKEN_STAT_LAST_HASH;
 264			token[7].instructions = EIP197_TOKEN_INS_LAST |
 265						EIP197_TOKEN_INS_TYPE_HASH;
 266		}
 
 
 267	}
 268}
 269
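/*
 * Plain AES skcipher setkey: the expanded encryption key is stored
 * little-endian in the context. If the engine caches transform records
 * (EIP197_TRC_CACHE) and a record is already mapped, any key change marks
 * the context for invalidation before it is used again.
 */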
 270static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
 271					const u8 *key, unsigned int len)
 272{
 273	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
 274	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 275	struct safexcel_crypto_priv *priv = ctx->priv;
 276	struct crypto_aes_ctx aes;
 277	int ret, i;
 278
 279	ret = aes_expandkey(&aes, key, len);
 280	if (ret) {
 281		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 282		return ret;
 283	}
 284
 285	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 286		for (i = 0; i < len / sizeof(u32); i++) {
 287			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
 288				ctx->base.needs_inv = true;
 289				break;
 290			}
 291		}
 292	}
 293
 294	for (i = 0; i < len / sizeof(u32); i++)
 295		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
 296
 297	ctx->key_len = len;
 298
 299	memzero_explicit(&aes, sizeof(aes));
 300	return 0;
 301}
 302
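/*
 * authenc() keys arrive as a single blob; crypto_authenc_extractkeys()
 * splits it into the authentication and encryption parts. For rfc3686
 * templates the last 4 bytes of the encryption key are the nonce. The HMAC
 * ipad/opad states are precomputed through the driver's own hash
 * implementations so the engine can be loaded with ready-made digest state.
 */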
 303static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 304				unsigned int len)
 305{
 306	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
 307	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 308	struct safexcel_ahash_export_state istate, ostate;
 309	struct safexcel_crypto_priv *priv = ctx->priv;
 310	struct crypto_authenc_keys keys;
 311	struct crypto_aes_ctx aes;
 312	int err = -EINVAL;
 
 313
 314	if (crypto_authenc_extractkeys(&keys, key, len) != 0)
 315		goto badkey;
 316
 317	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
 318		/* Minimum keysize is minimum AES key size + nonce size */
 319		if (keys.enckeylen < (AES_MIN_KEY_SIZE +
 320				      CTR_RFC3686_NONCE_SIZE))
 321			goto badkey;
 322		/* last 4 bytes of key are the nonce! */
 323		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
 324				      CTR_RFC3686_NONCE_SIZE);
 325		/* exclude the nonce here */
  326		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
 327	}
 328
 329	/* Encryption key */
 330	switch (ctx->alg) {
 331	case SAFEXCEL_3DES:
 332		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
 333		if (unlikely(err))
 334			goto badkey_expflags;
 335		break;
 336	case SAFEXCEL_AES:
 337		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
 338		if (unlikely(err))
 339			goto badkey;
 340		break;
 341	default:
 342		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
 343		goto badkey;
 344	}
 345
 346	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
 347	    memcmp(ctx->key, keys.enckey, keys.enckeylen))
 348		ctx->base.needs_inv = true;
 349
 350	/* Auth key */
 351	switch (ctx->hash_alg) {
 352	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
 353		if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
 354					 keys.authkeylen, &istate, &ostate))
 355			goto badkey;
 356		break;
 357	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
 358		if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
 359					 keys.authkeylen, &istate, &ostate))
 360			goto badkey;
 361		break;
 362	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
 363		if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
 364					 keys.authkeylen, &istate, &ostate))
 365			goto badkey;
 366		break;
 367	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
 368		if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
 369					 keys.authkeylen, &istate, &ostate))
 370			goto badkey;
 371		break;
 372	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
 373		if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
 374					 keys.authkeylen, &istate, &ostate))
 375			goto badkey;
 
 376		break;
 377	default:
 378		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
 379		goto badkey;
 380	}
 381
 382	crypto_aead_set_flags(ctfm, crypto_aead_get_flags(ctfm) &
 383				    CRYPTO_TFM_RES_MASK);
 384
 385	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
 386	    (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
 387	     memcmp(ctx->opad, ostate.state, ctx->state_sz)))
 388		ctx->base.needs_inv = true;
 389
 390	/* Now copy the keys into the context */
 391	memcpy(ctx->key, keys.enckey, keys.enckeylen);
 
 392	ctx->key_len = keys.enckeylen;
 393
 394	memcpy(ctx->ipad, &istate.state, ctx->state_sz);
 395	memcpy(ctx->opad, &ostate.state, ctx->state_sz);
 396
 397	memzero_explicit(&keys, sizeof(keys));
 398	return 0;
 399
 400badkey:
 401	crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
 402badkey_expflags:
 403	memzero_explicit(&keys, sizeof(keys));
 404	return err;
 405}
 406
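/*
 * safexcel_context_control() assembles the two context control words:
 * control0 selects the operation (encrypt or decrypt, hash-then-crypt
 * ordering for CCM versus crypt-then-hash for the other AEAD modes), the
 * cipher and hash algorithms and the context record size (key plus one
 * digest state for XCM or two for HMAC); control1 carries the block cipher
 * mode.
 */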
 407static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
 408				    struct crypto_async_request *async,
 409				    struct safexcel_cipher_req *sreq,
 410				    struct safexcel_command_desc *cdesc)
 411{
 412	struct safexcel_crypto_priv *priv = ctx->priv;
 413	int ctrl_size = ctx->key_len / sizeof(u32);
 414
 415	cdesc->control_data.control1 = ctx->mode;
 416
 417	if (ctx->aead) {
 418		/* Take in account the ipad+opad digests */
 419		if (ctx->xcm) {
 420			ctrl_size += ctx->state_sz / sizeof(u32);
 421			cdesc->control_data.control0 =
 422				CONTEXT_CONTROL_KEY_EN |
 423				CONTEXT_CONTROL_DIGEST_XCM |
 424				ctx->hash_alg |
 425				CONTEXT_CONTROL_SIZE(ctrl_size);
 426		} else {
 427			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
 428			cdesc->control_data.control0 =
 429				CONTEXT_CONTROL_KEY_EN |
 430				CONTEXT_CONTROL_DIGEST_HMAC |
 431				ctx->hash_alg |
 432				CONTEXT_CONTROL_SIZE(ctrl_size);
 433		}
 434		if (sreq->direction == SAFEXCEL_ENCRYPT)
 435			cdesc->control_data.control0 |=
 436				(ctx->xcm == EIP197_XCM_MODE_CCM) ?
 437					CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT :
 438					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
 439
 440		else
 441			cdesc->control_data.control0 |=
 442				(ctx->xcm == EIP197_XCM_MODE_CCM) ?
 443					CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN :
 444					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
 445	} else {
 446		if (sreq->direction == SAFEXCEL_ENCRYPT)
 447			cdesc->control_data.control0 =
 448				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
 449				CONTEXT_CONTROL_KEY_EN |
 450				CONTEXT_CONTROL_SIZE(ctrl_size);
 451		else
 452			cdesc->control_data.control0 =
 453				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
 454				CONTEXT_CONTROL_KEY_EN |
 455				CONTEXT_CONTROL_SIZE(ctrl_size);
 456	}
 457
 458	if (ctx->alg == SAFEXCEL_DES) {
 459		cdesc->control_data.control0 |=
 460			CONTEXT_CONTROL_CRYPTO_ALG_DES;
 461	} else if (ctx->alg == SAFEXCEL_3DES) {
 462		cdesc->control_data.control0 |=
 463			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
 464	} else if (ctx->alg == SAFEXCEL_AES) {
 465		switch (ctx->key_len >> ctx->xts) {
 466		case AES_KEYSIZE_128:
 467			cdesc->control_data.control0 |=
 468				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
 469			break;
 470		case AES_KEYSIZE_192:
 471			cdesc->control_data.control0 |=
 472				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
 473			break;
 474		case AES_KEYSIZE_256:
 475			cdesc->control_data.control0 |=
 476				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
 477			break;
 478		default:
 479			dev_err(priv->dev, "aes keysize not supported: %u\n",
 480				ctx->key_len >> ctx->xts);
 481			return -EINVAL;
 482		}
 483	}
 484
 485	return 0;
 486}
 487
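/*
 * Result path for skcipher requests: consume all result descriptors queued
 * for the request, unmap the DMA scatterlists and, for CBC encryption,
 * copy the last ciphertext block back into the request IV so chained
 * requests see the correct output IV.
 */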
 488static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
 489				      struct crypto_async_request *async,
 490				      struct scatterlist *src,
 491				      struct scatterlist *dst,
 492				      unsigned int cryptlen,
 493				      struct safexcel_cipher_req *sreq,
 494				      bool *should_complete, int *ret)
 495{
 496	struct skcipher_request *areq = skcipher_request_cast(async);
 497	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
 498	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
 499	struct safexcel_result_desc *rdesc;
 500	int ndesc = 0;
 501
 502	*ret = 0;
 503
 504	if (unlikely(!sreq->rdescs))
 505		return 0;
 506
 507	while (sreq->rdescs--) {
 508		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 509		if (IS_ERR(rdesc)) {
 510			dev_err(priv->dev,
 511				"cipher: result: could not retrieve the result descriptor\n");
 512			*ret = PTR_ERR(rdesc);
 513			break;
 514		}
 515
 516		if (likely(!*ret))
 517			*ret = safexcel_rdesc_check_errors(priv, rdesc);
 518
 519		ndesc++;
 520	}
 521
 522	safexcel_complete(priv, ring);
 523
 524	if (src == dst) {
 525		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
 
 
 526	} else {
 527		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
 528		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
 529	}
 530
 531	/*
 532	 * Update IV in req from last crypto output word for CBC modes
 533	 */
 534	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
 535	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
 536		/* For encrypt take the last output word */
 537		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
 538				   crypto_skcipher_ivsize(skcipher),
 539				   (cryptlen -
 540				    crypto_skcipher_ivsize(skcipher)));
 541	}
 542
 543	*should_complete = true;
 544
 545	return ndesc;
 546}
 547
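/*
 * safexcel_send_req() is the common request builder: it DMA-maps the
 * source and destination scatterlists, emits one command descriptor per
 * source segment (the first also carries the context control words and the
 * token) and one result descriptor per destination segment, skipping the
 * AAD area on output. Zero-length input and AAD-only decrypt still get a
 * single dummy descriptor, since the engine requires at least one of each.
 */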
 548static int safexcel_send_req(struct crypto_async_request *base, int ring,
 549			     struct safexcel_cipher_req *sreq,
 550			     struct scatterlist *src, struct scatterlist *dst,
 551			     unsigned int cryptlen, unsigned int assoclen,
 552			     unsigned int digestsize, u8 *iv, int *commands,
 553			     int *results)
 554{
 555	struct skcipher_request *areq = skcipher_request_cast(base);
 556	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
 557	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 558	struct safexcel_crypto_priv *priv = ctx->priv;
 559	struct safexcel_command_desc *cdesc;
 560	struct safexcel_command_desc *first_cdesc = NULL;
 561	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
 562	struct scatterlist *sg;
 563	unsigned int totlen;
 564	unsigned int totlen_src = cryptlen + assoclen;
 565	unsigned int totlen_dst = totlen_src;
 
 566	int n_cdesc = 0, n_rdesc = 0;
 567	int queued, i, ret = 0;
 568	bool first = true;
 569
 570	sreq->nr_src = sg_nents_for_len(src, totlen_src);
 571
 572	if (ctx->aead) {
 573		/*
 574		 * AEAD has auth tag appended to output for encrypt and
 575		 * removed from the output for decrypt!
 576		 */
 577		if (sreq->direction == SAFEXCEL_DECRYPT)
 578			totlen_dst -= digestsize;
 579		else
 580			totlen_dst += digestsize;
 581
 582		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
 583		       ctx->ipad, ctx->state_sz);
 584		if (!ctx->xcm)
 585			memcpy(ctx->base.ctxr->data + (ctx->key_len +
 586			       ctx->state_sz) / sizeof(u32), ctx->opad,
 587			       ctx->state_sz);
 588	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
 589		   (sreq->direction == SAFEXCEL_DECRYPT)) {
 590		/*
 591		 * Save IV from last crypto input word for CBC modes in decrypt
 592		 * direction. Need to do this first in case of inplace operation
 593		 * as it will be overwritten.
 594		 */
 595		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
 596				   crypto_skcipher_ivsize(skcipher),
 597				   (totlen_src -
 598				    crypto_skcipher_ivsize(skcipher)));
 599	}
 600
 601	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
 602
 603	/*
 604	 * Remember actual input length, source buffer length may be
 605	 * updated in case of inline operation below.
 606	 */
 607	totlen = totlen_src;
 608	queued = totlen_src;
 609
 610	if (src == dst) {
 611		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
 612		sreq->nr_dst = sreq->nr_src;
 613		if (unlikely((totlen_src || totlen_dst) &&
 614		    (sreq->nr_src <= 0))) {
 615			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
 616				max(totlen_src, totlen_dst));
 617			return -EINVAL;
 618		}
 619		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
 
 
 620	} else {
 621		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
 622			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
 623				totlen_src);
 624			return -EINVAL;
 625		}
 626		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
 627
 628		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
 629			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
 630				totlen_dst);
 631			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 632				     DMA_TO_DEVICE);
 633			return -EINVAL;
 634		}
 635		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
 636	}
 637
 638	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
 639
 640	/* The EIP cannot deal with zero length input packets! */
 641	if (totlen == 0)
 642		totlen = 1;
 643
 644	/* command descriptors */
 645	for_each_sg(src, sg, sreq->nr_src, i) {
 646		int len = sg_dma_len(sg);
 647
 648		/* Do not overflow the request */
 649		if (queued - len < 0)
 650			len = queued;
 651
 652		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
 653					   !(queued - len),
 654					   sg_dma_address(sg), len, totlen,
 655					   ctx->base.ctxr_dma);
 656		if (IS_ERR(cdesc)) {
 657			/* No space left in the command descriptor ring */
 658			ret = PTR_ERR(cdesc);
 659			goto cdesc_rollback;
 660		}
 661		n_cdesc++;
 662
 663		if (n_cdesc == 1) {
 664			first_cdesc = cdesc;
 665		}
 666
 
 667		queued -= len;
 668		if (!queued)
 669			break;
 670	}
 671
 672	if (unlikely(!n_cdesc)) {
 673		/*
 674		 * Special case: zero length input buffer.
 675		 * The engine always needs the 1st command descriptor, however!
 676		 */
 677		first_cdesc = safexcel_add_cdesc(priv, ring, 1, 1, 0, 0, totlen,
 678						 ctx->base.ctxr_dma);
 679		n_cdesc = 1;
 680	}
 681
 682	/* Add context control words and token to first command descriptor */
 683	safexcel_context_control(ctx, base, sreq, first_cdesc);
 684	if (ctx->aead)
 685		safexcel_aead_token(ctx, iv, first_cdesc,
 686				    sreq->direction, cryptlen,
 687				    assoclen, digestsize);
 688	else
 689		safexcel_skcipher_token(ctx, iv, first_cdesc,
 690					cryptlen);
 691
 692	/* result descriptors */
 693	for_each_sg(dst, sg, sreq->nr_dst, i) {
 694		bool last = (i == sreq->nr_dst - 1);
 695		u32 len = sg_dma_len(sg);
 696
 697		/* only allow the part of the buffer we know we need */
 698		if (len > totlen_dst)
 699			len = totlen_dst;
 700		if (unlikely(!len))
 701			break;
 702		totlen_dst -= len;
 703
 704		/* skip over AAD space in buffer - not written */
 705		if (assoclen) {
 706			if (assoclen >= len) {
 707				assoclen -= len;
 708				continue;
 709			}
 710			rdesc = safexcel_add_rdesc(priv, ring, first, last,
 711						   sg_dma_address(sg) +
 712						   assoclen,
 713						   len - assoclen);
 714			assoclen = 0;
 715		} else {
 716			rdesc = safexcel_add_rdesc(priv, ring, first, last,
 717						   sg_dma_address(sg),
 718						   len);
 719		}
 720		if (IS_ERR(rdesc)) {
 721			/* No space left in the result descriptor ring */
 722			ret = PTR_ERR(rdesc);
 723			goto rdesc_rollback;
 724		}
 725		if (first) {
 726			first_rdesc = rdesc;
 727			first = false;
 728		}
 729		n_rdesc++;
 730	}
 731
 732	if (unlikely(first)) {
 733		/*
 734		 * Special case: AEAD decrypt with only AAD data.
 735		 * In this case there is NO output data from the engine,
 736		 * but the engine still needs a result descriptor!
 737		 * Create a dummy one just for catching the result token.
 738		 */
 739		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
 740		if (IS_ERR(rdesc)) {
 741			/* No space left in the result descriptor ring */
 742			ret = PTR_ERR(rdesc);
 743			goto rdesc_rollback;
 744		}
 745		first_rdesc = rdesc;
 746		n_rdesc = 1;
 747	}
 748
 749	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
 750
 751	*commands = n_cdesc;
 752	*results = n_rdesc;
 753	return 0;
 754
 755rdesc_rollback:
 756	for (i = 0; i < n_rdesc; i++)
 757		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
 758cdesc_rollback:
 759	for (i = 0; i < n_cdesc; i++)
 760		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
 761
 762	if (src == dst) {
 763		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
 
 
 764	} else {
 765		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
 766		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
 767	}
 768
 769	return ret;
 770}
 771
 772static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
 773				      int ring,
 774				      struct crypto_async_request *base,
 775				      struct safexcel_cipher_req *sreq,
 776				      bool *should_complete, int *ret)
 777{
 778	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 779	struct safexcel_result_desc *rdesc;
 780	int ndesc = 0, enq_ret;
 781
 782	*ret = 0;
 783
 784	if (unlikely(!sreq->rdescs))
 785		return 0;
 786
 787	while (sreq->rdescs--) {
 788		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
 789		if (IS_ERR(rdesc)) {
 790			dev_err(priv->dev,
 791				"cipher: invalidate: could not retrieve the result descriptor\n");
 792			*ret = PTR_ERR(rdesc);
 793			break;
 794		}
 795
 796		if (likely(!*ret))
 797			*ret = safexcel_rdesc_check_errors(priv, rdesc);
 798
 799		ndesc++;
 800	}
 801
 802	safexcel_complete(priv, ring);
 803
 804	if (ctx->base.exit_inv) {
 805		dma_pool_free(priv->context_pool, ctx->base.ctxr,
 806			      ctx->base.ctxr_dma);
 807
 808		*should_complete = true;
 809
 810		return ndesc;
 811	}
 812
 813	ring = safexcel_select_ring(priv);
 814	ctx->base.ring = ring;
 815
 816	spin_lock_bh(&priv->ring[ring].queue_lock);
 817	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
 818	spin_unlock_bh(&priv->ring[ring].queue_lock);
 819
 820	if (enq_ret != -EINPROGRESS)
 821		*ret = enq_ret;
 822
 823	queue_work(priv->ring[ring].workqueue,
 824		   &priv->ring[ring].work_data.work);
 825
 826	*should_complete = false;
 827
 828	return ndesc;
 829}
 830
 831static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
 832					   int ring,
 833					   struct crypto_async_request *async,
 834					   bool *should_complete, int *ret)
 835{
 836	struct skcipher_request *req = skcipher_request_cast(async);
 837	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 838	int err;
 839
 840	if (sreq->needs_inv) {
 841		sreq->needs_inv = false;
 842		err = safexcel_handle_inv_result(priv, ring, async, sreq,
 843						 should_complete, ret);
 844	} else {
 845		err = safexcel_handle_req_result(priv, ring, async, req->src,
 846						 req->dst, req->cryptlen, sreq,
 847						 should_complete, ret);
 848	}
 849
 850	return err;
 851}
 852
 853static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
 854				       int ring,
 855				       struct crypto_async_request *async,
 856				       bool *should_complete, int *ret)
 857{
 858	struct aead_request *req = aead_request_cast(async);
 859	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 860	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
 861	int err;
 862
 863	if (sreq->needs_inv) {
 864		sreq->needs_inv = false;
 865		err = safexcel_handle_inv_result(priv, ring, async, sreq,
 866						 should_complete, ret);
 867	} else {
 868		err = safexcel_handle_req_result(priv, ring, async, req->src,
 869						 req->dst,
 870						 req->cryptlen + crypto_aead_authsize(tfm),
 871						 sreq, should_complete, ret);
 872	}
 873
 874	return err;
 875}
 876
 877static int safexcel_cipher_send_inv(struct crypto_async_request *base,
 878				    int ring, int *commands, int *results)
 879{
 880	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 881	struct safexcel_crypto_priv *priv = ctx->priv;
 882	int ret;
 883
 884	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
 885	if (unlikely(ret))
 886		return ret;
 887
 888	*commands = 1;
 889	*results = 1;
 890
 891	return 0;
 892}
 893
 894static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
 895				  int *commands, int *results)
 896{
 897	struct skcipher_request *req = skcipher_request_cast(async);
 898	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
 899	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 900	struct safexcel_crypto_priv *priv = ctx->priv;
 901	int ret;
 902
 903	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
 904
 905	if (sreq->needs_inv) {
 906		ret = safexcel_cipher_send_inv(async, ring, commands, results);
 907	} else {
 908		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
 909		u8 input_iv[AES_BLOCK_SIZE];
 910
 911		/*
 912		 * Save input IV in case of CBC decrypt mode
 913		 * Will be overwritten with output IV prior to use!
 914		 */
 915		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
 916
 917		ret = safexcel_send_req(async, ring, sreq, req->src,
 918					req->dst, req->cryptlen, 0, 0, input_iv,
 919					commands, results);
 920	}
 921
 922	sreq->rdescs = *results;
 923	return ret;
 924}
 925
 926static int safexcel_aead_send(struct crypto_async_request *async, int ring,
 927			      int *commands, int *results)
 928{
 929	struct aead_request *req = aead_request_cast(async);
 930	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
 931	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
 932	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
 933	struct safexcel_crypto_priv *priv = ctx->priv;
 934	int ret;
 935
 936	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
 937
 938	if (sreq->needs_inv)
 939		ret = safexcel_cipher_send_inv(async, ring, commands, results);
 940	else
 941		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
 942					req->cryptlen, req->assoclen,
 943					crypto_aead_authsize(tfm), req->iv,
 944					commands, results);
 945	sreq->rdescs = *results;
 946	return ret;
 947}
 948
 949static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
 950				    struct crypto_async_request *base,
 951				    struct safexcel_cipher_req *sreq,
 952				    struct safexcel_inv_result *result)
 953{
 954	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 955	struct safexcel_crypto_priv *priv = ctx->priv;
 956	int ring = ctx->base.ring;
 957
 958	init_completion(&result->completion);
 959
 960	ctx = crypto_tfm_ctx(base->tfm);
 961	ctx->base.exit_inv = true;
 962	sreq->needs_inv = true;
 963
 964	spin_lock_bh(&priv->ring[ring].queue_lock);
 965	crypto_enqueue_request(&priv->ring[ring].queue, base);
 966	spin_unlock_bh(&priv->ring[ring].queue_lock);
 967
 968	queue_work(priv->ring[ring].workqueue,
 969		   &priv->ring[ring].work_data.work);
 970
 971	wait_for_completion(&result->completion);
 972
 973	if (result->error) {
 974		dev_warn(priv->dev,
 975			"cipher: sync: invalidate: completion error %d\n",
 976			 result->error);
 977		return result->error;
 978	}
 979
 980	return 0;
 981}
 982
 983static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
 984{
 985	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
 986	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 987	struct safexcel_inv_result result = {};
 988
 989	memset(req, 0, sizeof(struct skcipher_request));
 990
 991	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 992				      safexcel_inv_complete, &result);
 993	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
 994
 995	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
 996}
 997
 998static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
 999{
1000	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1001	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1002	struct safexcel_inv_result result = {};
1003
1004	memset(req, 0, sizeof(struct aead_request));
1005
1006	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1007				  safexcel_inv_complete, &result);
1008	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1009
1010	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1011}
1012
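/*
 * safexcel_queue_req() hands a request to one of the rings. The context
 * record is allocated lazily from the DMA pool on first use; if a record
 * already exists but needs refreshing (and the engine caches transform
 * records), an invalidation is scheduled first via sreq->needs_inv.
 */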
1013static int safexcel_queue_req(struct crypto_async_request *base,
1014			struct safexcel_cipher_req *sreq,
1015			enum safexcel_cipher_direction dir)
1016{
1017	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1018	struct safexcel_crypto_priv *priv = ctx->priv;
1019	int ret, ring;
1020
1021	sreq->needs_inv = false;
1022	sreq->direction = dir;
1023
1024	if (ctx->base.ctxr) {
1025		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1026			sreq->needs_inv = true;
1027			ctx->base.needs_inv = false;
1028		}
1029	} else {
1030		ctx->base.ring = safexcel_select_ring(priv);
1031		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1032						 EIP197_GFP_FLAGS(*base),
1033						 &ctx->base.ctxr_dma);
1034		if (!ctx->base.ctxr)
1035			return -ENOMEM;
1036	}
1037
1038	ring = ctx->base.ring;
1039
1040	spin_lock_bh(&priv->ring[ring].queue_lock);
1041	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1042	spin_unlock_bh(&priv->ring[ring].queue_lock);
1043
1044	queue_work(priv->ring[ring].workqueue,
1045		   &priv->ring[ring].work_data.work);
1046
1047	return ret;
1048}
1049
1050static int safexcel_encrypt(struct skcipher_request *req)
1051{
1052	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1053			SAFEXCEL_ENCRYPT);
1054}
1055
1056static int safexcel_decrypt(struct skcipher_request *req)
1057{
1058	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1059			SAFEXCEL_DECRYPT);
1060}
1061
1062static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1063{
1064	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1065	struct safexcel_alg_template *tmpl =
1066		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1067			     alg.skcipher.base);
1068
1069	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1070				    sizeof(struct safexcel_cipher_req));
1071
1072	ctx->priv = tmpl->priv;
1073
1074	ctx->base.send = safexcel_skcipher_send;
1075	ctx->base.handle_result = safexcel_skcipher_handle_result;
 
 
1076	return 0;
1077}
1078
1079static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1080{
1081	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1082
1083	memzero_explicit(ctx->key, sizeof(ctx->key));
1084
1085	/* context not allocated, skip invalidation */
1086	if (!ctx->base.ctxr)
1087		return -ENOMEM;
1088
1089	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1090	return 0;
1091}
1092
1093static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1094{
1095	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1096	struct safexcel_crypto_priv *priv = ctx->priv;
1097	int ret;
1098
1099	if (safexcel_cipher_cra_exit(tfm))
1100		return;
1101
1102	if (priv->flags & EIP197_TRC_CACHE) {
1103		ret = safexcel_skcipher_exit_inv(tfm);
1104		if (ret)
1105			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1106				 ret);
1107	} else {
1108		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1109			      ctx->base.ctxr_dma);
1110	}
1111}
1112
1113static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1114{
1115	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1116	struct safexcel_crypto_priv *priv = ctx->priv;
1117	int ret;
1118
1119	if (safexcel_cipher_cra_exit(tfm))
1120		return;
1121
1122	if (priv->flags & EIP197_TRC_CACHE) {
1123		ret = safexcel_aead_exit_inv(tfm);
1124		if (ret)
1125			dev_warn(priv->dev, "aead: invalidation error %d\n",
1126				 ret);
1127	} else {
1128		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1129			      ctx->base.ctxr_dma);
1130	}
1131}
1132
1133static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1134{
1135	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1136
1137	safexcel_skcipher_cra_init(tfm);
1138	ctx->alg  = SAFEXCEL_AES;
1139	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
 
 
1140	return 0;
1141}
1142
1143struct safexcel_alg_template safexcel_alg_ecb_aes = {
1144	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1145	.algo_mask = SAFEXCEL_ALG_AES,
1146	.alg.skcipher = {
1147		.setkey = safexcel_skcipher_aes_setkey,
1148		.encrypt = safexcel_encrypt,
1149		.decrypt = safexcel_decrypt,
1150		.min_keysize = AES_MIN_KEY_SIZE,
1151		.max_keysize = AES_MAX_KEY_SIZE,
1152		.base = {
1153			.cra_name = "ecb(aes)",
1154			.cra_driver_name = "safexcel-ecb-aes",
1155			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1156			.cra_flags = CRYPTO_ALG_ASYNC |
 
1157				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1158			.cra_blocksize = AES_BLOCK_SIZE,
1159			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1160			.cra_alignmask = 0,
1161			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
1162			.cra_exit = safexcel_skcipher_cra_exit,
1163			.cra_module = THIS_MODULE,
1164		},
1165	},
1166};
1167
1168static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1169{
1170	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1171
1172	safexcel_skcipher_cra_init(tfm);
1173	ctx->alg  = SAFEXCEL_AES;
 
1174	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1175	return 0;
1176}
1177
1178struct safexcel_alg_template safexcel_alg_cbc_aes = {
1179	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1180	.algo_mask = SAFEXCEL_ALG_AES,
1181	.alg.skcipher = {
1182		.setkey = safexcel_skcipher_aes_setkey,
1183		.encrypt = safexcel_encrypt,
1184		.decrypt = safexcel_decrypt,
1185		.min_keysize = AES_MIN_KEY_SIZE,
1186		.max_keysize = AES_MAX_KEY_SIZE,
1187		.ivsize = AES_BLOCK_SIZE,
1188		.base = {
1189			.cra_name = "cbc(aes)",
1190			.cra_driver_name = "safexcel-cbc-aes",
1191			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1192			.cra_flags = CRYPTO_ALG_ASYNC |
 
1193				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1194			.cra_blocksize = AES_BLOCK_SIZE,
1195			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1196			.cra_alignmask = 0,
1197			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
1198			.cra_exit = safexcel_skcipher_cra_exit,
1199			.cra_module = THIS_MODULE,
1200		},
1201	},
1202};
1203
1204static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1205{
1206	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1207
1208	safexcel_skcipher_cra_init(tfm);
1209	ctx->alg  = SAFEXCEL_AES;
1210	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1211	return 0;
1212}
1213
1214struct safexcel_alg_template safexcel_alg_cfb_aes = {
1215	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1216	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1217	.alg.skcipher = {
1218		.setkey = safexcel_skcipher_aes_setkey,
1219		.encrypt = safexcel_encrypt,
1220		.decrypt = safexcel_decrypt,
1221		.min_keysize = AES_MIN_KEY_SIZE,
1222		.max_keysize = AES_MAX_KEY_SIZE,
1223		.ivsize = AES_BLOCK_SIZE,
1224		.base = {
1225			.cra_name = "cfb(aes)",
1226			.cra_driver_name = "safexcel-cfb-aes",
1227			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1228			.cra_flags = CRYPTO_ALG_ASYNC |
1229				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1230			.cra_blocksize = 1,
1231			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1232			.cra_alignmask = 0,
1233			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
1234			.cra_exit = safexcel_skcipher_cra_exit,
1235			.cra_module = THIS_MODULE,
1236		},
1237	},
1238};
1239
1240static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1241{
1242	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1243
1244	safexcel_skcipher_cra_init(tfm);
1245	ctx->alg  = SAFEXCEL_AES;
1246	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1247	return 0;
1248}
1249
1250struct safexcel_alg_template safexcel_alg_ofb_aes = {
1251	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1252	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1253	.alg.skcipher = {
1254		.setkey = safexcel_skcipher_aes_setkey,
1255		.encrypt = safexcel_encrypt,
1256		.decrypt = safexcel_decrypt,
1257		.min_keysize = AES_MIN_KEY_SIZE,
1258		.max_keysize = AES_MAX_KEY_SIZE,
1259		.ivsize = AES_BLOCK_SIZE,
1260		.base = {
1261			.cra_name = "ofb(aes)",
1262			.cra_driver_name = "safexcel-ofb-aes",
1263			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1264			.cra_flags = CRYPTO_ALG_ASYNC |
1265				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1266			.cra_blocksize = 1,
1267			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1268			.cra_alignmask = 0,
1269			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
1270			.cra_exit = safexcel_skcipher_cra_exit,
1271			.cra_module = THIS_MODULE,
1272		},
1273	},
1274};
1275
1276static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1277					   const u8 *key, unsigned int len)
1278{
1279	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1280	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1281	struct safexcel_crypto_priv *priv = ctx->priv;
1282	struct crypto_aes_ctx aes;
1283	int ret, i;
1284	unsigned int keylen;
1285
1286	/* last 4 bytes of key are the nonce! */
1287	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1288	/* exclude the nonce here */
1289	keylen = len - CTR_RFC3686_NONCE_SIZE;
1290	ret = aes_expandkey(&aes, key, keylen);
1291	if (ret) {
1292		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1293		return ret;
1294	}
1295
1296	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1297		for (i = 0; i < keylen / sizeof(u32); i++) {
1298			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
1299				ctx->base.needs_inv = true;
1300				break;
1301			}
1302		}
1303	}
1304
1305	for (i = 0; i < keylen / sizeof(u32); i++)
1306		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1307
1308	ctx->key_len = keylen;
1309
1310	memzero_explicit(&aes, sizeof(aes));
1311	return 0;
1312}
1313
1314static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1315{
1316	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1317
1318	safexcel_skcipher_cra_init(tfm);
1319	ctx->alg  = SAFEXCEL_AES;
 
1320	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1321	return 0;
1322}
1323
1324struct safexcel_alg_template safexcel_alg_ctr_aes = {
1325	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1326	.algo_mask = SAFEXCEL_ALG_AES,
1327	.alg.skcipher = {
1328		.setkey = safexcel_skcipher_aesctr_setkey,
1329		.encrypt = safexcel_encrypt,
1330		.decrypt = safexcel_decrypt,
1331		/* Add nonce size */
1332		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1333		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1334		.ivsize = CTR_RFC3686_IV_SIZE,
1335		.base = {
1336			.cra_name = "rfc3686(ctr(aes))",
1337			.cra_driver_name = "safexcel-ctr-aes",
1338			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1339			.cra_flags = CRYPTO_ALG_ASYNC |
 
1340				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1341			.cra_blocksize = 1,
1342			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1343			.cra_alignmask = 0,
1344			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
1345			.cra_exit = safexcel_skcipher_cra_exit,
1346			.cra_module = THIS_MODULE,
1347		},
1348	},
1349};
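/*
 * Usage sketch (not part of this driver): how a kernel consumer would reach
 * the rfc3686(ctr(aes)) implementation registered above through the generic
 * skcipher API. Error handling is trimmed, the function name is made up for
 * illustration, and it additionally needs <linux/scatterlist.h>,
 * <linux/slab.h> and <linux/err.h>.
 */
#if 0	/* illustration only, not compiled */
static int example_rfc3686_encrypt_once(const u8 *key20, u8 *buf, int len)
{
	/* key layout: 16-byte AES key immediately followed by 4-byte nonce */
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	struct scatterlist sg;
	u8 iv[CTR_RFC3686_IV_SIZE] = { 0 };	/* 8-byte per-request IV */
	DECLARE_CRYPTO_WAIT(wait);
	int ret;

	tfm = crypto_alloc_skcipher("rfc3686(ctr(aes))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_skcipher_setkey(tfm, key20,
				     AES_KEYSIZE_128 + CTR_RFC3686_NONCE_SIZE);
	if (ret)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		ret = -ENOMEM;
		goto out_free_tfm;
	}

	/* buf must be DMA-able (e.g. kmalloc'd); in-place transform here */
	sg_init_one(&sg, buf, len);
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &sg, &sg, len, iv);

	/* the request is queued to a ring asynchronously; wait for it */
	ret = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return ret;
}
#endif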
1350
1351static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1352			       unsigned int len)
1353{
1354	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
 
1355	int ret;
1356
1357	ret = verify_skcipher_des_key(ctfm, key);
1358	if (ret)
1359		return ret;
1360
 1361	/* if context exists and key changed, need to invalidate it */
1362	if (ctx->base.ctxr_dma)
1363		if (memcmp(ctx->key, key, len))
1364			ctx->base.needs_inv = true;
1365
1366	memcpy(ctx->key, key, len);
1367	ctx->key_len = len;
1368
1369	return 0;
1370}
1371
1372static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1373{
1374	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1375
1376	safexcel_skcipher_cra_init(tfm);
1377	ctx->alg  = SAFEXCEL_DES;
 
 
1378	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1379	return 0;
1380}
1381
1382struct safexcel_alg_template safexcel_alg_cbc_des = {
1383	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1384	.algo_mask = SAFEXCEL_ALG_DES,
1385	.alg.skcipher = {
1386		.setkey = safexcel_des_setkey,
1387		.encrypt = safexcel_encrypt,
1388		.decrypt = safexcel_decrypt,
1389		.min_keysize = DES_KEY_SIZE,
1390		.max_keysize = DES_KEY_SIZE,
1391		.ivsize = DES_BLOCK_SIZE,
1392		.base = {
1393			.cra_name = "cbc(des)",
1394			.cra_driver_name = "safexcel-cbc-des",
1395			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1396			.cra_flags = CRYPTO_ALG_ASYNC |
 
1397				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1398			.cra_blocksize = DES_BLOCK_SIZE,
1399			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1400			.cra_alignmask = 0,
1401			.cra_init = safexcel_skcipher_des_cbc_cra_init,
1402			.cra_exit = safexcel_skcipher_cra_exit,
1403			.cra_module = THIS_MODULE,
1404		},
1405	},
1406};
1407
1408static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1409{
1410	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1411
1412	safexcel_skcipher_cra_init(tfm);
1413	ctx->alg  = SAFEXCEL_DES;
1414	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
 
 
1415	return 0;
1416}
1417
1418struct safexcel_alg_template safexcel_alg_ecb_des = {
1419	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1420	.algo_mask = SAFEXCEL_ALG_DES,
1421	.alg.skcipher = {
1422		.setkey = safexcel_des_setkey,
1423		.encrypt = safexcel_encrypt,
1424		.decrypt = safexcel_decrypt,
1425		.min_keysize = DES_KEY_SIZE,
1426		.max_keysize = DES_KEY_SIZE,
1427		.base = {
1428			.cra_name = "ecb(des)",
1429			.cra_driver_name = "safexcel-ecb-des",
1430			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1431			.cra_flags = CRYPTO_ALG_ASYNC |
 
1432				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1433			.cra_blocksize = DES_BLOCK_SIZE,
1434			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1435			.cra_alignmask = 0,
1436			.cra_init = safexcel_skcipher_des_ecb_cra_init,
1437			.cra_exit = safexcel_skcipher_cra_exit,
1438			.cra_module = THIS_MODULE,
1439		},
1440	},
1441};
1442
1443static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1444				   const u8 *key, unsigned int len)
1445{
1446	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
 
1447	int err;
1448
1449	err = verify_skcipher_des3_key(ctfm, key);
1450	if (err)
1451		return err;
1452
 1453	/* if context exists and key changed, need to invalidate it */
1454	if (ctx->base.ctxr_dma) {
1455		if (memcmp(ctx->key, key, len))
1456			ctx->base.needs_inv = true;
1457	}
1458
1459	memcpy(ctx->key, key, len);
1460
1461	ctx->key_len = len;
1462
1463	return 0;
1464}
1465
1466static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1467{
1468	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1469
1470	safexcel_skcipher_cra_init(tfm);
1471	ctx->alg  = SAFEXCEL_3DES;
 
 
1472	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1473	return 0;
1474}
1475
1476struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1477	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1478	.algo_mask = SAFEXCEL_ALG_DES,
1479	.alg.skcipher = {
1480		.setkey = safexcel_des3_ede_setkey,
1481		.encrypt = safexcel_encrypt,
1482		.decrypt = safexcel_decrypt,
1483		.min_keysize = DES3_EDE_KEY_SIZE,
1484		.max_keysize = DES3_EDE_KEY_SIZE,
1485		.ivsize = DES3_EDE_BLOCK_SIZE,
1486		.base = {
1487			.cra_name = "cbc(des3_ede)",
1488			.cra_driver_name = "safexcel-cbc-des3_ede",
1489			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1490			.cra_flags = CRYPTO_ALG_ASYNC |
 
1491				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1492			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1493			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1494			.cra_alignmask = 0,
1495			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
1496			.cra_exit = safexcel_skcipher_cra_exit,
1497			.cra_module = THIS_MODULE,
1498		},
1499	},
1500};
1501
1502static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1503{
1504	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1505
1506	safexcel_skcipher_cra_init(tfm);
1507	ctx->alg  = SAFEXCEL_3DES;
1508	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
 
 
1509	return 0;
1510}
1511
1512struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1513	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1514	.algo_mask = SAFEXCEL_ALG_DES,
1515	.alg.skcipher = {
1516		.setkey = safexcel_des3_ede_setkey,
1517		.encrypt = safexcel_encrypt,
1518		.decrypt = safexcel_decrypt,
1519		.min_keysize = DES3_EDE_KEY_SIZE,
1520		.max_keysize = DES3_EDE_KEY_SIZE,
1521		.base = {
1522			.cra_name = "ecb(des3_ede)",
1523			.cra_driver_name = "safexcel-ecb-des3_ede",
1524			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1525			.cra_flags = CRYPTO_ALG_ASYNC |
 
1526				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1527			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1528			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1529			.cra_alignmask = 0,
1530			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
1531			.cra_exit = safexcel_skcipher_cra_exit,
1532			.cra_module = THIS_MODULE,
1533		},
1534	},
1535};
1536
1537static int safexcel_aead_encrypt(struct aead_request *req)
1538{
1539	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1540
1541	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1542}
1543
1544static int safexcel_aead_decrypt(struct aead_request *req)
1545{
1546	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1547
1548	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1549}
1550
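/*
 * Common AEAD tfm init: hooks up the AEAD send/handle_result callbacks
 * and defaults to AES in CBC mode; the per-digest and per-cipher init
 * helpers below override alg, mode, hash_alg and state_sz as needed.
 */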
1551static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1552{
1553	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1554	struct safexcel_alg_template *tmpl =
1555		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1556			     alg.aead.base);
1557
1558	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1559				sizeof(struct safexcel_cipher_req));
1560
1561	ctx->priv = tmpl->priv;
1562
1563	ctx->alg  = SAFEXCEL_AES; /* default */
1564	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1565	ctx->aead = true;
1566	ctx->base.send = safexcel_aead_send;
1567	ctx->base.handle_result = safexcel_aead_handle_result;
1568	return 0;
1569}
1570
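/*
 * Per-digest init helpers: each one runs the common AEAD init and then
 * selects the engine hash algorithm and the size of the saved digest
 * state for that hash.
 */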
1571static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1572{
1573	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1574
1575	safexcel_aead_cra_init(tfm);
1576	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1577	ctx->state_sz = SHA1_DIGEST_SIZE;
1578	return 0;
1579}
1580
1581struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1582	.type = SAFEXCEL_ALG_TYPE_AEAD,
1583	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1584	.alg.aead = {
1585		.setkey = safexcel_aead_setkey,
1586		.encrypt = safexcel_aead_encrypt,
1587		.decrypt = safexcel_aead_decrypt,
1588		.ivsize = AES_BLOCK_SIZE,
1589		.maxauthsize = SHA1_DIGEST_SIZE,
1590		.base = {
1591			.cra_name = "authenc(hmac(sha1),cbc(aes))",
1592			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1593			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1594			.cra_flags = CRYPTO_ALG_ASYNC |
1595				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1596			.cra_blocksize = AES_BLOCK_SIZE,
1597			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1598			.cra_alignmask = 0,
1599			.cra_init = safexcel_aead_sha1_cra_init,
1600			.cra_exit = safexcel_aead_cra_exit,
1601			.cra_module = THIS_MODULE,
1602		},
1603	},
1604};
1605
1606static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1607{
1608	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1609
1610	safexcel_aead_cra_init(tfm);
1611	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1612	ctx->state_sz = SHA256_DIGEST_SIZE;
1613	return 0;
1614}
1615
1616struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1617	.type = SAFEXCEL_ALG_TYPE_AEAD,
1618	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1619	.alg.aead = {
1620		.setkey = safexcel_aead_setkey,
1621		.encrypt = safexcel_aead_encrypt,
1622		.decrypt = safexcel_aead_decrypt,
1623		.ivsize = AES_BLOCK_SIZE,
1624		.maxauthsize = SHA256_DIGEST_SIZE,
1625		.base = {
1626			.cra_name = "authenc(hmac(sha256),cbc(aes))",
1627			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1628			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1629			.cra_flags = CRYPTO_ALG_ASYNC |
1630				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1631			.cra_blocksize = AES_BLOCK_SIZE,
1632			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1633			.cra_alignmask = 0,
1634			.cra_init = safexcel_aead_sha256_cra_init,
1635			.cra_exit = safexcel_aead_cra_exit,
1636			.cra_module = THIS_MODULE,
1637		},
1638	},
1639};
1640
1641static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1642{
1643	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1644
1645	safexcel_aead_cra_init(tfm);
1646	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1647	ctx->state_sz = SHA256_DIGEST_SIZE;
1648	return 0;
1649}
1650
1651struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1652	.type = SAFEXCEL_ALG_TYPE_AEAD,
1653	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1654	.alg.aead = {
1655		.setkey = safexcel_aead_setkey,
1656		.encrypt = safexcel_aead_encrypt,
1657		.decrypt = safexcel_aead_decrypt,
1658		.ivsize = AES_BLOCK_SIZE,
1659		.maxauthsize = SHA224_DIGEST_SIZE,
1660		.base = {
1661			.cra_name = "authenc(hmac(sha224),cbc(aes))",
1662			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1663			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1664			.cra_flags = CRYPTO_ALG_ASYNC |
1665				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1666			.cra_blocksize = AES_BLOCK_SIZE,
1667			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1668			.cra_alignmask = 0,
1669			.cra_init = safexcel_aead_sha224_cra_init,
1670			.cra_exit = safexcel_aead_cra_exit,
1671			.cra_module = THIS_MODULE,
1672		},
1673	},
1674};
1675
1676static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1677{
1678	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1679
1680	safexcel_aead_cra_init(tfm);
1681	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1682	ctx->state_sz = SHA512_DIGEST_SIZE;
1683	return 0;
1684}
1685
1686struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1687	.type = SAFEXCEL_ALG_TYPE_AEAD,
1688	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1689	.alg.aead = {
1690		.setkey = safexcel_aead_setkey,
1691		.encrypt = safexcel_aead_encrypt,
1692		.decrypt = safexcel_aead_decrypt,
1693		.ivsize = AES_BLOCK_SIZE,
1694		.maxauthsize = SHA512_DIGEST_SIZE,
1695		.base = {
1696			.cra_name = "authenc(hmac(sha512),cbc(aes))",
1697			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1698			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1699			.cra_flags = CRYPTO_ALG_ASYNC |
1700				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1701			.cra_blocksize = AES_BLOCK_SIZE,
1702			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1703			.cra_alignmask = 0,
1704			.cra_init = safexcel_aead_sha512_cra_init,
1705			.cra_exit = safexcel_aead_cra_exit,
1706			.cra_module = THIS_MODULE,
1707		},
1708	},
1709};
1710
1711static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1712{
1713	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1714
1715	safexcel_aead_cra_init(tfm);
1716	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1717	ctx->state_sz = SHA512_DIGEST_SIZE;
1718	return 0;
1719}
1720
1721struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1722	.type = SAFEXCEL_ALG_TYPE_AEAD,
1723	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1724	.alg.aead = {
1725		.setkey = safexcel_aead_setkey,
1726		.encrypt = safexcel_aead_encrypt,
1727		.decrypt = safexcel_aead_decrypt,
1728		.ivsize = AES_BLOCK_SIZE,
1729		.maxauthsize = SHA384_DIGEST_SIZE,
1730		.base = {
1731			.cra_name = "authenc(hmac(sha384),cbc(aes))",
1732			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1733			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1734			.cra_flags = CRYPTO_ALG_ASYNC |
1735				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1736			.cra_blocksize = AES_BLOCK_SIZE,
1737			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1738			.cra_alignmask = 0,
1739			.cra_init = safexcel_aead_sha384_cra_init,
1740			.cra_exit = safexcel_aead_cra_exit,
1741			.cra_module = THIS_MODULE,
1742		},
1743	},
1744};
1745
1746static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1747{
1748	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1749
1750	safexcel_aead_sha1_cra_init(tfm);
1751	ctx->alg = SAFEXCEL_3DES; /* override default */
1752	return 0;
1753}
1754
1755struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
1756	.type = SAFEXCEL_ALG_TYPE_AEAD,
1757	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
1758	.alg.aead = {
1759		.setkey = safexcel_aead_setkey,
1760		.encrypt = safexcel_aead_encrypt,
1761		.decrypt = safexcel_aead_decrypt,
1762		.ivsize = DES3_EDE_BLOCK_SIZE,
1763		.maxauthsize = SHA1_DIGEST_SIZE,
1764		.base = {
1765			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
1766			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
1767			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1768			.cra_flags = CRYPTO_ALG_ASYNC |
1769				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1770			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
1771			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1772			.cra_alignmask = 0,
1773			.cra_init = safexcel_aead_sha1_des3_cra_init,
1774			.cra_exit = safexcel_aead_cra_exit,
1775			.cra_module = THIS_MODULE,
1776		},
1777	},
1778};
1779
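/*
 * The rfc3686(ctr(aes)) authenc variants below reuse the per-digest init
 * helpers and only switch the cipher mode to CTR with a loaded counter
 * block; the 8-byte per-request IV is combined with a nonce and a 32-bit
 * counter as rfc3686 specifies.
 */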
1780static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
1781{
1782	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1783
1784	safexcel_aead_sha1_cra_init(tfm);
1785	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
1786	return 0;
1787}
1788
1789struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
1790	.type = SAFEXCEL_ALG_TYPE_AEAD,
1791	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1792	.alg.aead = {
1793		.setkey = safexcel_aead_setkey,
1794		.encrypt = safexcel_aead_encrypt,
1795		.decrypt = safexcel_aead_decrypt,
1796		.ivsize = CTR_RFC3686_IV_SIZE,
1797		.maxauthsize = SHA1_DIGEST_SIZE,
1798		.base = {
1799			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
1800			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
1801			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1802			.cra_flags = CRYPTO_ALG_ASYNC |
1803				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1804			.cra_blocksize = 1,
1805			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1806			.cra_alignmask = 0,
1807			.cra_init = safexcel_aead_sha1_ctr_cra_init,
1808			.cra_exit = safexcel_aead_cra_exit,
1809			.cra_module = THIS_MODULE,
1810		},
1811	},
1812};
1813
1814static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
1815{
1816	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1817
1818	safexcel_aead_sha256_cra_init(tfm);
1819	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
1820	return 0;
1821}
1822
1823struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
1824	.type = SAFEXCEL_ALG_TYPE_AEAD,
1825	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1826	.alg.aead = {
1827		.setkey = safexcel_aead_setkey,
1828		.encrypt = safexcel_aead_encrypt,
1829		.decrypt = safexcel_aead_decrypt,
1830		.ivsize = CTR_RFC3686_IV_SIZE,
1831		.maxauthsize = SHA256_DIGEST_SIZE,
1832		.base = {
1833			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
1834			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
1835			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1836			.cra_flags = CRYPTO_ALG_ASYNC |
1837				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1838			.cra_blocksize = 1,
1839			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1840			.cra_alignmask = 0,
1841			.cra_init = safexcel_aead_sha256_ctr_cra_init,
1842			.cra_exit = safexcel_aead_cra_exit,
1843			.cra_module = THIS_MODULE,
1844		},
1845	},
1846};
1847
1848static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
1849{
1850	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1851
1852	safexcel_aead_sha224_cra_init(tfm);
1853	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
1854	return 0;
1855}
1856
1857struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
1858	.type = SAFEXCEL_ALG_TYPE_AEAD,
1859	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1860	.alg.aead = {
1861		.setkey = safexcel_aead_setkey,
1862		.encrypt = safexcel_aead_encrypt,
1863		.decrypt = safexcel_aead_decrypt,
1864		.ivsize = CTR_RFC3686_IV_SIZE,
1865		.maxauthsize = SHA224_DIGEST_SIZE,
1866		.base = {
1867			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
1868			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
1869			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1870			.cra_flags = CRYPTO_ALG_ASYNC |
1871				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1872			.cra_blocksize = 1,
1873			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1874			.cra_alignmask = 0,
1875			.cra_init = safexcel_aead_sha224_ctr_cra_init,
1876			.cra_exit = safexcel_aead_cra_exit,
1877			.cra_module = THIS_MODULE,
1878		},
1879	},
1880};
1881
1882static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
1883{
1884	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1885
1886	safexcel_aead_sha512_cra_init(tfm);
1887	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
1888	return 0;
1889}
1890
1891struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
1892	.type = SAFEXCEL_ALG_TYPE_AEAD,
1893	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1894	.alg.aead = {
1895		.setkey = safexcel_aead_setkey,
1896		.encrypt = safexcel_aead_encrypt,
1897		.decrypt = safexcel_aead_decrypt,
1898		.ivsize = CTR_RFC3686_IV_SIZE,
1899		.maxauthsize = SHA512_DIGEST_SIZE,
1900		.base = {
1901			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
1902			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
1903			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1904			.cra_flags = CRYPTO_ALG_ASYNC |
1905				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1906			.cra_blocksize = 1,
1907			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1908			.cra_alignmask = 0,
1909			.cra_init = safexcel_aead_sha512_ctr_cra_init,
1910			.cra_exit = safexcel_aead_cra_exit,
1911			.cra_module = THIS_MODULE,
1912		},
1913	},
1914};
1915
1916static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
1917{
1918	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1919
1920	safexcel_aead_sha384_cra_init(tfm);
1921	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
1922	return 0;
1923}
1924
1925struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
1926	.type = SAFEXCEL_ALG_TYPE_AEAD,
1927	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1928	.alg.aead = {
1929		.setkey = safexcel_aead_setkey,
1930		.encrypt = safexcel_aead_encrypt,
1931		.decrypt = safexcel_aead_decrypt,
1932		.ivsize = CTR_RFC3686_IV_SIZE,
1933		.maxauthsize = SHA384_DIGEST_SIZE,
1934		.base = {
1935			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
1936			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
1937			.cra_priority = SAFEXCEL_CRA_PRIORITY,
1938			.cra_flags = CRYPTO_ALG_ASYNC |
1939				     CRYPTO_ALG_KERN_DRIVER_ONLY,
1940			.cra_blocksize = 1,
1941			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1942			.cra_alignmask = 0,
1943			.cra_init = safexcel_aead_sha384_ctr_cra_init,
1944			.cra_exit = safexcel_aead_cra_exit,
1945			.cra_module = THIS_MODULE,
1946		},
1947	},
1948};
1949
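/*
 * XTS setkey: xts_verify_key() enforces the XTS key rules, then the key
 * blob is split in half - the first half is expanded as the data cipher
 * key, the second half as the tweak key - and both halves are compared
 * against the cached context to decide whether it must be invalidated.
 */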
1950static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
1951					   const u8 *key, unsigned int len)
1952{
1953	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1954	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1955	struct safexcel_crypto_priv *priv = ctx->priv;
1956	struct crypto_aes_ctx aes;
1957	int ret, i;
1958	unsigned int keylen;
1959
1960	/* Check for illegal XTS keys */
1961	ret = xts_verify_key(ctfm, key, len);
1962	if (ret)
1963		return ret;
1964
1965	/* Only half of the key data is cipher key */
1966	keylen = (len >> 1);
1967	ret = aes_expandkey(&aes, key, keylen);
1968	if (ret) {
1969		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1970		return ret;
1971	}
1972
1973	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1974		for (i = 0; i < keylen / sizeof(u32); i++) {
1975			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
1976				ctx->base.needs_inv = true;
1977				break;
1978			}
1979		}
1980	}
1981
1982	for (i = 0; i < keylen / sizeof(u32); i++)
1983		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1984
1985	/* The other half is the tweak key */
1986	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
1987	if (ret) {
1988		crypto_skcipher_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
1989		return ret;
1990	}
1991
1992	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1993		for (i = 0; i < keylen / sizeof(u32); i++) {
1994			if (ctx->key[i + keylen / sizeof(u32)] !=
1995			    cpu_to_le32(aes.key_enc[i])) {
1996				ctx->base.needs_inv = true;
1997				break;
1998			}
1999		}
2000	}
2001
2002	for (i = 0; i < keylen / sizeof(u32); i++)
2003		ctx->key[i + keylen / sizeof(u32)] =
2004			cpu_to_le32(aes.key_enc[i]);
2005
2006	ctx->key_len = keylen << 1;
2007
2008	memzero_explicit(&aes, sizeof(aes));
2009	return 0;
2010}
2011
2012static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2013{
2014	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2015
2016	safexcel_skcipher_cra_init(tfm);
2017	ctx->alg  = SAFEXCEL_AES;
2018	ctx->xts  = 1;
2019	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2020	return 0;
2021}
2022
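/*
 * XTS needs at least one full AES block of payload; shorter requests are
 * rejected with -EINVAL before being queued to the engine.
 */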
2023static int safexcel_encrypt_xts(struct skcipher_request *req)
2024{
2025	if (req->cryptlen < XTS_BLOCK_SIZE)
2026		return -EINVAL;
2027	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2028				  SAFEXCEL_ENCRYPT);
2029}
2030
2031static int safexcel_decrypt_xts(struct skcipher_request *req)
2032{
2033	if (req->cryptlen < XTS_BLOCK_SIZE)
2034		return -EINVAL;
2035	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2036				  SAFEXCEL_DECRYPT);
2037}
2038
2039struct safexcel_alg_template safexcel_alg_xts_aes = {
2040	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
2041	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
2042	.alg.skcipher = {
2043		.setkey = safexcel_skcipher_aesxts_setkey,
2044		.encrypt = safexcel_encrypt_xts,
2045		.decrypt = safexcel_decrypt_xts,
2046		/* XTS actually uses 2 AES keys glued together */
2047		.min_keysize = AES_MIN_KEY_SIZE * 2,
2048		.max_keysize = AES_MAX_KEY_SIZE * 2,
2049		.ivsize = XTS_BLOCK_SIZE,
2050		.base = {
2051			.cra_name = "xts(aes)",
2052			.cra_driver_name = "safexcel-xts-aes",
2053			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2054			.cra_flags = CRYPTO_ALG_ASYNC |
2055				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2056			.cra_blocksize = XTS_BLOCK_SIZE,
2057			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2058			.cra_alignmask = 0,
2059			.cra_init = safexcel_skcipher_aes_xts_cra_init,
2060			.cra_exit = safexcel_skcipher_cra_exit,
2061			.cra_module = THIS_MODULE,
2062		},
2063	},
2064};
2065
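/*
 * GCM setkey: besides caching the AES key, the GHASH hash key H is
 * derived by encrypting an all-zero block with a software AES cipher
 * (ctx->hkaes) and stored big-endian in ctx->ipad for the engine; a
 * cached context is invalidated if either the key or H changed.
 */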
2066static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
2067				    unsigned int len)
2068{
2069	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2070	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2071	struct safexcel_crypto_priv *priv = ctx->priv;
2072	struct crypto_aes_ctx aes;
2073	u32 hashkey[AES_BLOCK_SIZE >> 2];
2074	int ret, i;
2075
2076	ret = aes_expandkey(&aes, key, len);
2077	if (ret) {
2078		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2079		memzero_explicit(&aes, sizeof(aes));
2080		return ret;
2081	}
2082
2083	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2084		for (i = 0; i < len / sizeof(u32); i++) {
2085			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2086				ctx->base.needs_inv = true;
2087				break;
2088			}
2089		}
2090	}
2091
2092	for (i = 0; i < len / sizeof(u32); i++)
2093		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2094
2095	ctx->key_len = len;
2096
2097	/* Compute hash key by encrypting zeroes with cipher key */
2098	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
2099	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
2100				CRYPTO_TFM_REQ_MASK);
2101	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
2102	crypto_aead_set_flags(ctfm, crypto_cipher_get_flags(ctx->hkaes) &
2103			      CRYPTO_TFM_RES_MASK);
2104	if (ret)
2105		return ret;
2106
2107	memset(hashkey, 0, AES_BLOCK_SIZE);
2108	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);
2109
2110	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2111		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
2112			if (ctx->ipad[i] != cpu_to_be32(hashkey[i])) {
2113				ctx->base.needs_inv = true;
2114				break;
2115			}
2116		}
2117	}
2118
2119	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
2120		ctx->ipad[i] = cpu_to_be32(hashkey[i]);
2121
2122	memzero_explicit(hashkey, AES_BLOCK_SIZE);
2123	memzero_explicit(&aes, sizeof(aes));
2124	return 0;
2125}
2126
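/*
 * GCM tfm init: selects GHASH as the authentication algorithm, switches
 * the cipher mode to the combined (XCM) mode and allocates the software
 * AES cipher used only for deriving the hash key in setkey.
 */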
2127static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2128{
2129	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2130
2131	safexcel_aead_cra_init(tfm);
2132	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2133	ctx->state_sz = GHASH_BLOCK_SIZE;
2134	ctx->xcm = EIP197_XCM_MODE_GCM;
2135	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2136
2137	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
2138	if (IS_ERR(ctx->hkaes))
2139		return PTR_ERR(ctx->hkaes);
2140
2141	return 0;
2142}
2143
2144static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
2145{
2146	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2147
2148	crypto_free_cipher(ctx->hkaes);
2149	safexcel_aead_cra_exit(tfm);
2150}
2151
2152static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
2153					 unsigned int authsize)
2154{
2155	return crypto_gcm_check_authsize(authsize);
2156}
2157
2158struct safexcel_alg_template safexcel_alg_gcm = {
2159	.type = SAFEXCEL_ALG_TYPE_AEAD,
2160	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
2161	.alg.aead = {
2162		.setkey = safexcel_aead_gcm_setkey,
2163		.setauthsize = safexcel_aead_gcm_setauthsize,
2164		.encrypt = safexcel_aead_encrypt,
2165		.decrypt = safexcel_aead_decrypt,
2166		.ivsize = GCM_AES_IV_SIZE,
2167		.maxauthsize = GHASH_DIGEST_SIZE,
2168		.base = {
2169			.cra_name = "gcm(aes)",
2170			.cra_driver_name = "safexcel-gcm-aes",
2171			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2172			.cra_flags = CRYPTO_ALG_ASYNC |
2173				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2174			.cra_blocksize = 1,
2175			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2176			.cra_alignmask = 0,
2177			.cra_init = safexcel_aead_gcm_cra_init,
2178			.cra_exit = safexcel_aead_gcm_cra_exit,
2179			.cra_module = THIS_MODULE,
2180		},
2181	},
2182};
2183
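/*
 * CCM setkey: the same AES key drives both the CTR encryption and the
 * CBC-MAC, so it is stored twice - once as the cipher key and once in
 * the ipad area after two reserved AES blocks - and the XCBC hash
 * algorithm variant is picked from the key size.
 */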
2184static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
2185				    unsigned int len)
2186{
2187	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
2188	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2189	struct safexcel_crypto_priv *priv = ctx->priv;
2190	struct crypto_aes_ctx aes;
2191	int ret, i;
2192
2193	ret = aes_expandkey(&aes, key, len);
2194	if (ret) {
2195		crypto_aead_set_flags(ctfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2196		memzero_explicit(&aes, sizeof(aes));
2197		return ret;
2198	}
2199
2200	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2201		for (i = 0; i < len / sizeof(u32); i++) {
2202			if (ctx->key[i] != cpu_to_le32(aes.key_enc[i])) {
2203				ctx->base.needs_inv = true;
2204				break;
2205			}
2206		}
2207	}
2208
2209	for (i = 0; i < len / sizeof(u32); i++) {
2210		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2211		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
2212			cpu_to_be32(aes.key_enc[i]);
2213	}
2214
2215	ctx->key_len = len;
2216	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;
2217
2218	if (len == AES_KEYSIZE_192)
2219		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2220	else if (len == AES_KEYSIZE_256)
2221		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2222	else
2223		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2224
2225	memzero_explicit(&aes, sizeof(aes));
2226	return 0;
2227}
2228
2229static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2230{
2231	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2232
2233	safexcel_aead_cra_init(tfm);
2234	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2235	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2236	ctx->xcm = EIP197_XCM_MODE_CCM;
2237	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2238	return 0;
2239}
2240
2241static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2242					 unsigned int authsize)
2243{
2244	/* Borrowed from crypto/ccm.c */
2245	switch (authsize) {
2246	case 4:
2247	case 6:
2248	case 8:
2249	case 10:
2250	case 12:
2251	case 14:
2252	case 16:
2253		break;
2254	default:
2255		return -EINVAL;
2256	}
2257
2258	return 0;
2259}
2260
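/*
 * For CCM, iv[0] carries the L' field (length-of-length minus one);
 * only values 1..7 are valid per RFC 3610, so anything else is rejected
 * before the request is queued.
 */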
2261static int safexcel_ccm_encrypt(struct aead_request *req)
2262{
2263	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2264
2265	if (req->iv[0] < 1 || req->iv[0] > 7)
2266		return -EINVAL;
2267
2268	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2269}
2270
2271static int safexcel_ccm_decrypt(struct aead_request *req)
2272{
2273	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2274
2275	if (req->iv[0] < 1 || req->iv[0] > 7)
2276		return -EINVAL;
2277
2278	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2279}
2280
2281struct safexcel_alg_template safexcel_alg_ccm = {
2282	.type = SAFEXCEL_ALG_TYPE_AEAD,
2283	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
2284	.alg.aead = {
2285		.setkey = safexcel_aead_ccm_setkey,
2286		.setauthsize = safexcel_aead_ccm_setauthsize,
2287		.encrypt = safexcel_ccm_encrypt,
2288		.decrypt = safexcel_ccm_decrypt,
2289		.ivsize = AES_BLOCK_SIZE,
2290		.maxauthsize = AES_BLOCK_SIZE,
2291		.base = {
2292			.cra_name = "ccm(aes)",
2293			.cra_driver_name = "safexcel-ccm-aes",
2294			.cra_priority = SAFEXCEL_CRA_PRIORITY,
2295			.cra_flags = CRYPTO_ALG_ASYNC |
2296				     CRYPTO_ALG_KERN_DRIVER_ONLY,
2297			.cra_blocksize = 1,
2298			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2299			.cra_alignmask = 0,
2300			.cra_init = safexcel_aead_ccm_cra_init,
2301			.cra_exit = safexcel_aead_cra_exit,
2302			.cra_module = THIS_MODULE,
2303		},
2304	},
2305};