/* Linux kernel v6.13.7 source (web-viewer navigation header removed) */
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) 2017 Marvell
   4 *
   5 * Antoine Tenart <antoine.tenart@free-electrons.com>
   6 */
   7
   8#include <linux/unaligned.h>
   9#include <linux/device.h>
  10#include <linux/dma-mapping.h>
  11#include <linux/dmapool.h>
  12#include <crypto/aead.h>
  13#include <crypto/aes.h>
  14#include <crypto/authenc.h>
  15#include <crypto/chacha.h>
  16#include <crypto/ctr.h>
  17#include <crypto/internal/des.h>
  18#include <crypto/gcm.h>
  19#include <crypto/ghash.h>
  20#include <crypto/poly1305.h>
  21#include <crypto/sha1.h>
  22#include <crypto/sha2.h>
  23#include <crypto/sm3.h>
  24#include <crypto/sm4.h>
  25#include <crypto/xts.h>
  26#include <crypto/skcipher.h>
  27#include <crypto/internal/aead.h>
  28#include <crypto/internal/skcipher.h>
  29
  30#include "safexcel.h"
  31
/* Cipher operation direction, matching the crypto API request type */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
  36
/* Base cipher algorithms supported by the EIP97/EIP197 engine */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
  44
/* Per-transform (tfm) context shared by skcipher and AEAD algorithms */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;		/* CONTEXT_CONTROL_CRYPTO_MODE_* for control1 */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;		/* bytes of assoclen not hashed (e.g. ESP IV) */
	u8 blocksz;		/* cipher block/IV size in bytes */
	u32 ivmask;		/* IV option bits OR-ed into cdesc options */
	u32 ctrinit;		/* initial counter value, 0 or 1 (big endian) */

	__le32 key[16];		/* cipher key in engine (little endian) form */
	u32 nonce;		/* RFC3686/ESP nonce (last 4 key bytes) */
	unsigned int key_len, xts;	/* key length; xts=1 halves keysize */

	/* All the below is AEAD specific */
	u32 hash_alg;		/* CONTEXT_CONTROL_CRYPTO_ALG_* hash select */
	u32 state_sz;		/* hash digest/state size in bytes */

	/* Software fallback aead — used elsewhere in this file */
	struct crypto_aead *fback;
};
  68
/* Per-request state, stored in the crypto API request context area */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* context cache invalidation pending */
	int  nr_src, nr_dst;	/* mapped scatterlist entry counts */
};
  76
/*
 * Fill in the IV words of the command descriptor token area for a
 * plain (non-AEAD) skcipher request.
 *
 * Returns the number of 32-bit token words occupied by the IV, so the
 * caller knows whether the direction instruction still fits in cdesc.
 */
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	/* Block cipher modes: copy the full blocksize IV verbatim */
	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}
 104
/*
 * Build the single direction-instruction token for a plain skcipher
 * request. The instruction is placed inside the command descriptor
 * when the IV left room for it, otherwise in the additional token
 * area (atoken).
 */
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	/* Pass all data through the crypto engine to the output */
	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}
 133
/*
 * Fill in the 4 embedded IV token words for a (non-CCM) AEAD request,
 * according to the cipher mode: CTR/ESP (nonce+IV+counter), GCM or
 * Chacha20 (96-bit IV + counter), or plain CBC (full-block IV).
 */
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}
 159
/*
 * Build the full EIP197 processing token for an AEAD request in the
 * additional token area of the first command descriptor.
 *
 * The token tells the engine how to split the input into AAD (hash
 * only), crypto payload and ICV, and whether to append (encrypt) or
 * retrieve-and-verify (decrypt) the authentication tag. The final
 * token size is written into cdesc->additional_cdata_size.
 */
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV  for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	/* On decrypt, the trailing ICV is input, not payload */
	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		/* Insert B0 (and, if AAD present, its 2-byte length) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if  (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}
 370
 371static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
 372					const u8 *key, unsigned int len)
 373{
 374	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
 375	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 376	struct safexcel_crypto_priv *priv = ctx->base.priv;
 377	struct crypto_aes_ctx aes;
 378	int ret, i;
 379
 380	ret = aes_expandkey(&aes, key, len);
 381	if (ret)
 382		return ret;
 383
 384	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 385		for (i = 0; i < len / sizeof(u32); i++) {
 386			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 387				ctx->base.needs_inv = true;
 388				break;
 389			}
 390		}
 391	}
 392
 393	for (i = 0; i < len / sizeof(u32); i++)
 394		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
 395
 396	ctx->key_len = len;
 397
 398	memzero_explicit(&aes, sizeof(aes));
 399	return 0;
 400}
 401
 402static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 403				unsigned int len)
 404{
 405	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
 406	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 407	struct safexcel_crypto_priv *priv = ctx->base.priv;
 408	struct crypto_authenc_keys keys;
 409	struct crypto_aes_ctx aes;
 410	int err = -EINVAL, i;
 411	const char *alg;
 412
 413	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
 414		goto badkey;
 415
 416	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
 417		/* Must have at least space for the nonce here */
 418		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
 419			goto badkey;
 420		/* last 4 bytes of key are the nonce! */
 421		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
 422				      CTR_RFC3686_NONCE_SIZE);
 423		/* exclude the nonce here */
 424		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
 425	}
 426
 427	/* Encryption key */
 428	switch (ctx->alg) {
 429	case SAFEXCEL_DES:
 430		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
 431		if (unlikely(err))
 432			goto badkey;
 433		break;
 434	case SAFEXCEL_3DES:
 435		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
 436		if (unlikely(err))
 437			goto badkey;
 438		break;
 439	case SAFEXCEL_AES:
 440		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
 441		if (unlikely(err))
 442			goto badkey;
 443		break;
 444	case SAFEXCEL_SM4:
 445		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
 446			goto badkey;
 447		break;
 448	default:
 449		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
 450		goto badkey;
 451	}
 452
 453	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 454		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
 455			if (le32_to_cpu(ctx->key[i]) !=
 456			    ((u32 *)keys.enckey)[i]) {
 457				ctx->base.needs_inv = true;
 458				break;
 459			}
 460		}
 461	}
 462
 463	/* Auth key */
 464	switch (ctx->hash_alg) {
 465	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
 466		alg = "safexcel-sha1";
 467		break;
 468	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
 469		alg = "safexcel-sha224";
 470		break;
 471	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
 472		alg = "safexcel-sha256";
 473		break;
 474	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
 475		alg = "safexcel-sha384";
 476		break;
 477	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
 478		alg = "safexcel-sha512";
 479		break;
 480	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
 481		alg = "safexcel-sm3";
 482		break;
 483	default:
 484		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
 485		goto badkey;
 486	}
 487
 488	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
 489				 alg, ctx->state_sz))
 490		goto badkey;
 491
 492	/* Now copy the keys into the context */
 493	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
 494		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
 495	ctx->key_len = keys.enckeylen;
 496
 497	memzero_explicit(&keys, sizeof(keys));
 498	return 0;
 499
 500badkey:
 501	memzero_explicit(&keys, sizeof(keys));
 502	return err;
 503}
 504
/*
 * Fill in the control words (control0/control1) of the first command
 * descriptor: operation type, key/digest enables, hash and cipher
 * algorithm selection, and the context record size in 32-bit words.
 *
 * Returns 0 on success or -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	/* Context size starts with just the cipher key */
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/* Hash-then-crypt vs crypt-then-hash per direction/mode */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hashing */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS the stored key is double length, hence >> xts */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
 606
/*
 * Completion handler for a regular (non-invalidation) cipher request.
 *
 * Retires and error-checks all result descriptors belonging to the
 * request, unmaps the DMA scatterlists and, for CBC encrypt, copies
 * the last ciphertext block back into the request IV (chaining).
 *
 * Returns the number of result descriptors consumed; the first error
 * seen (if any) is reported through *ret.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	/* Pop all result descriptors; keep only the first error found */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
 672
/*
 * Build and queue all command and result descriptors for one cipher
 * or AEAD request on the given ring.
 *
 * Maps the src/dst scatterlists for DMA, writes one command descriptor
 * per source segment (the first one also carries the context control
 * words and processing token) and one result descriptor per relevant
 * destination segment, skipping the AAD area on output.
 *
 * Returns 0 with *commands/*results set to the descriptor counts, or
 * a negative error after rolling back any descriptors already written
 * and unmapping the scatterlists.
 */
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		/* Copy ipad (and for HMAC also opad) digest into the
		 * context record, after the cipher key.
		 */
		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember actual input length, source buffer length may be
	 * updated in case of inline operation below.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
		    (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		if (sreq->nr_src > 0 &&
		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
			return -EIO;
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}

		if (sreq->nr_src > 0 &&
		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
			return -EIO;

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			ret = -EINVAL;
			goto unmap;
		}

		if (sreq->nr_dst > 0 &&
		    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
			ret = -EIO;
			goto unmap;
		}
	}

	/* Cipher key always lives at the start of the context record */
	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap:
	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	return ret;
}
 916
/*
 * Completion handler for a context cache invalidation request.
 *
 * After collecting the invalidation's result descriptors, either free
 * the context record (when the tfm is being torn down) or re-enqueue
 * the original request on a freshly selected ring and kick its worker.
 *
 * Returns the number of result descriptors consumed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	/* Pop all result descriptors; keep only the first error found */
	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm teardown: the context record can now be released */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Requeue the original request on a fresh ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}
 975
 976static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
 977					   int ring,
 978					   struct crypto_async_request *async,
 979					   bool *should_complete, int *ret)
 980{
 981	struct skcipher_request *req = skcipher_request_cast(async);
 982	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 983	int err;
 984
 985	if (sreq->needs_inv) {
 986		sreq->needs_inv = false;
 987		err = safexcel_handle_inv_result(priv, ring, async, sreq,
 988						 should_complete, ret);
 989	} else {
 990		err = safexcel_handle_req_result(priv, ring, async, req->src,
 991						 req->dst, req->cryptlen, sreq,
 992						 should_complete, ret);
 993	}
 994
 995	return err;
 996}
 997
 998static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
 999				       int ring,
1000				       struct crypto_async_request *async,
1001				       bool *should_complete, int *ret)
1002{
1003	struct aead_request *req = aead_request_cast(async);
1004	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1005	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1006	int err;
1007
1008	if (sreq->needs_inv) {
1009		sreq->needs_inv = false;
1010		err = safexcel_handle_inv_result(priv, ring, async, sreq,
1011						 should_complete, ret);
1012	} else {
1013		err = safexcel_handle_req_result(priv, ring, async, req->src,
1014						 req->dst,
1015						 req->cryptlen + crypto_aead_authsize(tfm),
1016						 sreq, should_complete, ret);
1017	}
1018
1019	return err;
1020}
1021
1022static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1023				    int ring, int *commands, int *results)
1024{
1025	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1026	struct safexcel_crypto_priv *priv = ctx->base.priv;
1027	int ret;
1028
1029	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1030	if (unlikely(ret))
1031		return ret;
1032
1033	*commands = 1;
1034	*results = 1;
1035
1036	return 0;
1037}
1038
/*
 * Ring worker "send" hook for skcipher requests: emit either an
 * invalidation descriptor (when the context record must be dropped) or
 * the command/result descriptors for the actual cipher operation.
 * The result descriptor count is stashed in the request so the result
 * handler knows how many descriptors to reap.
 */
static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* needs_inv is only ever set when a transform record cache exists */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	/* Remember how many result descriptors belong to this request */
	sreq->rdescs = *results;
	return ret;
}
1070
/*
 * Ring worker "send" hook for AEAD requests: emit either an invalidation
 * descriptor or the command/result descriptors for the combined
 * cipher+auth operation (assoclen and authsize are passed through to the
 * descriptor builder). The result descriptor count is stashed in the
 * request for the result handler.
 */
static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* needs_inv is only ever set when a transform record cache exists */
	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}
1093
1094static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1095				    struct crypto_async_request *base,
1096				    struct safexcel_cipher_req *sreq,
1097				    struct crypto_wait *result)
1098{
1099	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1100	struct safexcel_crypto_priv *priv = ctx->base.priv;
1101	int ring = ctx->base.ring;
1102	int err;
 
1103
1104	ctx = crypto_tfm_ctx(base->tfm);
1105	ctx->base.exit_inv = true;
1106	sreq->needs_inv = true;
1107
1108	spin_lock_bh(&priv->ring[ring].queue_lock);
1109	crypto_enqueue_request(&priv->ring[ring].queue, base);
1110	spin_unlock_bh(&priv->ring[ring].queue_lock);
1111
1112	queue_work(priv->ring[ring].workqueue,
1113		   &priv->ring[ring].work_data.work);
1114
1115	err = crypto_wait_req(-EINPROGRESS, result);
1116
1117	if (err) {
1118		dev_warn(priv->dev,
1119			"cipher: sync: invalidate: completion error %d\n",
1120			 err);
1121		return err;
1122	}
1123
1124	return 0;
1125}
1126
1127static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1128{
1129	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1130	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1131	DECLARE_CRYPTO_WAIT(result);
1132
1133	memset(req, 0, sizeof(struct skcipher_request));
1134
1135	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1136				      crypto_req_done, &result);
1137	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1138
1139	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1140}
1141
1142static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1143{
1144	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1145	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1146	DECLARE_CRYPTO_WAIT(result);
1147
1148	memset(req, 0, sizeof(struct aead_request));
1149
1150	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1151				  crypto_req_done, &result);
1152	aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1153
1154	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1155}
1156
/*
 * Queue a cipher/AEAD request on the ring bound to its transform context.
 *
 * On the first request for a transform, a context record is allocated
 * from the DMA pool and the transform is bound to the least-loaded ring.
 * If a record already exists and the engine has a transform record cache,
 * a pending key change (ctx->base.needs_inv) is converted into a one-shot
 * invalidation carried by this request.
 *
 * Returns the crypto_enqueue_request() status (normally -EINPROGRESS).
 */
static int safexcel_queue_req(struct crypto_async_request *base,
			struct safexcel_cipher_req *sreq,
			enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			/* Consume the pending invalidation on this request */
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		/* First use: bind to a ring and allocate the context record */
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	/* Kick the ring worker to process the newly queued request */
	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}
1193
1194static int safexcel_encrypt(struct skcipher_request *req)
1195{
1196	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1197			SAFEXCEL_ENCRYPT);
1198}
1199
1200static int safexcel_decrypt(struct skcipher_request *req)
1201{
1202	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1203			SAFEXCEL_DECRYPT);
1204}
1205
1206static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1207{
1208	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1209	struct safexcel_alg_template *tmpl =
1210		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1211			     alg.skcipher.base);
1212
1213	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1214				    sizeof(struct safexcel_cipher_req));
1215
1216	ctx->base.priv = tmpl->priv;
1217
1218	ctx->base.send = safexcel_skcipher_send;
1219	ctx->base.handle_result = safexcel_skcipher_handle_result;
1220	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1221	ctx->ctrinit = 1;
1222	return 0;
1223}
1224
/*
 * Common transform teardown: wipe key material and the context record.
 *
 * Returns nonzero (-ENOMEM) when no context record was ever allocated;
 * callers treat that as "nothing to invalidate or free, bail out".
 * Returns 0 when a context record exists, in which case the caller is
 * responsible for invalidating and/or freeing it.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Securely erase the key copy held in the transform context */
	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	/* Scrub the DMA-visible context record before it is released */
	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1238
/*
 * skcipher transform teardown. With a transform record cache present the
 * context record must be invalidated in hardware (which also frees it on
 * completion); without one it can simply be returned to the DMA pool.
 */
static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* Nonzero means no context record was allocated: nothing to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1258
/*
 * AEAD transform teardown. Mirrors safexcel_skcipher_cra_exit(): with a
 * transform record cache the context record must be invalidated in
 * hardware, otherwise it is returned to the DMA pool directly.
 */
static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	/* Nonzero means no context record was allocated: nothing to do */
	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}
1278
1279static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1280{
1281	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1282
1283	safexcel_skcipher_cra_init(tfm);
1284	ctx->alg  = SAFEXCEL_AES;
1285	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1286	ctx->blocksz = 0;
1287	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1288	return 0;
1289}
1290
/* ecb(aes): raw AES-ECB skcipher offloaded to the EIP97/EIP197 engine */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1316
1317static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1318{
1319	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1320
1321	safexcel_skcipher_cra_init(tfm);
1322	ctx->alg  = SAFEXCEL_AES;
1323	ctx->blocksz = AES_BLOCK_SIZE;
1324	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1325	return 0;
1326}
1327
/* cbc(aes): AES-CBC skcipher offloaded to the EIP97/EIP197 engine */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1354
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1355static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1356					   const u8 *key, unsigned int len)
1357{
1358	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1359	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1360	struct safexcel_crypto_priv *priv = ctx->base.priv;
1361	struct crypto_aes_ctx aes;
1362	int ret, i;
1363	unsigned int keylen;
1364
1365	/* last 4 bytes of key are the nonce! */
1366	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1367	/* exclude the nonce here */
1368	keylen = len - CTR_RFC3686_NONCE_SIZE;
1369	ret = aes_expandkey(&aes, key, keylen);
1370	if (ret)
1371		return ret;
1372
1373	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1374		for (i = 0; i < keylen / sizeof(u32); i++) {
1375			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1376				ctx->base.needs_inv = true;
1377				break;
1378			}
1379		}
1380	}
1381
1382	for (i = 0; i < keylen / sizeof(u32); i++)
1383		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1384
1385	ctx->key_len = keylen;
1386
1387	memzero_explicit(&aes, sizeof(aes));
1388	return 0;
1389}
1390
1391static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1392{
1393	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395	safexcel_skcipher_cra_init(tfm);
1396	ctx->alg  = SAFEXCEL_AES;
1397	ctx->blocksz = AES_BLOCK_SIZE;
1398	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1399	return 0;
1400}
1401
/* rfc3686(ctr(aes)): AES-CTR with a per-key nonce, as used by IPsec ESP */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1429
/*
 * Set the single-DES key. Weak-key checking is delegated to
 * verify_skcipher_des_key(), which honours the tfm's policy flags.
 */
static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1451
1452static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1453{
1454	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1455
1456	safexcel_skcipher_cra_init(tfm);
1457	ctx->alg  = SAFEXCEL_DES;
1458	ctx->blocksz = DES_BLOCK_SIZE;
1459	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1460	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1461	return 0;
1462}
1463
/* cbc(des): single-DES CBC skcipher (legacy; kept for compatibility) */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1490
1491static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1492{
1493	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1494
1495	safexcel_skcipher_cra_init(tfm);
1496	ctx->alg  = SAFEXCEL_DES;
1497	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1498	ctx->blocksz = 0;
1499	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1500	return 0;
1501}
1502
/* ecb(des): single-DES ECB skcipher (legacy; kept for compatibility) */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1528
/*
 * Set the triple-DES (EDE) key. Key parity/equivalence checking is
 * delegated to verify_skcipher_des3_key().
 */
static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}
1550
1551static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1552{
1553	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1554
1555	safexcel_skcipher_cra_init(tfm);
1556	ctx->alg  = SAFEXCEL_3DES;
1557	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1558	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1559	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1560	return 0;
1561}
1562
/* cbc(des3_ede): triple-DES CBC skcipher */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1589
1590static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1591{
1592	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1593
1594	safexcel_skcipher_cra_init(tfm);
1595	ctx->alg  = SAFEXCEL_3DES;
1596	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1597	ctx->blocksz = 0;
1598	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1599	return 0;
1600}
1601
/* ecb(des3_ede): triple-DES ECB skcipher */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1627
1628static int safexcel_aead_encrypt(struct aead_request *req)
1629{
1630	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1631
1632	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1633}
1634
1635static int safexcel_aead_decrypt(struct aead_request *req)
1636{
1637	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1638
1639	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1640}
1641
1642static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1643{
1644	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1645	struct safexcel_alg_template *tmpl =
1646		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1647			     alg.aead.base);
1648
1649	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1650				sizeof(struct safexcel_cipher_req));
1651
1652	ctx->base.priv = tmpl->priv;
1653
1654	ctx->alg  = SAFEXCEL_AES; /* default */
1655	ctx->blocksz = AES_BLOCK_SIZE;
1656	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1657	ctx->ctrinit = 1;
1658	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1659	ctx->aead = true;
1660	ctx->base.send = safexcel_aead_send;
1661	ctx->base.handle_result = safexcel_aead_handle_result;
1662	return 0;
1663}
1664
1665static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1666{
1667	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1668
1669	safexcel_aead_cra_init(tfm);
1670	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1671	ctx->state_sz = SHA1_DIGEST_SIZE;
1672	return 0;
1673}
1674
/* authenc AEAD: AES-CBC encryption with HMAC-SHA1 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1700
1701static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1702{
1703	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1704
1705	safexcel_aead_cra_init(tfm);
1706	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1707	ctx->state_sz = SHA256_DIGEST_SIZE;
1708	return 0;
1709}
1710
/* authenc AEAD: AES-CBC encryption with HMAC-SHA256 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1736
1737static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1738{
1739	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1740
1741	safexcel_aead_cra_init(tfm);
1742	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1743	ctx->state_sz = SHA256_DIGEST_SIZE;
1744	return 0;
1745}
1746
/* authenc AEAD: AES-CBC encryption with HMAC-SHA224 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1772
1773static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1774{
1775	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1776
1777	safexcel_aead_cra_init(tfm);
1778	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1779	ctx->state_sz = SHA512_DIGEST_SIZE;
1780	return 0;
1781}
1782
/* authenc AEAD: AES-CBC encryption with HMAC-SHA512 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1808
1809static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1810{
1811	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1812
1813	safexcel_aead_cra_init(tfm);
1814	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1815	ctx->state_sz = SHA512_DIGEST_SIZE;
1816	return 0;
1817}
1818
/* authenc AEAD: AES-CBC encryption with HMAC-SHA384 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1844
1845static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1846{
1847	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1848
1849	safexcel_aead_sha1_cra_init(tfm);
1850	ctx->alg = SAFEXCEL_3DES; /* override default */
1851	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1852	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1853	return 0;
1854}
1855
/* authenc AEAD: 3DES-CBC encryption with HMAC-SHA1 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1881
1882static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1883{
1884	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1885
1886	safexcel_aead_sha256_cra_init(tfm);
1887	ctx->alg = SAFEXCEL_3DES; /* override default */
1888	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1889	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1890	return 0;
1891}
1892
/* authenc AEAD: 3DES-CBC encryption with HMAC-SHA256 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1918
1919static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1920{
1921	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1922
1923	safexcel_aead_sha224_cra_init(tfm);
1924	ctx->alg = SAFEXCEL_3DES; /* override default */
1925	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1926	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1927	return 0;
1928}
1929
/* authenc AEAD: 3DES-CBC encryption with HMAC-SHA224 authentication */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1955
/* authenc(hmac(sha512),cbc(des3_ede)): reuse the SHA-512 AEAD init, switch cipher to 3DES */
static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* 3DES is covered by the DES capability bit */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1992
/* authenc(hmac(sha384),cbc(des3_ede)): reuse the SHA-384 AEAD init, switch cipher to 3DES */
static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* 3DES uses the DES capability bit; SHA-384 the SHA2-512 one */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2029
/* authenc(hmac(sha1),cbc(des)): reuse the SHA-1 AEAD init, switch cipher to single DES */
static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2066
/* authenc(hmac(sha256),cbc(des)): reuse the SHA-256 AEAD init, switch cipher to single DES */
static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2103
/* authenc(hmac(sha224),cbc(des)): reuse the SHA-224 AEAD init, switch cipher to single DES */
static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-224 is provided by the SHA2-256 capability bit */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2140
/* authenc(hmac(sha512),cbc(des)): reuse the SHA-512 AEAD init, switch cipher to single DES */
static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2177
/* authenc(hmac(sha384),cbc(des)): reuse the SHA-384 AEAD init, switch cipher to single DES */
static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-384 is provided by the SHA2-512 capability bit */
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2214
/* authenc(hmac(sha1),rfc3686(ctr(aes))): SHA-1 AEAD init with the cipher mode switched to CTR */
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* CTR is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2249
/* authenc(hmac(sha256),rfc3686(ctr(aes))): SHA-256 AEAD init with the cipher mode switched to CTR */
static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* CTR is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2284
/* authenc(hmac(sha224),rfc3686(ctr(aes))): SHA-224 AEAD init with the cipher mode switched to CTR */
static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-224 is provided by the SHA2-256 capability bit */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* CTR is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2319
/* authenc(hmac(sha512),rfc3686(ctr(aes))): SHA-512 AEAD init with the cipher mode switched to CTR */
static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* CTR is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2354
/* authenc(hmac(sha384),rfc3686(ctr(aes))): SHA-384 AEAD init with the cipher mode switched to CTR */
static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	/* SHA-384 is provided by the SHA2-512 capability bit */
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* CTR is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2389
2390static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2391					   const u8 *key, unsigned int len)
2392{
2393	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2394	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2395	struct safexcel_crypto_priv *priv = ctx->base.priv;
2396	struct crypto_aes_ctx aes;
2397	int ret, i;
2398	unsigned int keylen;
2399
2400	/* Check for illegal XTS keys */
2401	ret = xts_verify_key(ctfm, key, len);
2402	if (ret)
2403		return ret;
2404
2405	/* Only half of the key data is cipher key */
2406	keylen = (len >> 1);
2407	ret = aes_expandkey(&aes, key, keylen);
2408	if (ret)
2409		return ret;
2410
2411	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2412		for (i = 0; i < keylen / sizeof(u32); i++) {
2413			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2414				ctx->base.needs_inv = true;
2415				break;
2416			}
2417		}
2418	}
2419
2420	for (i = 0; i < keylen / sizeof(u32); i++)
2421		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2422
2423	/* The other half is the tweak key */
2424	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2425	if (ret)
2426		return ret;
2427
2428	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2429		for (i = 0; i < keylen / sizeof(u32); i++) {
2430			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2431			    aes.key_enc[i]) {
2432				ctx->base.needs_inv = true;
2433				break;
2434			}
2435		}
2436	}
2437
2438	for (i = 0; i < keylen / sizeof(u32); i++)
2439		ctx->key[i + keylen / sizeof(u32)] =
2440			cpu_to_le32(aes.key_enc[i]);
2441
2442	ctx->key_len = keylen << 1;
2443
2444	memzero_explicit(&aes, sizeof(aes));
2445	return 0;
2446}
2447
/* Skcipher init for xts(aes): AES with the XTS tweaked block mode and dual key */
static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts  = 1; /* marks the double-length key layout */
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}
2459
/* XTS encrypt entry point: XTS needs at least one full cipher block of data */
static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}
2467
/* XTS decrypt entry point: XTS needs at least one full cipher block of data */
static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}
2475
/* xts(aes) skcipher template */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2503
/*
 * Set the AES-GCM key: store the expanded AES key little-endian in the
 * context and derive the GHASH hash key H by encrypting an all-zero
 * block, stored big-endian in the ipad area. Either change under a
 * live, cached context record flags the record for invalidation.
 */
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* Detect a cipher key change under a cached context record */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);

	/* Detect a hash key change under a cached context record */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	/* Wipe both secrets from the stack */
	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2554
/* AEAD init for gcm(aes): GHASH authentication, XCM (combined cipher+MAC) mode */
static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	return 0;
}
2567
/* GCM teardown: nothing GCM-specific to release beyond the common AEAD state */
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	safexcel_aead_cra_exit(tfm);
}
2572
/* Delegate GCM tag-length validation to the generic helper */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2578
/* gcm(aes) AEAD template */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* GCM is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2605
/*
 * Set the AES-CCM key. The key is stored twice: little-endian as the
 * cipher key, and big-endian into the ipad area at an offset of two AES
 * blocks (for the CBC-MAC part). The XCBC hash variant is selected to
 * match the key size. A key change under a live, cached context record
 * flags the record for invalidation.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Store the key both as cipher key and (offset) as CBC-MAC key */
	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	/* Hash state: two AES blocks of state plus the MAC key */
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2649
/* AEAD init for ccm(aes): XCBC/CBC-MAC authentication, XCM (CCM) mode */
static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0; /* CCM counter starts at 0, not 1 */
	return 0;
}
2662
2663static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
2664					 unsigned int authsize)
2665{
2666	/* Borrowed from crypto/ccm.c */
2667	switch (authsize) {
2668	case 4:
2669	case 6:
2670	case 8:
2671	case 10:
2672	case 12:
2673	case 14:
2674	case 16:
2675		break;
2676	default:
2677		return -EINVAL;
2678	}
2679
2680	return 0;
2681}
2682
/* CCM encrypt entry point */
static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* First IV byte is the CCM length-field size (L-1, per RFC 3610): only 1..7 valid */
	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}
2692
/* CCM decrypt entry point */
static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* First IV byte is the CCM length-field size (L-1, per RFC 3610): only 1..7 valid */
	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
2702
/* ccm(aes) AEAD template */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* CCM is a stream mode */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2729
/*
 * Store a raw 256-bit ChaCha20 key in the context. If the key actually
 * changed under a live, cached context record, flag the record for
 * invalidation. The caller has already validated the key length.
 */
static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
				     const u8 *key)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
	ctx->key_len = CHACHA_KEY_SIZE;
}
2742
/* Skcipher setkey for chacha20: validate the length, then store the key */
static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
					     const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);

	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}
2755
/* Skcipher init for chacha20 (256-bit key / 32-bit counter variant, per mode constant) */
static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg  = SAFEXCEL_CHACHA20;
	ctx->ctrinit = 0; /* ChaCha block counter starts at 0 */
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}
2766
/* chacha20 skcipher template */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1, /* stream cipher */
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2793
2794static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2795				    const u8 *key, unsigned int len)
2796{
2797	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2798
2799	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2800	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2801		/* ESP variant has nonce appended to key */
2802		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2803		ctx->nonce = *(u32 *)(key + len);
2804	}
2805	if (len != CHACHA_KEY_SIZE)
2806		return -EINVAL;
2807
2808	safexcel_chacha20_setkey(ctx, key);
2809
2810	return 0;
2811}
2812
/* Poly1305 tags are always exactly 16 bytes */
static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;
	return 0;
}
2820
/*
 * Queue a chacha20-poly1305 request to the engine, or divert it to the
 * software fallback for cases the HW cannot handle (requests no larger
 * than the tag, or ESP requests with AAD shorter than the ESP IV).
 *
 * NOTE: creq and subreq alias the same request-context memory (both
 * come from aead_request_ctx()). The HW path only uses creq and returns
 * before subreq is touched; the fallback path only uses subreq.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	/* Local key copy with room for the appended ESP nonce word */
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Mirror the fallback's request flags back to our tfm */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Hand the whole request, unchanged, to the fallback cipher */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2873
/* chacha20-poly1305 encrypt entry point */
static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}
2878
/* chacha20-poly1305 decrypt entry point */
static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}
2883
/*
 * Common init for AEADs that keep a software fallback: allocate the
 * fallback cipher (looked up by our own cra_name) and size the request
 * context large enough for either our own request state or the
 * fallback's subrequest.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
2905
/* AEAD init for rfc7539(chacha20,poly1305): ChaCha20 cipher + Poly1305 MAC */
static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg  = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->ctrinit = 0; /* ChaCha block counter starts at 0 */
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}
2919
/* Teardown counterpart of safexcel_aead_fallback_cra_init() */
static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}
2927
/* rfc7539(chacha20,poly1305) AEAD, HW-offloaded with SW fallback */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2956
/*
 * ESP variant of the chachapoly init: same as the base init plus the
 * IPsec ESP AEAD type and skipping of the explicit ESP IV in the AAD.
 * The ctx writes are plain assignments, safe even if the base init
 * failed; its error code is propagated unchanged.
 */
static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
2967
/* rfc7539esp(chacha20,poly1305): ESP flavor, nonce carried in the key */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		/* ESP nonce is part of the key, so not part of the IV */
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2996
2997static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
2998					const u8 *key, unsigned int len)
2999{
3000	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3001	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3002	struct safexcel_crypto_priv *priv = ctx->base.priv;
3003
3004	if (len != SM4_KEY_SIZE)
3005		return -EINVAL;
3006
3007	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3008		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3009			ctx->base.needs_inv = true;
3010
3011	memcpy(ctx->key, key, SM4_KEY_SIZE);
3012	ctx->key_len = SM4_KEY_SIZE;
3013
3014	return 0;
3015}
3016
3017static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3018{
3019	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3020	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3021		return -EINVAL;
3022	else
3023		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3024					  SAFEXCEL_ENCRYPT);
3025}
3026
3027static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3028{
3029	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3030	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3031		return -EINVAL;
3032	else
3033		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3034					  SAFEXCEL_DECRYPT);
3035}
3036
3037static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3038{
3039	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3040
3041	safexcel_skcipher_cra_init(tfm);
3042	ctx->alg  = SAFEXCEL_SM4;
3043	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3044	ctx->blocksz = 0;
3045	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3046	return 0;
3047}
3048
/* ecb(sm4) skcipher template */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3074
3075static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3076{
3077	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3078
3079	safexcel_skcipher_cra_init(tfm);
3080	ctx->alg  = SAFEXCEL_SM4;
3081	ctx->blocksz = SM4_BLOCK_SIZE;
3082	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3083	return 0;
3084}
3085
3086struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
3087	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
3088	.algo_mask = SAFEXCEL_ALG_SM4,
3089	.alg.skcipher = {
3090		.setkey = safexcel_skcipher_sm4_setkey,
3091		.encrypt = safexcel_sm4_blk_encrypt,
3092		.decrypt = safexcel_sm4_blk_decrypt,
3093		.min_keysize = SM4_KEY_SIZE,
3094		.max_keysize = SM4_KEY_SIZE,
3095		.ivsize = SM4_BLOCK_SIZE,
3096		.base = {
3097			.cra_name = "cbc(sm4)",
3098			.cra_driver_name = "safexcel-cbc-sm4",
3099			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3100			.cra_flags = CRYPTO_ALG_ASYNC |
3101				     CRYPTO_ALG_ALLOCATES_MEMORY |
3102				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3103			.cra_blocksize = SM4_BLOCK_SIZE,
3104			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3105			.cra_alignmask = 0,
3106			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3107			.cra_exit = safexcel_skcipher_cra_exit,
3108			.cra_module = THIS_MODULE,
3109		},
3110	},
3111};
3112
3113static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3114					   const u8 *key, unsigned int len)
3115{
3116	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3117	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3118
3119	/* last 4 bytes of key are the nonce! */
3120	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3121	/* exclude the nonce here */
3122	len -= CTR_RFC3686_NONCE_SIZE;
3123
3124	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3125}
3126
3127static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3128{
3129	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3130
3131	safexcel_skcipher_cra_init(tfm);
3132	ctx->alg  = SAFEXCEL_SM4;
3133	ctx->blocksz = SM4_BLOCK_SIZE;
3134	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3135	return 0;
3136}
3137
/* rfc3686(ctr(sm4)) skcipher template; key carries the extra nonce */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3165
/* AEAD encrypt for block-mode SM4; cryptlen must be block aligned */
static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}
3175
/*
 * AEAD decrypt for block-mode SM4; the ciphertext portion (cryptlen
 * minus the appended auth tag) must be block aligned.
 */
static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}
3187
3188static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3189{
3190	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3191
3192	safexcel_aead_cra_init(tfm);
3193	ctx->alg = SAFEXCEL_SM4;
3194	ctx->blocksz = SM4_BLOCK_SIZE;
3195	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3196	ctx->state_sz = SHA1_DIGEST_SIZE;
3197	return 0;
3198}
3199
/* authenc(hmac(sha1),cbc(sm4)) AEAD template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3225
3226static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3227					 const u8 *key, unsigned int len)
3228{
3229	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3230	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3231
3232	/* Keep fallback cipher synchronized */
3233	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3234	       safexcel_aead_setkey(ctfm, key, len);
3235}
3236
/* Forward authsize changes to the fallback so both tfms stay in sync */
static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}
3246
3247static int safexcel_aead_fallback_crypt(struct aead_request *req,
3248					enum safexcel_cipher_direction dir)
3249{
3250	struct crypto_aead *aead = crypto_aead_reqtfm(req);
3251	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
3252	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3253	struct aead_request *subreq = aead_request_ctx(req);
3254
3255	aead_request_set_tfm(subreq, ctx->fback);
3256	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
3257				  req->base.data);
3258	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
3259			       req->iv);
3260	aead_request_set_ad(subreq, req->assoclen);
3261
3262	return (dir ==  SAFEXCEL_ENCRYPT) ?
3263		crypto_aead_encrypt(subreq) :
3264		crypto_aead_decrypt(subreq);
3265}
3266
3267static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
3268{
3269	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3270
3271	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3272	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3273		return -EINVAL;
3274	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
3275		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3276
3277	/* HW cannot do full (AAD+payload) zero length, use fallback */
3278	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
3279}
3280
3281static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
3282{
3283	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3284	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
3285
3286	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3287	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
3288		return -EINVAL;
3289	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
3290		/* If input length > 0 only */
3291		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3292
3293	/* HW cannot do full (AAD+payload) zero length, use fallback */
3294	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3295}
3296
3297static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3298{
3299	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3300
3301	safexcel_aead_fallback_cra_init(tfm);
3302	ctx->alg = SAFEXCEL_SM4;
3303	ctx->blocksz = SM4_BLOCK_SIZE;
3304	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3305	ctx->state_sz = SM3_DIGEST_SIZE;
3306	return 0;
3307}
3308
/* authenc(hmac(sm3),cbc(sm4)) AEAD template, with SW fallback */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3336
/* Same as the sm4cbc+sha1 init, but switched to CTR (nonce-loaded) mode */
static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
3345
/* authenc(hmac(sha1),rfc3686(ctr(sm4))) AEAD template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3371
/* Same as the sm4cbc+sm3 init, but switched to CTR (nonce-loaded) mode */
static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}
3380
/* authenc(hmac(sm3),rfc3686(ctr(sm4))) AEAD template */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3406
3407static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3408				       unsigned int len)
3409{
3410	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3411	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3412
3413	/* last 4 bytes of key are the nonce! */
3414	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3415
3416	len -= CTR_RFC3686_NONCE_SIZE;
3417	return safexcel_aead_gcm_setkey(ctfm, key, len);
3418}
3419
/* Validate the ICV length against the sizes RFC4106 permits */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3425
/* Check the ESP-style assoclen before queueing the encrypt */
static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}
3431
/* Check the ESP-style assoclen before queueing the decrypt */
static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}
3437
/*
 * GCM init plus the IPsec ESP AEAD type and skipping of the explicit
 * ESP IV within the AAD; error from the base init is propagated.
 */
static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3448
3449struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3450	.type = SAFEXCEL_ALG_TYPE_AEAD,
3451	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3452	.alg.aead = {
3453		.setkey = safexcel_rfc4106_gcm_setkey,
3454		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3455		.encrypt = safexcel_rfc4106_encrypt,
3456		.decrypt = safexcel_rfc4106_decrypt,
3457		.ivsize = GCM_RFC4106_IV_SIZE,
3458		.maxauthsize = GHASH_DIGEST_SIZE,
3459		.base = {
3460			.cra_name = "rfc4106(gcm(aes))",
3461			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3462			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3463			.cra_flags = CRYPTO_ALG_ASYNC |
3464				     CRYPTO_ALG_ALLOCATES_MEMORY |
3465				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3466			.cra_blocksize = 1,
3467			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3468			.cra_alignmask = 0,
3469			.cra_init = safexcel_rfc4106_gcm_cra_init,
3470			.cra_exit = safexcel_aead_gcm_cra_exit,
3471		},
3472	},
3473};
3474
3475static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3476					    unsigned int authsize)
3477{
3478	if (authsize != GHASH_DIGEST_SIZE)
3479		return -EINVAL;
3480
3481	return 0;
3482}
3483
/*
 * GCM init switched to the IPsec ESP GMAC (authentication-only) type;
 * error from the base init is propagated.
 */
static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}
3493
3494struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3495	.type = SAFEXCEL_ALG_TYPE_AEAD,
3496	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3497	.alg.aead = {
3498		.setkey = safexcel_rfc4106_gcm_setkey,
3499		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
3500		.encrypt = safexcel_rfc4106_encrypt,
3501		.decrypt = safexcel_rfc4106_decrypt,
3502		.ivsize = GCM_RFC4543_IV_SIZE,
3503		.maxauthsize = GHASH_DIGEST_SIZE,
3504		.base = {
3505			.cra_name = "rfc4543(gcm(aes))",
3506			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
3507			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3508			.cra_flags = CRYPTO_ALG_ASYNC |
3509				     CRYPTO_ALG_ALLOCATES_MEMORY |
3510				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3511			.cra_blocksize = 1,
3512			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3513			.cra_alignmask = 0,
3514			.cra_init = safexcel_rfc4543_gcm_cra_init,
3515			.cra_exit = safexcel_aead_gcm_cra_exit,
3516		},
3517	},
3518};
3519
/*
 * rfc4309(ccm(aes)) setkey: pack the CCM flags byte (L) and the 3-byte
 * salt from the end of the key into ctx->nonce, then key the CCM core
 * with the remainder. The byte-wise memcpy keeps this alignment- and
 * endian-safe.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3536
3537static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3538					    unsigned int authsize)
3539{
3540	/* Borrowed from crypto/ccm.c */
3541	switch (authsize) {
3542	case 8:
3543	case 12:
3544	case 16:
3545		break;
3546	default:
3547		return -EINVAL;
3548	}
3549
3550	return 0;
3551}
3552
/* Encrypt entry: RFC4309 allows only 16 or 20 bytes of AAD */
static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}
3563
/* Decrypt entry: RFC4309 allows only 16 or 20 bytes of AAD */
static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
3574
/*
 * CCM init plus the IPsec ESP AEAD type and skipping of the explicit
 * ESP IV within the AAD; error from the base init is propagated.
 */
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3585
/* rfc4309(ccm(aes)) AEAD template */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
v6.2
   1// SPDX-License-Identifier: GPL-2.0
   2/*
   3 * Copyright (C) 2017 Marvell
   4 *
   5 * Antoine Tenart <antoine.tenart@free-electrons.com>
   6 */
   7
   8#include <asm/unaligned.h>
   9#include <linux/device.h>
  10#include <linux/dma-mapping.h>
  11#include <linux/dmapool.h>
  12#include <crypto/aead.h>
  13#include <crypto/aes.h>
  14#include <crypto/authenc.h>
  15#include <crypto/chacha.h>
  16#include <crypto/ctr.h>
  17#include <crypto/internal/des.h>
  18#include <crypto/gcm.h>
  19#include <crypto/ghash.h>
  20#include <crypto/poly1305.h>
  21#include <crypto/sha1.h>
  22#include <crypto/sha2.h>
  23#include <crypto/sm3.h>
  24#include <crypto/sm4.h>
  25#include <crypto/xts.h>
  26#include <crypto/skcipher.h>
  27#include <crypto/internal/aead.h>
  28#include <crypto/internal/skcipher.h>
  29
  30#include "safexcel.h"
  31
/* Direction of a cipher operation as queued to the engine */
enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};
  36
/* Base cipher algorithms supported by this driver */
enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};
  44
/* Per-tfm context for all skcipher and AEAD algorithms of this driver */
struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;		/* CONTEXT_CONTROL_CRYPTO_MODE_* for the HW */
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;		/* bytes of AAD to skip (e.g. explicit ESP IV) */
	u8 blocksz;		/* cipher block / IV size in bytes, 0 = no IV */
	u32 ivmask;
	u32 ctrinit;		/* initial counter value for CTR-type modes */

	__le32 key[16];		/* raw cipher key material */
	u32 nonce;		/* salt/nonce extracted from the key (RFC modes) */
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;

	/* SW fallback tfm; only set by the fallback-capable algs */
	struct crypto_aead *fback;
};
  68
/* Per-request state, stored in the crypto request context */
struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated to the request */
	unsigned int rdescs;
	bool needs_inv;		/* request is a context invalidation */
	int  nr_src, nr_dst;	/* mapped SG entries for src/dst */
};
  76
/*
 * Fill the command descriptor's embedded IV/token words for a skcipher
 * request and return the number of 32-bit words written.
 *
 * CTR_LOAD mode: 4 words - nonce, 64-bit IV, big-endian counter seeded
 * from ctx->ctrinit. Chacha20: 4 words - 96-bit nonce from iv[4..15],
 * counter from iv[0..3]. All other modes: ctx->blocksz raw IV bytes.
 */
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}
 104
/*
 * Build the processing token for a skcipher request: write the IV into
 * the command descriptor, then append a single "direction" instruction
 * covering `length` bytes. When the IV already uses all 4 embedded token
 * words, the instruction spills into the additional token area (atoken);
 * otherwise it fits in the descriptor and is padded with a NOP.
 */
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}
 133
/*
 * Fill the 4 embedded IV token words for an AEAD request.
 *
 * CTR_LOAD or ESP modes: nonce + 64-bit IV + big-endian counter seeded
 * from ctx->ctrinit. GCM/chacha20: 96-bit IV + big-endian counter.
 * Otherwise (CBC): raw IV of ctx->blocksz bytes.
 */
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}
 159
 160static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
 161				struct safexcel_command_desc *cdesc,
 162				struct safexcel_token *atoken,
 163				enum safexcel_cipher_direction direction,
 164				u32 cryptlen, u32 assoclen, u32 digestsize)
 165{
 166	struct safexcel_token *aadref;
 167	int atoksize = 2; /* Start with minimum size */
 168	int assocadj = assoclen - ctx->aadskip, aadalign;
 169
 170	/* Always 4 dwords of embedded IV  for AEAD modes */
 171	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
 172
 173	if (direction == SAFEXCEL_DECRYPT)
 174		cryptlen -= digestsize;
 175
 176	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
 177		/* Construct IV block B0 for the CBC-MAC */
 178		u8 *final_iv = (u8 *)cdesc->control_data.token;
 179		u8 *cbcmaciv = (u8 *)&atoken[1];
 180		__le32 *aadlen = (__le32 *)&atoken[5];
 181
 182		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
 183			/* Length + nonce */
 184			cdesc->control_data.token[0] = ctx->nonce;
 185			/* Fixup flags byte */
 186			*(__le32 *)cbcmaciv =
 187				cpu_to_le32(ctx->nonce |
 188					    ((assocadj > 0) << 6) |
 189					    ((digestsize - 2) << 2));
 190			/* 64 bit IV part */
 191			memcpy(&cdesc->control_data.token[1], iv, 8);
 192			memcpy(cbcmaciv + 4, iv, 8);
 193			/* Start counter at 0 */
 194			cdesc->control_data.token[3] = 0;
 195			/* Message length */
 196			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
 197		} else {
 198			/* Variable length IV part */
 199			memcpy(final_iv, iv, 15 - iv[0]);
 200			memcpy(cbcmaciv, iv, 15 - iv[0]);
 201			/* Start variable length counter at 0 */
 202			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
 203			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
 204			/* fixup flags byte */
 205			cbcmaciv[0] |= ((assocadj > 0) << 6) |
 206				       ((digestsize - 2) << 2);
 207			/* insert lower 2 bytes of message length */
 208			cbcmaciv[14] = cryptlen >> 8;
 209			cbcmaciv[15] = cryptlen & 255;
 210		}
 211
 212		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 213		atoken->packet_length = AES_BLOCK_SIZE +
 214					((assocadj > 0) << 1);
 215		atoken->stat = 0;
 216		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
 217				       EIP197_TOKEN_INS_TYPE_HASH;
 218
 219		if (likely(assocadj)) {
 220			*aadlen = cpu_to_le32((assocadj >> 8) |
 221					      (assocadj & 255) << 8);
 222			atoken += 6;
 223			atoksize += 7;
 224		} else {
 225			atoken += 5;
 226			atoksize += 6;
 227		}
 228
 229		/* Process AAD data */
 230		aadref = atoken;
 231		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 232		atoken->packet_length = assocadj;
 233		atoken->stat = 0;
 234		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 235		atoken++;
 236
 237		/* For CCM only, align AAD data towards hash engine */
 238		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 239		aadalign = (assocadj + 2) & 15;
 240		atoken->packet_length = assocadj && aadalign ?
 241						16 - aadalign :
 242						0;
 243		if (likely(cryptlen)) {
 244			atoken->stat = 0;
 245			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 246		} else {
 247			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 248			atoken->instructions = EIP197_TOKEN_INS_LAST |
 249					       EIP197_TOKEN_INS_TYPE_HASH;
 250		}
 251	} else {
 252		safexcel_aead_iv(ctx, iv, cdesc);
 253
 254		/* Process AAD data */
 255		aadref = atoken;
 256		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 257		atoken->packet_length = assocadj;
 258		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 259		atoken->instructions = EIP197_TOKEN_INS_LAST |
 260				       EIP197_TOKEN_INS_TYPE_HASH;
 261	}
 262	atoken++;
 263
 264	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
 265		/* For ESP mode (and not GMAC), skip over the IV */
 266		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 267		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
 268		atoken->stat = 0;
 269		atoken->instructions = 0;
 270		atoken++;
 271		atoksize++;
 272	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
 273			    direction == SAFEXCEL_DECRYPT)) {
 274		/* Poly-chacha decryption needs a dummy NOP here ... */
 275		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 276		atoken->packet_length = 16; /* According to Op Manual */
 277		atoken->stat = 0;
 278		atoken->instructions = 0;
 279		atoken++;
 280		atoksize++;
 281	}
 282
 283	if  (ctx->xcm) {
 284		/* For GCM and CCM, obtain enc(Y0) */
 285		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
 286		atoken->packet_length = 0;
 287		atoken->stat = 0;
 288		atoken->instructions = AES_BLOCK_SIZE;
 289		atoken++;
 290
 291		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 292		atoken->packet_length = AES_BLOCK_SIZE;
 293		atoken->stat = 0;
 294		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 295				       EIP197_TOKEN_INS_TYPE_CRYPTO;
 296		atoken++;
 297		atoksize += 2;
 298	}
 299
 300	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
 301		/* Fixup stat field for AAD direction instruction */
 302		aadref->stat = 0;
 303
 304		/* Process crypto data */
 305		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
 306		atoken->packet_length = cryptlen;
 307
 308		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
 309			/* Fixup instruction field for AAD dir instruction */
 310			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 311
 312			/* Do not send to crypt engine in case of GMAC */
 313			atoken->instructions = EIP197_TOKEN_INS_LAST |
 314					       EIP197_TOKEN_INS_TYPE_HASH |
 315					       EIP197_TOKEN_INS_TYPE_OUTPUT;
 316		} else {
 317			atoken->instructions = EIP197_TOKEN_INS_LAST |
 318					       EIP197_TOKEN_INS_TYPE_CRYPTO |
 319					       EIP197_TOKEN_INS_TYPE_HASH |
 320					       EIP197_TOKEN_INS_TYPE_OUTPUT;
 321		}
 322
 323		cryptlen &= 15;
 324		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
 325			atoken->stat = 0;
 326			/* For CCM only, pad crypto data to the hash engine */
 327			atoken++;
 328			atoksize++;
 329			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 330			atoken->packet_length = 16 - cryptlen;
 331			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 332			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
 333		} else {
 334			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
 335		}
 336		atoken++;
 337		atoksize++;
 338	}
 339
 340	if (direction == SAFEXCEL_ENCRYPT) {
 341		/* Append ICV */
 342		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
 343		atoken->packet_length = digestsize;
 344		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
 345			       EIP197_TOKEN_STAT_LAST_PACKET;
 346		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
 347				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 348	} else {
 349		/* Extract ICV */
 350		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
 351		atoken->packet_length = digestsize;
 352		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
 353			       EIP197_TOKEN_STAT_LAST_PACKET;
 354		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
 355		atoken++;
 356		atoksize++;
 357
 358		/* Verify ICV */
 359		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
 360		atoken->packet_length = digestsize |
 361					EIP197_TOKEN_HASH_RESULT_VERIFY;
 362		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
 363			       EIP197_TOKEN_STAT_LAST_PACKET;
 364		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
 365	}
 366
 367	/* Fixup length of the token in the command descriptor */
 368	cdesc->additional_cdata_size = atoksize;
 369}
 370
 371static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
 372					const u8 *key, unsigned int len)
 373{
 374	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
 375	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 376	struct safexcel_crypto_priv *priv = ctx->base.priv;
 377	struct crypto_aes_ctx aes;
 378	int ret, i;
 379
 380	ret = aes_expandkey(&aes, key, len);
 381	if (ret)
 382		return ret;
 383
 384	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 385		for (i = 0; i < len / sizeof(u32); i++) {
 386			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
 387				ctx->base.needs_inv = true;
 388				break;
 389			}
 390		}
 391	}
 392
 393	for (i = 0; i < len / sizeof(u32); i++)
 394		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
 395
 396	ctx->key_len = len;
 397
 398	memzero_explicit(&aes, sizeof(aes));
 399	return 0;
 400}
 401
 402static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
 403				unsigned int len)
 404{
 405	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
 406	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
 407	struct safexcel_crypto_priv *priv = ctx->base.priv;
 408	struct crypto_authenc_keys keys;
 409	struct crypto_aes_ctx aes;
 410	int err = -EINVAL, i;
 411	const char *alg;
 412
 413	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
 414		goto badkey;
 415
 416	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
 417		/* Must have at least space for the nonce here */
 418		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
 419			goto badkey;
 420		/* last 4 bytes of key are the nonce! */
 421		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
 422				      CTR_RFC3686_NONCE_SIZE);
 423		/* exclude the nonce here */
 424		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
 425	}
 426
 427	/* Encryption key */
 428	switch (ctx->alg) {
 429	case SAFEXCEL_DES:
 430		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
 431		if (unlikely(err))
 432			goto badkey;
 433		break;
 434	case SAFEXCEL_3DES:
 435		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
 436		if (unlikely(err))
 437			goto badkey;
 438		break;
 439	case SAFEXCEL_AES:
 440		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
 441		if (unlikely(err))
 442			goto badkey;
 443		break;
 444	case SAFEXCEL_SM4:
 445		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
 446			goto badkey;
 447		break;
 448	default:
 449		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
 450		goto badkey;
 451	}
 452
 453	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
 454		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
 455			if (le32_to_cpu(ctx->key[i]) !=
 456			    ((u32 *)keys.enckey)[i]) {
 457				ctx->base.needs_inv = true;
 458				break;
 459			}
 460		}
 461	}
 462
 463	/* Auth key */
 464	switch (ctx->hash_alg) {
 465	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
 466		alg = "safexcel-sha1";
 467		break;
 468	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
 469		alg = "safexcel-sha224";
 470		break;
 471	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
 472		alg = "safexcel-sha256";
 473		break;
 474	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
 475		alg = "safexcel-sha384";
 476		break;
 477	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
 478		alg = "safexcel-sha512";
 479		break;
 480	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
 481		alg = "safexcel-sm3";
 482		break;
 483	default:
 484		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
 485		goto badkey;
 486	}
 487
 488	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
 489				 alg, ctx->state_sz))
 490		goto badkey;
 491
 492	/* Now copy the keys into the context */
 493	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
 494		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
 495	ctx->key_len = keys.enckeylen;
 496
 497	memzero_explicit(&keys, sizeof(keys));
 498	return 0;
 499
 500badkey:
 501	memzero_explicit(&keys, sizeof(keys));
 502	return err;
 503}
 504
/*
 * Fill in the context control words (control0/control1) of the first
 * command descriptor: cipher algorithm and key size, processing
 * direction, digest type and total context record size.
 *
 * Returns 0 on success or -EINVAL for an unsupported AES key size.
 */
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	/* Context size in 32 bit words; starts with just the cipher key */
	int ctrl_size = ctx->key_len / sizeof(u32);

	/* control1 carries the cipher mode (ECB/CBC/CTR/...) */
	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			/* GCM/CCM: one hash state follows the key */
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			/* Direction already encoded above; done */
			return 0;
		} else {
			/* HMAC authenc: both ipad and opad states stored */
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		/*
		 * Select hash-then-crypt vs crypt-then-hash ordering.
		 * For CCM and ESP-GMAC the hash engine runs over the
		 * plaintext side (hash first on encrypt, last on decrypt);
		 * all other modes hash the ciphertext side.
		 */
		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		/* Plain skcipher: crypto only, no hashing */
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	/* Merge in the cipher algorithm (and for AES, the key size) */
	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		/* For XTS, key_len holds both keys, hence the shift */
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}
 606
/*
 * Completion handler for a regular (non-invalidation) cipher request:
 * drain and error-check the request's result descriptors, unmap the
 * DMA scatterlists and, for CBC encryption, copy the final ciphertext
 * block back into the request IV for chaining.
 *
 * Returns the number of result descriptors processed; any error status
 * is reported through *ret.
 */
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	/* Nothing was queued for this request */
	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep only the first error encountered */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	/* Unmap: in-place operations were mapped bidirectionally once */
	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}
 672
 673static int safexcel_send_req(struct crypto_async_request *base, int ring,
 674			     struct safexcel_cipher_req *sreq,
 675			     struct scatterlist *src, struct scatterlist *dst,
 676			     unsigned int cryptlen, unsigned int assoclen,
 677			     unsigned int digestsize, u8 *iv, int *commands,
 678			     int *results)
 679{
 680	struct skcipher_request *areq = skcipher_request_cast(base);
 681	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
 682	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
 683	struct safexcel_crypto_priv *priv = ctx->base.priv;
 684	struct safexcel_command_desc *cdesc;
 685	struct safexcel_command_desc *first_cdesc = NULL;
 686	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
 687	struct scatterlist *sg;
 688	unsigned int totlen;
 689	unsigned int totlen_src = cryptlen + assoclen;
 690	unsigned int totlen_dst = totlen_src;
 691	struct safexcel_token *atoken;
 692	int n_cdesc = 0, n_rdesc = 0;
 693	int queued, i, ret = 0;
 694	bool first = true;
 695
 696	sreq->nr_src = sg_nents_for_len(src, totlen_src);
 697
 698	if (ctx->aead) {
 699		/*
 700		 * AEAD has auth tag appended to output for encrypt and
 701		 * removed from the output for decrypt!
 702		 */
 703		if (sreq->direction == SAFEXCEL_DECRYPT)
 704			totlen_dst -= digestsize;
 705		else
 706			totlen_dst += digestsize;
 707
 708		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
 709		       &ctx->base.ipad, ctx->state_sz);
 710		if (!ctx->xcm)
 711			memcpy(ctx->base.ctxr->data + (ctx->key_len +
 712			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
 713			       ctx->state_sz);
 714	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
 715		   (sreq->direction == SAFEXCEL_DECRYPT)) {
 716		/*
 717		 * Save IV from last crypto input word for CBC modes in decrypt
 718		 * direction. Need to do this first in case of inplace operation
 719		 * as it will be overwritten.
 720		 */
 721		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
 722				   crypto_skcipher_ivsize(skcipher),
 723				   (totlen_src -
 724				    crypto_skcipher_ivsize(skcipher)));
 725	}
 726
 727	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
 728
 729	/*
 730	 * Remember actual input length, source buffer length may be
 731	 * updated in case of inline operation below.
 732	 */
 733	totlen = totlen_src;
 734	queued = totlen_src;
 735
 736	if (src == dst) {
 737		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
 738		sreq->nr_dst = sreq->nr_src;
 739		if (unlikely((totlen_src || totlen_dst) &&
 740		    (sreq->nr_src <= 0))) {
 741			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
 742				max(totlen_src, totlen_dst));
 743			return -EINVAL;
 744		}
 745		if (sreq->nr_src > 0)
 746			dma_map_sg(priv->dev, src, sreq->nr_src,
 747				   DMA_BIDIRECTIONAL);
 748	} else {
 749		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
 750			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
 751				totlen_src);
 752			return -EINVAL;
 753		}
 754
 755		if (sreq->nr_src > 0)
 756			dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
 
 757
 758		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
 759			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
 760				totlen_dst);
 761			ret = -EINVAL;
 762			goto unmap;
 763		}
 764
 765		if (sreq->nr_dst > 0)
 766			dma_map_sg(priv->dev, dst, sreq->nr_dst,
 767				   DMA_FROM_DEVICE);
 
 
 768	}
 769
 770	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
 771
 772	if (!totlen) {
 773		/*
 774		 * The EIP97 cannot deal with zero length input packets!
 775		 * So stuff a dummy command descriptor indicating a 1 byte
 776		 * (dummy) input packet, using the context record as source.
 777		 */
 778		first_cdesc = safexcel_add_cdesc(priv, ring,
 779						 1, 1, ctx->base.ctxr_dma,
 780						 1, 1, ctx->base.ctxr_dma,
 781						 &atoken);
 782		if (IS_ERR(first_cdesc)) {
 783			/* No space left in the command descriptor ring */
 784			ret = PTR_ERR(first_cdesc);
 785			goto cdesc_rollback;
 786		}
 787		n_cdesc = 1;
 788		goto skip_cdesc;
 789	}
 790
 791	/* command descriptors */
 792	for_each_sg(src, sg, sreq->nr_src, i) {
 793		int len = sg_dma_len(sg);
 794
 795		/* Do not overflow the request */
 796		if (queued < len)
 797			len = queued;
 798
 799		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
 800					   !(queued - len),
 801					   sg_dma_address(sg), len, totlen,
 802					   ctx->base.ctxr_dma, &atoken);
 803		if (IS_ERR(cdesc)) {
 804			/* No space left in the command descriptor ring */
 805			ret = PTR_ERR(cdesc);
 806			goto cdesc_rollback;
 807		}
 808
 809		if (!n_cdesc)
 810			first_cdesc = cdesc;
 811
 812		n_cdesc++;
 813		queued -= len;
 814		if (!queued)
 815			break;
 816	}
 817skip_cdesc:
 818	/* Add context control words and token to first command descriptor */
 819	safexcel_context_control(ctx, base, sreq, first_cdesc);
 820	if (ctx->aead)
 821		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
 822				    sreq->direction, cryptlen,
 823				    assoclen, digestsize);
 824	else
 825		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
 826					cryptlen);
 827
 828	/* result descriptors */
 829	for_each_sg(dst, sg, sreq->nr_dst, i) {
 830		bool last = (i == sreq->nr_dst - 1);
 831		u32 len = sg_dma_len(sg);
 832
 833		/* only allow the part of the buffer we know we need */
 834		if (len > totlen_dst)
 835			len = totlen_dst;
 836		if (unlikely(!len))
 837			break;
 838		totlen_dst -= len;
 839
 840		/* skip over AAD space in buffer - not written */
 841		if (assoclen) {
 842			if (assoclen >= len) {
 843				assoclen -= len;
 844				continue;
 845			}
 846			rdesc = safexcel_add_rdesc(priv, ring, first, last,
 847						   sg_dma_address(sg) +
 848						   assoclen,
 849						   len - assoclen);
 850			assoclen = 0;
 851		} else {
 852			rdesc = safexcel_add_rdesc(priv, ring, first, last,
 853						   sg_dma_address(sg),
 854						   len);
 855		}
 856		if (IS_ERR(rdesc)) {
 857			/* No space left in the result descriptor ring */
 858			ret = PTR_ERR(rdesc);
 859			goto rdesc_rollback;
 860		}
 861		if (first) {
 862			first_rdesc = rdesc;
 863			first = false;
 864		}
 865		n_rdesc++;
 866	}
 867
 868	if (unlikely(first)) {
 869		/*
 870		 * Special case: AEAD decrypt with only AAD data.
 871		 * In this case there is NO output data from the engine,
 872		 * but the engine still needs a result descriptor!
 873		 * Create a dummy one just for catching the result token.
 874		 */
 875		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
 876		if (IS_ERR(rdesc)) {
 877			/* No space left in the result descriptor ring */
 878			ret = PTR_ERR(rdesc);
 879			goto rdesc_rollback;
 880		}
 881		first_rdesc = rdesc;
 882		n_rdesc = 1;
 883	}
 884
 885	safexcel_rdr_req_set(priv, ring, first_rdesc, base);
 886
 887	*commands = n_cdesc;
 888	*results = n_rdesc;
 889	return 0;
 890
 891rdesc_rollback:
 892	for (i = 0; i < n_rdesc; i++)
 893		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
 894cdesc_rollback:
 895	for (i = 0; i < n_cdesc; i++)
 896		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
 897unmap:
 898	if (src == dst) {
 899		if (sreq->nr_src > 0)
 900			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 901				     DMA_BIDIRECTIONAL);
 902	} else {
 903		if (sreq->nr_src > 0)
 904			dma_unmap_sg(priv->dev, src, sreq->nr_src,
 905				     DMA_TO_DEVICE);
 906		if (sreq->nr_dst > 0)
 907			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
 908				     DMA_FROM_DEVICE);
 909	}
 910
 911	return ret;
 912}
 913
/*
 * Completion handler for a context-invalidation request: drain and
 * error-check its result descriptors, then either free the context
 * record (tfm teardown via exit_inv) or re-queue the original request
 * on a freshly selected ring so it runs against a clean context.
 *
 * Returns the number of result descriptors processed.
 */
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	/* Nothing was queued for this request */
	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		/* Keep only the first error encountered */
		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		/* tfm teardown: release the context record and finish */
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	/* Re-queue the original request, possibly on a different ring */
	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Not done yet: the re-queued request will complete the caller */
	*should_complete = false;

	return ndesc;
}
 972
 973static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
 974					   int ring,
 975					   struct crypto_async_request *async,
 976					   bool *should_complete, int *ret)
 977{
 978	struct skcipher_request *req = skcipher_request_cast(async);
 979	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
 980	int err;
 981
 982	if (sreq->needs_inv) {
 983		sreq->needs_inv = false;
 984		err = safexcel_handle_inv_result(priv, ring, async, sreq,
 985						 should_complete, ret);
 986	} else {
 987		err = safexcel_handle_req_result(priv, ring, async, req->src,
 988						 req->dst, req->cryptlen, sreq,
 989						 should_complete, ret);
 990	}
 991
 992	return err;
 993}
 994
 995static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
 996				       int ring,
 997				       struct crypto_async_request *async,
 998				       bool *should_complete, int *ret)
 999{
1000	struct aead_request *req = aead_request_cast(async);
1001	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1002	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1003	int err;
1004
1005	if (sreq->needs_inv) {
1006		sreq->needs_inv = false;
1007		err = safexcel_handle_inv_result(priv, ring, async, sreq,
1008						 should_complete, ret);
1009	} else {
1010		err = safexcel_handle_req_result(priv, ring, async, req->src,
1011						 req->dst,
1012						 req->cryptlen + crypto_aead_authsize(tfm),
1013						 sreq, should_complete, ret);
1014	}
1015
1016	return err;
1017}
1018
1019static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1020				    int ring, int *commands, int *results)
1021{
1022	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1023	struct safexcel_crypto_priv *priv = ctx->base.priv;
1024	int ret;
1025
1026	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1027	if (unlikely(ret))
1028		return ret;
1029
1030	*commands = 1;
1031	*results = 1;
1032
1033	return 0;
1034}
1035
1036static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1037				  int *commands, int *results)
1038{
1039	struct skcipher_request *req = skcipher_request_cast(async);
1040	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1041	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1042	struct safexcel_crypto_priv *priv = ctx->base.priv;
1043	int ret;
1044
1045	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1046
1047	if (sreq->needs_inv) {
1048		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1049	} else {
1050		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1051		u8 input_iv[AES_BLOCK_SIZE];
1052
1053		/*
1054		 * Save input IV in case of CBC decrypt mode
1055		 * Will be overwritten with output IV prior to use!
1056		 */
1057		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1058
1059		ret = safexcel_send_req(async, ring, sreq, req->src,
1060					req->dst, req->cryptlen, 0, 0, input_iv,
1061					commands, results);
1062	}
1063
1064	sreq->rdescs = *results;
1065	return ret;
1066}
1067
1068static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1069			      int *commands, int *results)
1070{
1071	struct aead_request *req = aead_request_cast(async);
1072	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1073	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1074	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1075	struct safexcel_crypto_priv *priv = ctx->base.priv;
1076	int ret;
1077
1078	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1079
1080	if (sreq->needs_inv)
1081		ret = safexcel_cipher_send_inv(async, ring, commands, results);
1082	else
1083		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1084					req->cryptlen, req->assoclen,
1085					crypto_aead_authsize(tfm), req->iv,
1086					commands, results);
1087	sreq->rdescs = *results;
1088	return ret;
1089}
1090
/*
 * Synchronously invalidate the engine-side context record of a tfm
 * that is being torn down: queue an invalidation request on the tfm's
 * current ring and block until its completion callback fires.
 *
 * Must be called from sleepable context (it waits on a completion).
 * Returns 0 on success or the error reported by the completion.
 */
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_inv_result *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring = ctx->base.ring;

	init_completion(&result->completion);

	ctx = crypto_tfm_ctx(base->tfm);
	/* exit_inv makes the invalidation handler free the context record */
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	/* Block until the invalidation's completion callback signals us */
	wait_for_completion(&result->completion);

	if (result->error) {
		dev_warn(priv->dev,
			"cipher: sync: invalidate: completion error %d\n",
			 result->error);
		return result->error;
	}

	return 0;
}
1124
/*
 * Build an on-stack dummy skcipher request that only carries the
 * context-invalidation flag, and run it synchronously.
 */
static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_inv_result result = {};

	/* Clears the request header only; sreq lives in the ctx area after it */
	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      safexcel_inv_complete, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1139
/*
 * Build an on-stack dummy AEAD request that only carries the
 * context-invalidation flag, and run it synchronously.
 */
static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_inv_result result = {};

	/* Clears the request header only; sreq lives in the ctx area after it */
	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  safexcel_inv_complete, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}
1154
1155static int safexcel_queue_req(struct crypto_async_request *base,
1156			struct safexcel_cipher_req *sreq,
1157			enum safexcel_cipher_direction dir)
1158{
1159	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1160	struct safexcel_crypto_priv *priv = ctx->base.priv;
1161	int ret, ring;
1162
1163	sreq->needs_inv = false;
1164	sreq->direction = dir;
1165
1166	if (ctx->base.ctxr) {
1167		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1168			sreq->needs_inv = true;
1169			ctx->base.needs_inv = false;
1170		}
1171	} else {
1172		ctx->base.ring = safexcel_select_ring(priv);
1173		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1174						 EIP197_GFP_FLAGS(*base),
1175						 &ctx->base.ctxr_dma);
1176		if (!ctx->base.ctxr)
1177			return -ENOMEM;
1178	}
1179
1180	ring = ctx->base.ring;
1181
1182	spin_lock_bh(&priv->ring[ring].queue_lock);
1183	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1184	spin_unlock_bh(&priv->ring[ring].queue_lock);
1185
1186	queue_work(priv->ring[ring].workqueue,
1187		   &priv->ring[ring].work_data.work);
1188
1189	return ret;
1190}
1191
1192static int safexcel_encrypt(struct skcipher_request *req)
1193{
1194	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1195			SAFEXCEL_ENCRYPT);
1196}
1197
1198static int safexcel_decrypt(struct skcipher_request *req)
1199{
1200	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1201			SAFEXCEL_DECRYPT);
1202}
1203
1204static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1205{
1206	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1207	struct safexcel_alg_template *tmpl =
1208		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1209			     alg.skcipher.base);
1210
1211	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1212				    sizeof(struct safexcel_cipher_req));
1213
1214	ctx->base.priv = tmpl->priv;
1215
1216	ctx->base.send = safexcel_skcipher_send;
1217	ctx->base.handle_result = safexcel_skcipher_handle_result;
1218	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1219	ctx->ctrinit = 1;
1220	return 0;
1221}
1222
/*
 * Common tfm teardown: wipe the software key copy and, if a device
 * context record was allocated, wipe its data as well.
 *
 * Returns nonzero (-ENOMEM) when no context record exists; the
 * skcipher/aead exit wrappers use that as a signal that there is
 * nothing to invalidate or free.
 */
static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* memzero_explicit() so the wipe cannot be optimized away */
	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}
1236
1237static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1238{
1239	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1240	struct safexcel_crypto_priv *priv = ctx->base.priv;
1241	int ret;
1242
1243	if (safexcel_cipher_cra_exit(tfm))
1244		return;
1245
1246	if (priv->flags & EIP197_TRC_CACHE) {
1247		ret = safexcel_skcipher_exit_inv(tfm);
1248		if (ret)
1249			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1250				 ret);
1251	} else {
1252		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1253			      ctx->base.ctxr_dma);
1254	}
1255}
1256
1257static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1258{
1259	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1260	struct safexcel_crypto_priv *priv = ctx->base.priv;
1261	int ret;
1262
1263	if (safexcel_cipher_cra_exit(tfm))
1264		return;
1265
1266	if (priv->flags & EIP197_TRC_CACHE) {
1267		ret = safexcel_aead_exit_inv(tfm);
1268		if (ret)
1269			dev_warn(priv->dev, "aead: invalidation error %d\n",
1270				 ret);
1271	} else {
1272		dma_pool_free(priv->context_pool, ctx->base.ctxr,
1273			      ctx->base.ctxr_dma);
1274	}
1275}
1276
1277static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1278{
1279	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1280
1281	safexcel_skcipher_cra_init(tfm);
1282	ctx->alg  = SAFEXCEL_AES;
1283	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1284	ctx->blocksz = 0;
1285	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1286	return 0;
1287}
1288
/* Registration template for the hardware-offloaded ecb(aes) skcipher. */
struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1314
1315static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1316{
1317	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1318
1319	safexcel_skcipher_cra_init(tfm);
1320	ctx->alg  = SAFEXCEL_AES;
1321	ctx->blocksz = AES_BLOCK_SIZE;
1322	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1323	return 0;
1324}
1325
/* Registration template for the hardware-offloaded cbc(aes) skcipher. */
struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1352
1353static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1354{
1355	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1356
1357	safexcel_skcipher_cra_init(tfm);
1358	ctx->alg  = SAFEXCEL_AES;
1359	ctx->blocksz = AES_BLOCK_SIZE;
1360	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1361	return 0;
1362}
1363
/* Registration template for the hardware-offloaded cfb(aes) skcipher. */
struct safexcel_alg_template safexcel_alg_cfb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(aes)",
			.cra_driver_name = "safexcel-cfb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CFB acts as a stream cipher towards the API */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1390
1391static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1392{
1393	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395	safexcel_skcipher_cra_init(tfm);
1396	ctx->alg  = SAFEXCEL_AES;
1397	ctx->blocksz = AES_BLOCK_SIZE;
1398	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1399	return 0;
1400}
1401
/* Registration template for the hardware-offloaded ofb(aes) skcipher. */
struct safexcel_alg_template safexcel_alg_ofb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(aes)",
			.cra_driver_name = "safexcel-ofb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* OFB acts as a stream cipher towards the API */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1428
1429static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1430					   const u8 *key, unsigned int len)
1431{
1432	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1433	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1434	struct safexcel_crypto_priv *priv = ctx->base.priv;
1435	struct crypto_aes_ctx aes;
1436	int ret, i;
1437	unsigned int keylen;
1438
1439	/* last 4 bytes of key are the nonce! */
1440	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1441	/* exclude the nonce here */
1442	keylen = len - CTR_RFC3686_NONCE_SIZE;
1443	ret = aes_expandkey(&aes, key, keylen);
1444	if (ret)
1445		return ret;
1446
1447	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1448		for (i = 0; i < keylen / sizeof(u32); i++) {
1449			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1450				ctx->base.needs_inv = true;
1451				break;
1452			}
1453		}
1454	}
1455
1456	for (i = 0; i < keylen / sizeof(u32); i++)
1457		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1458
1459	ctx->key_len = keylen;
1460
1461	memzero_explicit(&aes, sizeof(aes));
1462	return 0;
1463}
1464
1465static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1466{
1467	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1468
1469	safexcel_skcipher_cra_init(tfm);
1470	ctx->alg  = SAFEXCEL_AES;
1471	ctx->blocksz = AES_BLOCK_SIZE;
1472	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1473	return 0;
1474}
1475
/* Registration template for the hardware rfc3686(ctr(aes)) skcipher. */
struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			/* CTR acts as a stream cipher towards the API */
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1503
1504static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1505			       unsigned int len)
1506{
1507	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1508	struct safexcel_crypto_priv *priv = ctx->base.priv;
1509	int ret;
1510
1511	ret = verify_skcipher_des_key(ctfm, key);
1512	if (ret)
1513		return ret;
1514
1515	/* if context exits and key changed, need to invalidate it */
1516	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1517		if (memcmp(ctx->key, key, len))
1518			ctx->base.needs_inv = true;
1519
1520	memcpy(ctx->key, key, len);
1521	ctx->key_len = len;
1522
1523	return 0;
1524}
1525
1526static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1527{
1528	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1529
1530	safexcel_skcipher_cra_init(tfm);
1531	ctx->alg  = SAFEXCEL_DES;
1532	ctx->blocksz = DES_BLOCK_SIZE;
1533	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1534	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1535	return 0;
1536}
1537
/* Registration template for the hardware-offloaded cbc(des) skcipher. */
struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1564
1565static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1566{
1567	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1568
1569	safexcel_skcipher_cra_init(tfm);
1570	ctx->alg  = SAFEXCEL_DES;
1571	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1572	ctx->blocksz = 0;
1573	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1574	return 0;
1575}
1576
/* Registration template for the hardware-offloaded ecb(des) skcipher. */
struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1602
1603static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1604				   const u8 *key, unsigned int len)
1605{
1606	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1607	struct safexcel_crypto_priv *priv = ctx->base.priv;
1608	int err;
1609
1610	err = verify_skcipher_des3_key(ctfm, key);
1611	if (err)
1612		return err;
1613
1614	/* if context exits and key changed, need to invalidate it */
1615	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1616		if (memcmp(ctx->key, key, len))
1617			ctx->base.needs_inv = true;
1618
1619	memcpy(ctx->key, key, len);
1620	ctx->key_len = len;
1621
1622	return 0;
1623}
1624
1625static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1626{
1627	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1628
1629	safexcel_skcipher_cra_init(tfm);
1630	ctx->alg  = SAFEXCEL_3DES;
1631	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1632	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1633	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1634	return 0;
1635}
1636
/* Registration template for the hardware cbc(des3_ede) skcipher. */
struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1663
1664static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1665{
1666	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1667
1668	safexcel_skcipher_cra_init(tfm);
1669	ctx->alg  = SAFEXCEL_3DES;
1670	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1671	ctx->blocksz = 0;
1672	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1673	return 0;
1674}
1675
/* Registration template for the hardware ecb(des3_ede) skcipher. */
struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1701
1702static int safexcel_aead_encrypt(struct aead_request *req)
1703{
1704	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1705
1706	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1707}
1708
1709static int safexcel_aead_decrypt(struct aead_request *req)
1710{
1711	struct safexcel_cipher_req *creq = aead_request_ctx(req);
1712
1713	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1714}
1715
1716static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1717{
1718	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1719	struct safexcel_alg_template *tmpl =
1720		container_of(tfm->__crt_alg, struct safexcel_alg_template,
1721			     alg.aead.base);
1722
1723	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1724				sizeof(struct safexcel_cipher_req));
1725
1726	ctx->base.priv = tmpl->priv;
1727
1728	ctx->alg  = SAFEXCEL_AES; /* default */
1729	ctx->blocksz = AES_BLOCK_SIZE;
1730	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1731	ctx->ctrinit = 1;
1732	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1733	ctx->aead = true;
1734	ctx->base.send = safexcel_aead_send;
1735	ctx->base.handle_result = safexcel_aead_handle_result;
1736	return 0;
1737}
1738
1739static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1740{
1741	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1742
1743	safexcel_aead_cra_init(tfm);
1744	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1745	ctx->state_sz = SHA1_DIGEST_SIZE;
1746	return 0;
1747}
1748
/* Registration template: AES-CBC encryption with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1774
1775static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1776{
1777	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1778
1779	safexcel_aead_cra_init(tfm);
1780	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1781	ctx->state_sz = SHA256_DIGEST_SIZE;
1782	return 0;
1783}
1784
/* Registration template: AES-CBC encryption with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1810
1811static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1812{
1813	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1814
1815	safexcel_aead_cra_init(tfm);
1816	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1817	ctx->state_sz = SHA256_DIGEST_SIZE;
1818	return 0;
1819}
1820
/* Registration template: AES-CBC encryption with HMAC-SHA224 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1846
1847static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1848{
1849	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1850
1851	safexcel_aead_cra_init(tfm);
1852	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1853	ctx->state_sz = SHA512_DIGEST_SIZE;
1854	return 0;
1855}
1856
/* Registration template: AES-CBC encryption with HMAC-SHA512 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1882
1883static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1884{
1885	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1886
1887	safexcel_aead_cra_init(tfm);
1888	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1889	ctx->state_sz = SHA512_DIGEST_SIZE;
1890	return 0;
1891}
1892
/* Registration template: AES-CBC encryption with HMAC-SHA384 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1918
1919static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
1920{
1921	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1922
1923	safexcel_aead_sha1_cra_init(tfm);
1924	ctx->alg = SAFEXCEL_3DES; /* override default */
1925	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1926	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1927	return 0;
1928}
1929
/* Registration template: 3DES-CBC encryption with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1955
1956static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
1957{
1958	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1959
1960	safexcel_aead_sha256_cra_init(tfm);
1961	ctx->alg = SAFEXCEL_3DES; /* override default */
1962	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1963	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1964	return 0;
1965}
1966
/* Registration template: 3DES-CBC encryption with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
1992
1993static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
1994{
1995	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1996
1997	safexcel_aead_sha224_cra_init(tfm);
1998	ctx->alg = SAFEXCEL_3DES; /* override default */
1999	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2000	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2001	return 0;
2002}
2003
/* Registration template: 3DES-CBC encryption with HMAC-SHA224 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2029
2030static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
2031{
2032	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2033
2034	safexcel_aead_sha512_cra_init(tfm);
2035	ctx->alg = SAFEXCEL_3DES; /* override default */
2036	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2037	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2038	return 0;
2039}
2040
/* Registration template: 3DES-CBC encryption with HMAC-SHA512 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2066
2067static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
2068{
2069	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071	safexcel_aead_sha384_cra_init(tfm);
2072	ctx->alg = SAFEXCEL_3DES; /* override default */
2073	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2074	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2075	return 0;
2076}
2077
/* Registration template: 3DES-CBC encryption with HMAC-SHA384 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2103
2104static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2105{
2106	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2107
2108	safexcel_aead_sha1_cra_init(tfm);
2109	ctx->alg = SAFEXCEL_DES; /* override default */
2110	ctx->blocksz = DES_BLOCK_SIZE;
2111	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2112	return 0;
2113}
2114
/* Registration template: DES-CBC encryption with HMAC-SHA1 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2140
2141static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2142{
2143	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2144
2145	safexcel_aead_sha256_cra_init(tfm);
2146	ctx->alg = SAFEXCEL_DES; /* override default */
2147	ctx->blocksz = DES_BLOCK_SIZE;
2148	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2149	return 0;
2150}
2151
/* Registration template: DES-CBC encryption with HMAC-SHA256 auth. */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2177
2178static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2179{
2180	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2181
2182	safexcel_aead_sha224_cra_init(tfm);
2183	ctx->alg = SAFEXCEL_DES; /* override default */
2184	ctx->blocksz = DES_BLOCK_SIZE;
2185	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2186	return 0;
2187}
2188
/*
 * authenc(hmac(sha224),cbc(des)): HMAC-SHA224 authenticated DES-CBC AEAD.
 * Note the algo_mask advertises the SHA2_256 engine capability, which also
 * covers SHA-224.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2214
2215static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2216{
2217	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2218
2219	safexcel_aead_sha512_cra_init(tfm);
2220	ctx->alg = SAFEXCEL_DES; /* override default */
2221	ctx->blocksz = DES_BLOCK_SIZE;
2222	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2223	return 0;
2224}
2225
/* authenc(hmac(sha512),cbc(des)): HMAC-SHA512 authenticated DES-CBC AEAD */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2251
2252static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2253{
2254	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2255
2256	safexcel_aead_sha384_cra_init(tfm);
2257	ctx->alg = SAFEXCEL_DES; /* override default */
2258	ctx->blocksz = DES_BLOCK_SIZE;
2259	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2260	return 0;
2261}
2262
/*
 * authenc(hmac(sha384),cbc(des)): HMAC-SHA384 authenticated DES-CBC AEAD.
 * Note the algo_mask advertises the SHA2_512 engine capability, which also
 * covers SHA-384.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2288
2289static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2290{
2291	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2292
2293	safexcel_aead_sha1_cra_init(tfm);
2294	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2295	return 0;
2296}
2297
/* authenc(hmac(sha1),rfc3686(ctr(aes))): HMAC-SHA1 authenticated AES-CTR */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2323
2324static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2325{
2326	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2327
2328	safexcel_aead_sha256_cra_init(tfm);
2329	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2330	return 0;
2331}
2332
/* authenc(hmac(sha256),rfc3686(ctr(aes))): HMAC-SHA256 authenticated AES-CTR */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2358
2359static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2360{
2361	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2362
2363	safexcel_aead_sha224_cra_init(tfm);
2364	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2365	return 0;
2366}
2367
/*
 * authenc(hmac(sha224),rfc3686(ctr(aes))): HMAC-SHA224 authenticated AES-CTR.
 * The algo_mask advertises the SHA2_256 engine capability, which also covers
 * SHA-224.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2393
2394static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2395{
2396	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2397
2398	safexcel_aead_sha512_cra_init(tfm);
2399	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2400	return 0;
2401}
2402
/* authenc(hmac(sha512),rfc3686(ctr(aes))): HMAC-SHA512 authenticated AES-CTR */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2428
2429static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2430{
2431	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2432
2433	safexcel_aead_sha384_cra_init(tfm);
2434	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2435	return 0;
2436}
2437
/*
 * authenc(hmac(sha384),rfc3686(ctr(aes))): HMAC-SHA384 authenticated AES-CTR.
 * The algo_mask advertises the SHA2_512 engine capability, which also covers
 * SHA-384.
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2463
2464static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
2465					   const u8 *key, unsigned int len)
2466{
2467	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
2468	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2469	struct safexcel_crypto_priv *priv = ctx->base.priv;
2470	struct crypto_aes_ctx aes;
2471	int ret, i;
2472	unsigned int keylen;
2473
2474	/* Check for illegal XTS keys */
2475	ret = xts_verify_key(ctfm, key, len);
2476	if (ret)
2477		return ret;
2478
2479	/* Only half of the key data is cipher key */
2480	keylen = (len >> 1);
2481	ret = aes_expandkey(&aes, key, keylen);
2482	if (ret)
2483		return ret;
2484
2485	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2486		for (i = 0; i < keylen / sizeof(u32); i++) {
2487			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
2488				ctx->base.needs_inv = true;
2489				break;
2490			}
2491		}
2492	}
2493
2494	for (i = 0; i < keylen / sizeof(u32); i++)
2495		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
2496
2497	/* The other half is the tweak key */
2498	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
2499	if (ret)
2500		return ret;
2501
2502	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
2503		for (i = 0; i < keylen / sizeof(u32); i++) {
2504			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
2505			    aes.key_enc[i]) {
2506				ctx->base.needs_inv = true;
2507				break;
2508			}
2509		}
2510	}
2511
2512	for (i = 0; i < keylen / sizeof(u32); i++)
2513		ctx->key[i + keylen / sizeof(u32)] =
2514			cpu_to_le32(aes.key_enc[i]);
2515
2516	ctx->key_len = keylen << 1;
2517
2518	memzero_explicit(&aes, sizeof(aes));
2519	return 0;
2520}
2521
2522static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
2523{
2524	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2525
2526	safexcel_skcipher_cra_init(tfm);
2527	ctx->alg  = SAFEXCEL_AES;
2528	ctx->blocksz = AES_BLOCK_SIZE;
2529	ctx->xts  = 1;
2530	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
2531	return 0;
2532}
2533
2534static int safexcel_encrypt_xts(struct skcipher_request *req)
2535{
2536	if (req->cryptlen < XTS_BLOCK_SIZE)
2537		return -EINVAL;
2538	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2539				  SAFEXCEL_ENCRYPT);
2540}
2541
2542static int safexcel_decrypt_xts(struct skcipher_request *req)
2543{
2544	if (req->cryptlen < XTS_BLOCK_SIZE)
2545		return -EINVAL;
2546	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
2547				  SAFEXCEL_DECRYPT);
2548}
2549
/* xts(aes): AES in XTS mode, key blob carries cipher + tweak key */
struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2577
/*
 * Set the AES-GCM key: expand and store the AES key, then derive the GHASH
 * hash key H = AES_K(0^128) and store it in the context's ipad area. When
 * the engine caches context records, a change to either the cipher key or
 * the derived hash key flags the cached record for invalidation.
 */
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		/* Don't leave (partial) key material on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* Key change on a cached context record requires invalidation */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);

	/*
	 * Compare/store the raw ciphertext words against the big-endian
	 * view of the stored copy; hashkey[] holds raw AES output bytes.
	 */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2628
2629static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
2630{
2631	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2632
2633	safexcel_aead_cra_init(tfm);
2634	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
2635	ctx->state_sz = GHASH_BLOCK_SIZE;
2636	ctx->xcm = EIP197_XCM_MODE_GCM;
2637	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2638
2639	return 0;
2640}
2641
/* GCM needs no extra teardown beyond the common AEAD exit */
static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	safexcel_aead_cra_exit(tfm);
}
2646
/* Delegate GCM tag length validation to the generic helper */
static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}
2652
/* gcm(aes): AES-GCM AEAD with GHASH authentication */
struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2679
/*
 * Set the AES-CCM key. The expanded key is stored twice: little-endian in
 * ctx->key for the cipher, and big-endian in the ipad area (after two
 * reserved AES blocks) for the engine's CBC-MAC (XCBC) computation. The
 * hash algorithm selector is picked to match the AES key size.
 */
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		/* Don't leave (partial) key material on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	/* Key change on a cached context record requires invalidation */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		/* MAC key copy lives 2 AES blocks into the ipad area */
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	/* Select the XCBC variant matching the AES key size */
	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}
2723
2724static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
2725{
2726	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2727
2728	safexcel_aead_cra_init(tfm);
2729	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2730	ctx->state_sz = 3 * AES_BLOCK_SIZE;
2731	ctx->xcm = EIP197_XCM_MODE_CCM;
2732	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
2733	ctx->ctrinit = 0;
2734	return 0;
2735}
2736
static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/*
	 * Same rule as crypto/ccm.c: a CCM tag must be an even number
	 * of bytes between 4 and 16 inclusive.
	 */
	if (authsize < 4 || authsize > 16 || (authsize & 1))
		return -EINVAL;

	return 0;
}
2756
2757static int safexcel_ccm_encrypt(struct aead_request *req)
2758{
2759	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2760
2761	if (req->iv[0] < 1 || req->iv[0] > 7)
2762		return -EINVAL;
2763
2764	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
2765}
2766
2767static int safexcel_ccm_decrypt(struct aead_request *req)
2768{
2769	struct safexcel_cipher_req *creq = aead_request_ctx(req);
2770
2771	if (req->iv[0] < 1 || req->iv[0] > 7)
2772		return -EINVAL;
2773
2774	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
2775}
2776
/* ccm(aes): AES-CCM AEAD with CBC-MAC (XCBC) authentication */
struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2803
2804static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
2805				     const u8 *key)
2806{
2807	struct safexcel_crypto_priv *priv = ctx->base.priv;
2808
2809	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
2810		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
2811			ctx->base.needs_inv = true;
2812
2813	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
2814	ctx->key_len = CHACHA_KEY_SIZE;
2815}
2816
2817static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
2818					     const u8 *key, unsigned int len)
2819{
2820	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
2821
2822	if (len != CHACHA_KEY_SIZE)
2823		return -EINVAL;
2824
2825	safexcel_chacha20_setkey(ctx, key);
2826
2827	return 0;
2828}
2829
2830static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
2831{
2832	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2833
2834	safexcel_skcipher_cra_init(tfm);
2835	ctx->alg  = SAFEXCEL_CHACHA20;
2836	ctx->ctrinit = 0;
2837	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
2838	return 0;
2839}
2840
/* chacha20: plain ChaCha20 stream cipher */
struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
2867
2868static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
2869				    const u8 *key, unsigned int len)
2870{
2871	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);
2872
2873	if (ctx->aead  == EIP197_AEAD_TYPE_IPSEC_ESP &&
2874	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
2875		/* ESP variant has nonce appended to key */
2876		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
2877		ctx->nonce = *(u32 *)(key + len);
2878	}
2879	if (len != CHACHA_KEY_SIZE)
2880		return -EINVAL;
2881
2882	safexcel_chacha20_setkey(ctx, key);
2883
2884	return 0;
2885}
2886
2887static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
2888					 unsigned int authsize)
2889{
2890	if (authsize != POLY1305_DIGEST_SIZE)
2891		return -EINVAL;
2892	return 0;
2893}
2894
/*
 * Common encrypt/decrypt path for ChaCha20-Poly1305. Normal-sized requests
 * go straight to the hardware queue; degenerate ones (zero/tiny payload,
 * or ESP requests with too little AAD) are redirected to the software
 * fallback tfm, which first needs the key (plus ESP nonce, if any)
 * programmed into it.
 */
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/* NOTE: shares the request ctx with creq; only one is live per path */
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		/* Mirror the fallback's request flags back to the caller */
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	/* Forward the whole request unchanged to the fallback tfm */
	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
2947
/* Thin encrypt wrapper around the common ChaCha20-Poly1305 path */
static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}
2952
/* Thin decrypt wrapper around the common ChaCha20-Poly1305 path */
static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}
2957
/*
 * Common init for AEADs that keep a software fallback: do the regular AEAD
 * init, allocate an async-capable fallback tfm by cra_name, and size the
 * request context so it can hold either our own request state or the
 * fallback's subrequest, whichever is larger.
 */
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	/* Request ctx must fit our state or the fallback's subrequest */
	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}
2979
2980static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
2981{
2982	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2983
2984	safexcel_aead_fallback_cra_init(tfm);
2985	ctx->alg  = SAFEXCEL_CHACHA20;
2986	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
2987		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
2988	ctx->ctrinit = 0;
2989	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
2990	ctx->state_sz = 0; /* Precomputed by HW */
2991	return 0;
2992}
2993
/* Release the software fallback tfm, then do the common AEAD teardown */
static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}
3001
/* rfc7539(chacha20,poly1305): ChaCha20-Poly1305 AEAD with SW fallback */
struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3030
/* ESP flavour: same as chachapoly, but mark as IPsec ESP and skip the IV
 * part of the AAD. Fields are set even on init failure; the tfm is torn
 * down anyway in that case.
 */
static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
3041
/* rfc7539esp(chacha20,poly1305): IPsec ESP ChaCha20-Poly1305 AEAD */
struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		/* ESP carries the nonce in the key, not the per-request IV */
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3070
3071static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
3072					const u8 *key, unsigned int len)
3073{
3074	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3075	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3076	struct safexcel_crypto_priv *priv = ctx->base.priv;
3077
3078	if (len != SM4_KEY_SIZE)
3079		return -EINVAL;
3080
3081	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
3082		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
3083			ctx->base.needs_inv = true;
3084
3085	memcpy(ctx->key, key, SM4_KEY_SIZE);
3086	ctx->key_len = SM4_KEY_SIZE;
3087
3088	return 0;
3089}
3090
3091static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
3092{
3093	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3094	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3095		return -EINVAL;
3096	else
3097		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3098					  SAFEXCEL_ENCRYPT);
3099}
3100
3101static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
3102{
3103	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3104	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3105		return -EINVAL;
3106	else
3107		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
3108					  SAFEXCEL_DECRYPT);
3109}
3110
3111static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
3112{
3113	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3114
3115	safexcel_skcipher_cra_init(tfm);
3116	ctx->alg  = SAFEXCEL_SM4;
3117	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
3118	ctx->blocksz = 0;
3119	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
3120	return 0;
3121}
3122
/* ecb(sm4): SM4 block cipher in ECB mode */
struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3148
3149static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
3150{
3151	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3152
3153	safexcel_skcipher_cra_init(tfm);
3154	ctx->alg  = SAFEXCEL_SM4;
3155	ctx->blocksz = SM4_BLOCK_SIZE;
3156	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
3157	return 0;
3158}
3159
/* cbc(sm4): SM4 in CBC mode with a one-block IV (async, HW-only) */
struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3186
3187static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
3188{
3189	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3190
3191	safexcel_skcipher_cra_init(tfm);
3192	ctx->alg  = SAFEXCEL_SM4;
3193	ctx->blocksz = SM4_BLOCK_SIZE;
3194	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
3195	return 0;
3196}
3197
/*
 * ofb(sm4): SM4 in OFB mode. Stream-cipher semantics, hence the
 * cra_blocksize of 1; also requires the engine's XFB feedback support.
 */
struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3224
3225static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
3226{
3227	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3228
3229	safexcel_skcipher_cra_init(tfm);
3230	ctx->alg  = SAFEXCEL_SM4;
3231	ctx->blocksz = SM4_BLOCK_SIZE;
3232	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
3233	return 0;
3234}
3235
/*
 * cfb(sm4): SM4 in CFB mode. Stream-cipher semantics, hence the
 * cra_blocksize of 1; also requires the engine's XFB feedback support.
 */
struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3262
3263static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
3264					   const u8 *key, unsigned int len)
3265{
3266	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
3267	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3268
3269	/* last 4 bytes of key are the nonce! */
3270	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3271	/* exclude the nonce here */
3272	len -= CTR_RFC3686_NONCE_SIZE;
3273
3274	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
3275}
3276
3277static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
3278{
3279	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3280
3281	safexcel_skcipher_cra_init(tfm);
3282	ctx->alg  = SAFEXCEL_SM4;
3283	ctx->blocksz = SM4_BLOCK_SIZE;
3284	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3285	return 0;
3286}
3287
/*
 * rfc3686(ctr(sm4)): SM4 counter mode with the RFC3686 nonce carried in
 * the key material (hence the enlarged min/max keysize).
 */
struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3315
3316static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
3317{
3318	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
3319	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
3320		return -EINVAL;
3321
3322	return safexcel_queue_req(&req->base, aead_request_ctx(req),
3323				  SAFEXCEL_ENCRYPT);
3324}
3325
/*
 * Queue an SM4 block-mode AEAD decrypt. The ciphertext portion (cryptlen
 * minus the trailing auth tag) must be a whole number of SM4 blocks; the
 * HW does not flag this itself (EIP96 4.3 bug), so check it here.
 *
 * NOTE(review): assumes cryptlen >= authsize — presumably guaranteed by
 * the crypto core for AEAD decrypt; otherwise the unsigned subtraction
 * wraps before the mask is applied. Confirm against the core.
 */
static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}
3337
3338static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
3339{
3340	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3341
3342	safexcel_aead_cra_init(tfm);
3343	ctx->alg = SAFEXCEL_SM4;
3344	ctx->blocksz = SM4_BLOCK_SIZE;
3345	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
3346	ctx->state_sz = SHA1_DIGEST_SIZE;
3347	return 0;
3348}
3349
/* authenc(hmac(sha1),cbc(sm4)): HW AEAD, block-aligned payloads only */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3375
3376static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
3377					 const u8 *key, unsigned int len)
3378{
3379	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3380	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3381
3382	/* Keep fallback cipher synchronized */
3383	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
3384	       safexcel_aead_setkey(ctfm, key, len);
3385}
3386
3387static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
3388					      unsigned int authsize)
3389{
3390	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3391	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3392
3393	/* Keep fallback cipher synchronized */
3394	return crypto_aead_setauthsize(ctx->fback, authsize);
3395}
3396
/*
 * Run the request on the software fallback cipher instead of the HW.
 *
 * Mirrors every request parameter (callback, src/dst scatterlists,
 * cryptlen, IV, AAD length) onto the subrequest embedded in the request
 * context, then dispatches it in the requested direction. Callers use
 * this for corner cases the engine cannot process (e.g. fully empty
 * AAD+payload requests).
 */
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	/* The request context is sized to hold the fallback subrequest */
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir ==  SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}
3416
/*
 * SM4-CBC + HMAC-SM3 encrypt. The payload must be block-aligned (the HW
 * does not report blocksize errors itself — EIP96 4.3 bug). A completely
 * empty request (no AAD and no payload) cannot be handled by the engine
 * and is punted to the software fallback.
 */
static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}
3430
/*
 * SM4-CBC + HMAC-SM3 decrypt. The ciphertext (cryptlen minus the auth
 * tag) must be block-aligned (EIP96 4.3 does not flag this itself). A
 * request whose only content is the tag — i.e. empty AAD and empty
 * plaintext — is punted to the software fallback.
 *
 * NOTE(review): assumes cryptlen >= authsize; presumably guaranteed by
 * the crypto core for AEAD decrypt, otherwise the unsigned subtraction
 * wraps — confirm.
 */
static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}
3446
3447static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3448{
3449	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3450
3451	safexcel_aead_fallback_cra_init(tfm);
3452	ctx->alg = SAFEXCEL_SM4;
3453	ctx->blocksz = SM4_BLOCK_SIZE;
3454	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3455	ctx->state_sz = SM3_DIGEST_SIZE;
3456	return 0;
3457}
3458
/*
 * authenc(hmac(sm3),cbc(sm4)): HW AEAD with a SW fallback for the
 * zero-length corner case (hence CRYPTO_ALG_NEED_FALLBACK).
 */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3486
3487static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3488{
3489	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3490
3491	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3492	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3493	return 0;
3494}
3495
/* authenc(hmac(sha1),rfc3686(ctr(sm4))): HW AEAD over SM4 counter mode */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3521
3522static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
3523{
3524	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3525
3526	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
3527	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3528	return 0;
3529}
3530
/* authenc(hmac(sm3),rfc3686(ctr(sm4))): HW AEAD over SM4 counter mode */
struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
3556
3557static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3558				       unsigned int len)
3559{
3560	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3561	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3562
3563	/* last 4 bytes of key are the nonce! */
3564	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3565
3566	len -= CTR_RFC3686_NONCE_SIZE;
3567	return safexcel_aead_gcm_setkey(ctfm, key, len);
3568}
3569
/* Delegate RFC4106 auth tag size validation to the generic core helper */
static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}
3575
3576static int safexcel_rfc4106_encrypt(struct aead_request *req)
3577{
3578	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3579	       safexcel_aead_encrypt(req);
3580}
3581
3582static int safexcel_rfc4106_decrypt(struct aead_request *req)
3583{
3584	return crypto_ipsec_check_assoclen(req->assoclen) ?:
3585	       safexcel_aead_decrypt(req);
3586}
3587
3588static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
3589{
3590	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3591	int ret;
3592
3593	ret = safexcel_aead_gcm_cra_init(tfm);
3594	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3595	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3596	return ret;
3597}
3598
3599struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
3600	.type = SAFEXCEL_ALG_TYPE_AEAD,
3601	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3602	.alg.aead = {
3603		.setkey = safexcel_rfc4106_gcm_setkey,
3604		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
3605		.encrypt = safexcel_rfc4106_encrypt,
3606		.decrypt = safexcel_rfc4106_decrypt,
3607		.ivsize = GCM_RFC4106_IV_SIZE,
3608		.maxauthsize = GHASH_DIGEST_SIZE,
3609		.base = {
3610			.cra_name = "rfc4106(gcm(aes))",
3611			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
3612			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3613			.cra_flags = CRYPTO_ALG_ASYNC |
3614				     CRYPTO_ALG_ALLOCATES_MEMORY |
3615				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3616			.cra_blocksize = 1,
3617			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3618			.cra_alignmask = 0,
3619			.cra_init = safexcel_rfc4106_gcm_cra_init,
3620			.cra_exit = safexcel_aead_gcm_cra_exit,
3621		},
3622	},
3623};
3624
3625static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
3626					    unsigned int authsize)
3627{
3628	if (authsize != GHASH_DIGEST_SIZE)
3629		return -EINVAL;
3630
3631	return 0;
3632}
3633
3634static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
3635{
3636	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3637	int ret;
3638
3639	ret = safexcel_aead_gcm_cra_init(tfm);
3640	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
3641	return ret;
3642}
3643
3644struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
3645	.type = SAFEXCEL_ALG_TYPE_AEAD,
3646	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
3647	.alg.aead = {
3648		.setkey = safexcel_rfc4106_gcm_setkey,
3649		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
3650		.encrypt = safexcel_rfc4106_encrypt,
3651		.decrypt = safexcel_rfc4106_decrypt,
3652		.ivsize = GCM_RFC4543_IV_SIZE,
3653		.maxauthsize = GHASH_DIGEST_SIZE,
3654		.base = {
3655			.cra_name = "rfc4543(gcm(aes))",
3656			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
3657			.cra_priority = SAFEXCEL_CRA_PRIORITY,
3658			.cra_flags = CRYPTO_ALG_ASYNC |
3659				     CRYPTO_ALG_ALLOCATES_MEMORY |
3660				     CRYPTO_ALG_KERN_DRIVER_ONLY,
3661			.cra_blocksize = 1,
3662			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3663			.cra_alignmask = 0,
3664			.cra_init = safexcel_rfc4543_gcm_cra_init,
3665			.cra_exit = safexcel_aead_gcm_cra_exit,
3666		},
3667	},
3668};
3669
/*
 * Set the key for rfc4309(ccm(aes)): the supplied key material is the AES
 * key followed by a 3-byte salt.
 *
 * ctx->nonce is assembled byte-wise: its first byte is the CCM "L" size
 * field (3, for the 4-byte counter mandated by RFC4309), and the next
 * three bytes are the salt taken from the end of the key. The remaining
 * key bytes go to the regular CCM setkey.
 */
static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
3686
3687static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3688					    unsigned int authsize)
3689{
3690	/* Borrowed from crypto/ccm.c */
3691	switch (authsize) {
3692	case 8:
3693	case 12:
3694	case 16:
3695		break;
3696	default:
3697		return -EINVAL;
3698	}
3699
3700	return 0;
3701}
3702
3703static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3704{
3705	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3706
3707	/* Borrowed from crypto/ccm.c */
3708	if (req->assoclen != 16 && req->assoclen != 20)
3709		return -EINVAL;
3710
3711	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3712}
3713
3714static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3715{
3716	struct safexcel_cipher_req *creq = aead_request_ctx(req);
3717
3718	/* Borrowed from crypto/ccm.c */
3719	if (req->assoclen != 16 && req->assoclen != 20)
3720		return -EINVAL;
3721
3722	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3723}
3724
3725static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
3726{
3727	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3728	int ret;
3729
3730	ret = safexcel_aead_ccm_cra_init(tfm);
3731	ctx->aead  = EIP197_AEAD_TYPE_IPSEC_ESP;
3732	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
3733	return ret;
3734}
3735
/* rfc4309(ccm(aes)): AES-CCM for IPsec ESP with the salt in the key */
struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};