// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <asm/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
	SAFEXCEL_ENCRYPT,
	SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
	SAFEXCEL_DES,
	SAFEXCEL_3DES,
	SAFEXCEL_AES,
	SAFEXCEL_CHACHA20,
	SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
	struct safexcel_context base;
	struct safexcel_crypto_priv *priv;

	u32 mode;
	enum safexcel_cipher_alg alg;
	u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
	u8 xcm;  /* 0=authenc, 1=GCM, 2 reserved for CCM */
	u8 aadskip;
	u8 blocksz;
	u32 ivmask;
	u32 ctrinit;

	__le32 key[16];
	u32 nonce;
	unsigned int key_len, xts;

	/* All the below is AEAD specific */
	u32 hash_alg;
	u32 state_sz;

	struct crypto_aead *fback;
};

struct safexcel_cipher_req {
	enum safexcel_cipher_direction direction;
	/* Number of result descriptors associated with the request */
	unsigned int rdescs;
	bool needs_inv;
	int nr_src, nr_dst;
};

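/*
 * Load the cipher IV into the command descriptor's token area and
 * return the number of 32-bit token words it occupies. CTR and
 * ChaCha20 always consume the full 4 words (nonce/IV/counter); for
 * the other modes only blocksz bytes are copied.
 */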
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return 4;
	}
	if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
		/* 96 bit nonce part */
		memcpy(&cdesc->control_data.token[0], &iv[4], 12);
		/* 32 bit counter */
		cdesc->control_data.token[3] = *(u32 *)iv;
		return 4;
	}

	cdesc->control_data.options |= ctx->ivmask;
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
	return ctx->blocksz / sizeof(u32);
}

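/*
 * Build the plain skcipher token: the IV followed by a single
 * "direction" instruction that runs the whole packet through the
 * crypto engine. If the IV already fills all 4 token words, the
 * instruction spills over into the additional token area (atoken).
 */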
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				    struct safexcel_command_desc *cdesc,
				    struct safexcel_token *atoken,
				    u32 length)
{
	struct safexcel_token *token;
	int ivlen;

	ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
	if (ivlen == 4) {
		/* No space in cdesc, instruction moves to atoken */
		cdesc->additional_cdata_size = 1;
		token = atoken;
	} else {
		/* Everything fits in cdesc */
		token = (struct safexcel_token *)(cdesc->control_data.token + 2);
		/* Need to pad with NOP */
		eip197_noop_token(&token[1]);
	}

	token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
	token->packet_length = length;
	token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
		      EIP197_TOKEN_STAT_LAST_HASH;
	token->instructions = EIP197_TOKEN_INS_LAST |
			      EIP197_TOKEN_INS_TYPE_CRYPTO |
			      EIP197_TOKEN_INS_TYPE_OUTPUT;
}

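/*
 * Fill the 4-word IV field of the command descriptor for AEAD modes:
 * nonce + 64-bit IV + counter for CTR and IPsec ESP, 96-bit IV +
 * counter for GCM and ChaCha20-Poly1305, or a full cipher block for
 * CBC.
 */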
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
			     struct safexcel_command_desc *cdesc)
{
	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
	    ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
		/* 32 bit nonce */
		cdesc->control_data.token[0] = ctx->nonce;
		/* 64 bit IV part */
		memcpy(&cdesc->control_data.token[1], iv, 8);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
		/* 96 bit IV part */
		memcpy(&cdesc->control_data.token[0], iv, 12);
		/* 32 bit counter, start at 0 or 1 (big endian!) */
		cdesc->control_data.token[3] =
			(__force u32)cpu_to_be32(ctx->ctrinit);
		return;
	}
	/* CBC */
	memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

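/*
 * Build the AEAD token: hash the (adjusted) AAD, optionally skip over
 * the ESP IV, run the payload through cipher and hash, then insert
 * the ICV on encrypt or retrieve-and-verify it on decrypt. CCM
 * additionally constructs the B0 block and AAD length field here and
 * pads both AAD and payload out to AES block boundaries for the
 * CBC-MAC.
 */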
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
				struct safexcel_command_desc *cdesc,
				struct safexcel_token *atoken,
				enum safexcel_cipher_direction direction,
				u32 cryptlen, u32 assoclen, u32 digestsize)
{
	struct safexcel_token *aadref;
	int atoksize = 2; /* Start with minimum size */
	int assocadj = assoclen - ctx->aadskip, aadalign;

	/* Always 4 dwords of embedded IV for AEAD modes */
	cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

	if (direction == SAFEXCEL_DECRYPT)
		cryptlen -= digestsize;

	if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
		/* Construct IV block B0 for the CBC-MAC */
		u8 *final_iv = (u8 *)cdesc->control_data.token;
		u8 *cbcmaciv = (u8 *)&atoken[1];
		__le32 *aadlen = (__le32 *)&atoken[5];

		if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
			/* Length + nonce */
			cdesc->control_data.token[0] = ctx->nonce;
			/* Fixup flags byte */
			*(__le32 *)cbcmaciv =
				cpu_to_le32(ctx->nonce |
					    ((assocadj > 0) << 6) |
					    ((digestsize - 2) << 2));
			/* 64 bit IV part */
			memcpy(&cdesc->control_data.token[1], iv, 8);
			memcpy(cbcmaciv + 4, iv, 8);
			/* Start counter at 0 */
			cdesc->control_data.token[3] = 0;
			/* Message length */
			*(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
		} else {
			/* Variable length IV part */
			memcpy(final_iv, iv, 15 - iv[0]);
			memcpy(cbcmaciv, iv, 15 - iv[0]);
			/* Start variable length counter at 0 */
			memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
			memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
			/* fixup flags byte */
			cbcmaciv[0] |= ((assocadj > 0) << 6) |
				       ((digestsize - 2) << 2);
			/* insert lower 2 bytes of message length */
			cbcmaciv[14] = cryptlen >> 8;
			cbcmaciv[15] = cryptlen & 255;
		}

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE +
					((assocadj > 0) << 1);
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
				       EIP197_TOKEN_INS_TYPE_HASH;

		if (likely(assocadj)) {
			*aadlen = cpu_to_le32((assocadj >> 8) |
					      (assocadj & 255) << 8);
			atoken += 6;
			atoksize += 7;
		} else {
			atoken += 5;
			atoksize += 6;
		}

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		atoken++;

		/* For CCM only, align AAD data towards hash engine */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		aadalign = (assocadj + 2) & 15;
		atoken->packet_length = assocadj && aadalign ?
						16 - aadalign :
						0;
		if (likely(cryptlen)) {
			atoken->stat = 0;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH;
		}
	} else {
		safexcel_aead_iv(ctx, iv, cdesc);

		/* Process AAD data */
		aadref = atoken;
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = assocadj;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		atoken->instructions = EIP197_TOKEN_INS_LAST |
				       EIP197_TOKEN_INS_TYPE_HASH;
	}
	atoken++;

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* For ESP mode (and not GMAC), skip over the IV */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	} else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
			    direction == SAFEXCEL_DECRYPT)) {
		/* Poly-chacha decryption needs a dummy NOP here ... */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = 16; /* According to Op Manual */
		atoken->stat = 0;
		atoken->instructions = 0;
		atoken++;
		atoksize++;
	}

	if (ctx->xcm) {
		/* For GCM and CCM, obtain enc(Y0) */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
		atoken->packet_length = 0;
		atoken->stat = 0;
		atoken->instructions = AES_BLOCK_SIZE;
		atoken++;

		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = AES_BLOCK_SIZE;
		atoken->stat = 0;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_TYPE_CRYPTO;
		atoken++;
		atoksize += 2;
	}

	if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
		/* Fixup stat field for AAD direction instruction */
		aadref->stat = 0;

		/* Process crypto data */
		atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
		atoken->packet_length = cryptlen;

		if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
			/* Fixup instruction field for AAD dir instruction */
			aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

			/* Do not send to crypt engine in case of GMAC */
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		} else {
			atoken->instructions = EIP197_TOKEN_INS_LAST |
					       EIP197_TOKEN_INS_TYPE_CRYPTO |
					       EIP197_TOKEN_INS_TYPE_HASH |
					       EIP197_TOKEN_INS_TYPE_OUTPUT;
		}

		cryptlen &= 15;
		if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
			atoken->stat = 0;
			/* For CCM only, pad crypto data to the hash engine */
			atoken++;
			atoksize++;
			atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
			atoken->packet_length = 16 - cryptlen;
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
			atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
		} else {
			atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
		}
		atoken++;
		atoksize++;
	}

	if (direction == SAFEXCEL_ENCRYPT) {
		/* Append ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
				       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
	} else {
		/* Extract ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
		atoken->packet_length = digestsize;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
		atoken++;
		atoksize++;

		/* Verify ICV */
		atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
		atoken->packet_length = digestsize |
					EIP197_TOKEN_HASH_RESULT_VERIFY;
		atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
			       EIP197_TOKEN_STAT_LAST_PACKET;
		atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
	}

	/* Fixup length of the token in the command descriptor */
	cdesc->additional_cdata_size = atoksize;
}

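/*
 * AES skcipher setkey: expand the key (which also validates its
 * length), flag a context invalidation if the key changed while the
 * old one may still sit in the engine's record cache, then store the
 * key words little-endian in the context.
 */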
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

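/*
 * authenc() AEAD setkey: split the key blob into encryption and
 * authentication keys, strip the RFC3686 nonce for CTR mode, verify
 * the cipher key per algorithm, and have safexcel_hmac_setkey()
 * precompute the HMAC ipad/opad digests that are later copied into
 * the context record.
 */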
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
				unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_authenc_keys keys;
	struct crypto_aes_ctx aes;
	int err = -EINVAL, i;
	const char *alg;

	if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
		goto badkey;

	if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
		/* Must have at least space for the nonce here */
		if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
			goto badkey;
		/* last 4 bytes of key are the nonce! */
		ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
				      CTR_RFC3686_NONCE_SIZE);
		/* exclude the nonce here */
		keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
	}

	/* Encryption key */
	switch (ctx->alg) {
	case SAFEXCEL_DES:
		err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_3DES:
		err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_AES:
		err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
		if (unlikely(err))
			goto badkey;
		break;
	case SAFEXCEL_SM4:
		if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
			goto badkey;
		break;
	default:
		dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
		goto badkey;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) !=
			    ((u32 *)keys.enckey)[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	/* Auth key */
	switch (ctx->hash_alg) {
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
		alg = "safexcel-sha1";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
		alg = "safexcel-sha224";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
		alg = "safexcel-sha256";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
		alg = "safexcel-sha384";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
		alg = "safexcel-sha512";
		break;
	case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
		alg = "safexcel-sm3";
		break;
	default:
		dev_err(priv->dev, "aead: unsupported hash algorithm\n");
		goto badkey;
	}

	if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
				 alg, ctx->state_sz))
		goto badkey;

	/* Now copy the keys into the context */
	for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
	ctx->key_len = keys.enckeylen;

	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	memzero_explicit(&keys, sizeof(keys));
	return err;
}

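/*
 * Fill the two context control words in the first command descriptor:
 * operation type (in/out, cipher/hash ordering), key and digest
 * configuration, cipher and hash algorithm selection, and the total
 * context record size.
 */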
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
				    struct crypto_async_request *async,
				    struct safexcel_cipher_req *sreq,
				    struct safexcel_command_desc *cdesc)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ctrl_size = ctx->key_len / sizeof(u32);

	cdesc->control_data.control1 = ctx->mode;

	if (ctx->aead) {
		/* Take in account the ipad+opad digests */
		if (ctx->xcm) {
			ctrl_size += ctx->state_sz / sizeof(u32);
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_XCM |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		} else if (ctx->alg == SAFEXCEL_CHACHA20) {
			/* Chacha20-Poly1305 */
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
				(sreq->direction == SAFEXCEL_ENCRYPT ?
					CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
					CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
			return 0;
		} else {
			ctrl_size += ctx->state_sz / sizeof(u32) * 2;
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_DIGEST_HMAC |
				ctx->hash_alg |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		}

		if (sreq->direction == SAFEXCEL_ENCRYPT &&
		    (ctx->xcm == EIP197_XCM_MODE_CCM ||
		     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
		else if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
		else if (ctx->xcm == EIP197_XCM_MODE_CCM)
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
		else
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
	} else {
		if (sreq->direction == SAFEXCEL_ENCRYPT)
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
		else
			cdesc->control_data.control0 =
				CONTEXT_CONTROL_TYPE_CRYPTO_IN |
				CONTEXT_CONTROL_KEY_EN |
				CONTEXT_CONTROL_SIZE(ctrl_size);
	}

	if (ctx->alg == SAFEXCEL_DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_DES;
	} else if (ctx->alg == SAFEXCEL_3DES) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_3DES;
	} else if (ctx->alg == SAFEXCEL_AES) {
		switch (ctx->key_len >> ctx->xts) {
		case AES_KEYSIZE_128:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES128;
			break;
		case AES_KEYSIZE_192:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES192;
			break;
		case AES_KEYSIZE_256:
			cdesc->control_data.control0 |=
				CONTEXT_CONTROL_CRYPTO_ALG_AES256;
			break;
		default:
			dev_err(priv->dev, "aes keysize not supported: %u\n",
				ctx->key_len >> ctx->xts);
			return -EINVAL;
		}
	} else if (ctx->alg == SAFEXCEL_CHACHA20) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
	} else if (ctx->alg == SAFEXCEL_SM4) {
		cdesc->control_data.control0 |=
			CONTEXT_CONTROL_CRYPTO_ALG_SM4;
	}

	return 0;
}

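/*
 * Completion handler for regular (non-invalidation) cipher requests:
 * consume and error-check all result descriptors, unmap the DMA
 * buffers and, for CBC encrypt, copy the last ciphertext block back
 * into the request IV as the skcipher API expects.
 */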
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
				      struct crypto_async_request *async,
				      struct scatterlist *src,
				      struct scatterlist *dst,
				      unsigned int cryptlen,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct skcipher_request *areq = skcipher_request_cast(async);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: result: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	/*
	 * Update IV in req from last crypto output word for CBC modes
	 */
	if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
		/* For encrypt take the last output word */
		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (cryptlen -
				    crypto_skcipher_ivsize(skcipher)));
	}

	*should_complete = true;

	return ndesc;
}

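/*
 * Core request builder: map the src/dst scatterlists, copy the key
 * (and HMAC digests for AEAD) into the context record, emit one
 * command descriptor per source segment (or a dummy one for
 * zero-length input), attach the control words and token to the first
 * descriptor, and emit result descriptors for the destination while
 * skipping the AAD area that the engine does not write.
 */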
static int safexcel_send_req(struct crypto_async_request *base, int ring,
			     struct safexcel_cipher_req *sreq,
			     struct scatterlist *src, struct scatterlist *dst,
			     unsigned int cryptlen, unsigned int assoclen,
			     unsigned int digestsize, u8 *iv, int *commands,
			     int *results)
{
	struct skcipher_request *areq = skcipher_request_cast(base);
	struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct safexcel_command_desc *cdesc;
	struct safexcel_command_desc *first_cdesc = NULL;
	struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
	struct scatterlist *sg;
	unsigned int totlen;
	unsigned int totlen_src = cryptlen + assoclen;
	unsigned int totlen_dst = totlen_src;
	struct safexcel_token *atoken;
	int n_cdesc = 0, n_rdesc = 0;
	int queued, i, ret = 0;
	bool first = true;

	sreq->nr_src = sg_nents_for_len(src, totlen_src);

	if (ctx->aead) {
		/*
		 * AEAD has auth tag appended to output for encrypt and
		 * removed from the output for decrypt!
		 */
		if (sreq->direction == SAFEXCEL_DECRYPT)
			totlen_dst -= digestsize;
		else
			totlen_dst += digestsize;

		memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
		       &ctx->base.ipad, ctx->state_sz);
		if (!ctx->xcm)
			memcpy(ctx->base.ctxr->data + (ctx->key_len +
			       ctx->state_sz) / sizeof(u32), &ctx->base.opad,
			       ctx->state_sz);
	} else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
		   (sreq->direction == SAFEXCEL_DECRYPT)) {
		/*
		 * Save IV from last crypto input word for CBC modes in decrypt
		 * direction. Need to do this first in case of inplace operation
		 * as it will be overwritten.
		 */
		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
				   crypto_skcipher_ivsize(skcipher),
				   (totlen_src -
				    crypto_skcipher_ivsize(skcipher)));
	}

	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

	/*
	 * Remember the actual input length; the source buffer length may
	 * be updated below in case of an in-place operation.
	 */
	totlen = totlen_src;
	queued = totlen_src;

	if (src == dst) {
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		if (unlikely((totlen_src || totlen_dst) &&
			     (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
				max(totlen_src, totlen_dst));
			return -EINVAL;
		}
		if (sreq->nr_src > 0 &&
		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
			return -EIO;
	} else {
		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
			dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
				totlen_src);
			return -EINVAL;
		}

		if (sreq->nr_src > 0 &&
		    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
			return -EIO;

		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
			dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
				totlen_dst);
			ret = -EINVAL;
			goto unmap;
		}

		if (sreq->nr_dst > 0 &&
		    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
			ret = -EIO;
			goto unmap;
		}
	}

	memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

	if (!totlen) {
		/*
		 * The EIP97 cannot deal with zero length input packets!
		 * So stuff a dummy command descriptor indicating a 1 byte
		 * (dummy) input packet, using the context record as source.
		 */
		first_cdesc = safexcel_add_cdesc(priv, ring,
						 1, 1, ctx->base.ctxr_dma,
						 1, 1, ctx->base.ctxr_dma,
						 &atoken);
		if (IS_ERR(first_cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(first_cdesc);
			goto cdesc_rollback;
		}
		n_cdesc = 1;
		goto skip_cdesc;
	}

	/* command descriptors */
	for_each_sg(src, sg, sreq->nr_src, i) {
		int len = sg_dma_len(sg);

		/* Do not overflow the request */
		if (queued < len)
			len = queued;

		cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
					   !(queued - len),
					   sg_dma_address(sg), len, totlen,
					   ctx->base.ctxr_dma, &atoken);
		if (IS_ERR(cdesc)) {
			/* No space left in the command descriptor ring */
			ret = PTR_ERR(cdesc);
			goto cdesc_rollback;
		}

		if (!n_cdesc)
			first_cdesc = cdesc;

		n_cdesc++;
		queued -= len;
		if (!queued)
			break;
	}
skip_cdesc:
	/* Add context control words and token to first command descriptor */
	safexcel_context_control(ctx, base, sreq, first_cdesc);
	if (ctx->aead)
		safexcel_aead_token(ctx, iv, first_cdesc, atoken,
				    sreq->direction, cryptlen,
				    assoclen, digestsize);
	else
		safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
					cryptlen);

	/* result descriptors */
	for_each_sg(dst, sg, sreq->nr_dst, i) {
		bool last = (i == sreq->nr_dst - 1);
		u32 len = sg_dma_len(sg);

		/* only allow the part of the buffer we know we need */
		if (len > totlen_dst)
			len = totlen_dst;
		if (unlikely(!len))
			break;
		totlen_dst -= len;

		/* skip over AAD space in buffer - not written */
		if (assoclen) {
			if (assoclen >= len) {
				assoclen -= len;
				continue;
			}
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg) +
						   assoclen,
						   len - assoclen);
			assoclen = 0;
		} else {
			rdesc = safexcel_add_rdesc(priv, ring, first, last,
						   sg_dma_address(sg),
						   len);
		}
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		if (first) {
			first_rdesc = rdesc;
			first = false;
		}
		n_rdesc++;
	}

	if (unlikely(first)) {
		/*
		 * Special case: AEAD decrypt with only AAD data.
		 * In this case there is NO output data from the engine,
		 * but the engine still needs a result descriptor!
		 * Create a dummy one just for catching the result token.
		 */
		rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
		if (IS_ERR(rdesc)) {
			/* No space left in the result descriptor ring */
			ret = PTR_ERR(rdesc);
			goto rdesc_rollback;
		}
		first_rdesc = rdesc;
		n_rdesc = 1;
	}

	safexcel_rdr_req_set(priv, ring, first_rdesc, base);

	*commands = n_cdesc;
	*results = n_rdesc;
	return 0;

rdesc_rollback:
	for (i = 0; i < n_rdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
	for (i = 0; i < n_cdesc; i++)
		safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap:
	if (src == dst) {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_BIDIRECTIONAL);
	} else {
		if (sreq->nr_src > 0)
			dma_unmap_sg(priv->dev, src, sreq->nr_src,
				     DMA_TO_DEVICE);
		if (sreq->nr_dst > 0)
			dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
				     DMA_FROM_DEVICE);
	}

	return ret;
}

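/*
 * Completion handler for a context invalidation request: when the tfm
 * is being torn down (exit_inv), free the context record; otherwise
 * re-queue the original request that triggered the invalidation on a
 * freshly selected ring.
 */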
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
				      int ring,
				      struct crypto_async_request *base,
				      struct safexcel_cipher_req *sreq,
				      bool *should_complete, int *ret)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_result_desc *rdesc;
	int ndesc = 0, enq_ret;

	*ret = 0;

	if (unlikely(!sreq->rdescs))
		return 0;

	while (sreq->rdescs--) {
		rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
		if (IS_ERR(rdesc)) {
			dev_err(priv->dev,
				"cipher: invalidate: could not retrieve the result descriptor\n");
			*ret = PTR_ERR(rdesc);
			break;
		}

		if (likely(!*ret))
			*ret = safexcel_rdesc_check_errors(priv, rdesc);

		ndesc++;
	}

	safexcel_complete(priv, ring);

	if (ctx->base.exit_inv) {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);

		*should_complete = true;

		return ndesc;
	}

	ring = safexcel_select_ring(priv);
	ctx->base.ring = ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	if (enq_ret != -EINPROGRESS)
		*ret = enq_ret;

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	*should_complete = false;

	return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
					   int ring,
					   struct crypto_async_request *async,
					   bool *should_complete, int *ret)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst, req->cryptlen, sreq,
						 should_complete, ret);
	}

	return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
				       int ring,
				       struct crypto_async_request *async,
				       bool *should_complete, int *ret)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	int err;

	if (sreq->needs_inv) {
		sreq->needs_inv = false;
		err = safexcel_handle_inv_result(priv, ring, async, sreq,
						 should_complete, ret);
	} else {
		err = safexcel_handle_req_result(priv, ring, async, req->src,
						 req->dst,
						 req->cryptlen + crypto_aead_authsize(tfm),
						 sreq, should_complete, ret);
	}

	return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
				    int ring, int *commands, int *results)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
	if (unlikely(ret))
		return ret;

	*commands = 1;
	*results = 1;

	return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
				  int *commands, int *results)
{
	struct skcipher_request *req = skcipher_request_cast(async);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv) {
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	} else {
		struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
		u8 input_iv[AES_BLOCK_SIZE];

		/*
		 * Save input IV in case of CBC decrypt mode
		 * Will be overwritten with output IV prior to use!
		 */
		memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

		ret = safexcel_send_req(async, ring, sreq, req->src,
					req->dst, req->cryptlen, 0, 0, input_iv,
					commands, results);
	}

	sreq->rdescs = *results;
	return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
			      int *commands, int *results)
{
	struct aead_request *req = aead_request_cast(async);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

	if (sreq->needs_inv)
		ret = safexcel_cipher_send_inv(async, ring, commands, results);
	else
		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
					req->cryptlen, req->assoclen,
					crypto_aead_authsize(tfm), req->iv,
					commands, results);
	sreq->rdescs = *results;
	return ret;
}

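/*
 * Synchronously invalidate the context record: queue a dummy request
 * marked needs_inv and wait for the engine to complete it. Used from
 * the cra_exit paths below, with the request allocated on the stack.
 */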
static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
				    struct crypto_async_request *base,
				    struct safexcel_cipher_req *sreq,
				    struct crypto_wait *result)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ring = ctx->base.ring;
	int err;

	ctx = crypto_tfm_ctx(base->tfm);
	ctx->base.exit_inv = true;
	sreq->needs_inv = true;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	err = crypto_wait_req(-EINPROGRESS, result);

	if (err) {
		dev_warn(priv->dev,
			 "cipher: sync: invalidate: completion error %d\n",
			 err);
		return err;
	}

	return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
	DECLARE_CRYPTO_WAIT(result);

	memset(req, 0, sizeof(struct skcipher_request));

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &result);
	skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
	EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
	DECLARE_CRYPTO_WAIT(result);

	memset(req, 0, sizeof(struct aead_request));

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  crypto_req_done, &result);
	aead_request_set_tfm(req, __crypto_aead_cast(tfm));

	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

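/*
 * Common entry point for all encrypt/decrypt calls: allocate the DMA
 * context record on first use, convert a pending key change into an
 * invalidation request if the engine has a record cache, then enqueue
 * the request on the context's ring and kick its worker.
 */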
static int safexcel_queue_req(struct crypto_async_request *base,
			      struct safexcel_cipher_req *sreq,
			      enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret, ring;

	sreq->needs_inv = false;
	sreq->direction = dir;

	if (ctx->base.ctxr) {
		if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
			sreq->needs_inv = true;
			ctx->base.needs_inv = false;
		}
	} else {
		ctx->base.ring = safexcel_select_ring(priv);
		ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
						 EIP197_GFP_FLAGS(*base),
						 &ctx->base.ctxr_dma);
		if (!ctx->base.ctxr)
			return -ENOMEM;
	}

	ring = ctx->base.ring;

	spin_lock_bh(&priv->ring[ring].queue_lock);
	ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
	spin_unlock_bh(&priv->ring[ring].queue_lock);

	queue_work(priv->ring[ring].workqueue,
		   &priv->ring[ring].work_data.work);

	return ret;
}

static int safexcel_encrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.skcipher.base);

	crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
				    sizeof(struct safexcel_cipher_req));

	ctx->base.priv = tmpl->priv;

	ctx->base.send = safexcel_skcipher_send;
	ctx->base.handle_result = safexcel_skcipher_handle_result;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	memzero_explicit(ctx->key, sizeof(ctx->key));

	/* context not allocated, skip invalidation */
	if (!ctx->base.ctxr)
		return -ENOMEM;

	memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
	return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_skcipher_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "skcipher: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	if (safexcel_cipher_cra_exit(tfm))
		return;

	if (priv->flags & EIP197_TRC_CACHE) {
		ret = safexcel_aead_exit_inv(tfm);
		if (ret)
			dev_warn(priv->dev, "aead: invalidation error %d\n",
				 ret);
	} else {
		dma_pool_free(priv->context_pool, ctx->base.ctxr,
			      ctx->base.ctxr_dma);
	}
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.base = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "safexcel-ecb-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aes_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "safexcel-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

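/*
 * rfc3686(ctr(aes)) setkey: the last 4 key bytes are the RFC3686
 * nonce, which is stashed separately and excluded from the AES key
 * proper before the usual expand/compare/store sequence.
 */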
static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	keylen = len - CTR_RFC3686_NONCE_SIZE;
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(aes))",
			.cra_driver_name = "safexcel-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
			       unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int ret;

	ret = verify_skcipher_des_key(ctfm, key);
	if (ret)
		return ret;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES;
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "safexcel-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES_KEY_SIZE,
		.max_keysize = DES_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "safexcel-ecb-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
				    const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	int err;

	err = verify_skcipher_des3_key(ctfm, key);
	if (err)
		return err;

	/* if context exists and key changed, need to invalidate it */
	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, len))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, len);
	ctx->key_len = len;

	return 0;
}

static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "safexcel-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_DES,
	.alg.skcipher = {
		.setkey = safexcel_des3_ede_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = DES3_EDE_KEY_SIZE,
		.max_keysize = DES3_EDE_KEY_SIZE,
		.base = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "safexcel-ecb-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_des3_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

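/*
 * Shared AEAD init: defaults to AES-CBC with a 4-word IV; the
 * per-algorithm init functions below override cipher, block size,
 * IV handling and hash configuration as needed.
 */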
1642static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1643{
1644 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1645 struct safexcel_alg_template *tmpl =
1646 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1647 alg.aead.base);
1648
1649 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1650 sizeof(struct safexcel_cipher_req));
1651
1652 ctx->base.priv = tmpl->priv;
1653
1654 ctx->alg = SAFEXCEL_AES; /* default */
1655 ctx->blocksz = AES_BLOCK_SIZE;
1656 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1657 ctx->ctrinit = 1;
1658 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1659 ctx->aead = true;
1660 ctx->base.send = safexcel_aead_send;
1661 ctx->base.handle_result = safexcel_aead_handle_result;
1662 return 0;
1663}
1664
1665static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
1666{
1667 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1668
1669 safexcel_aead_cra_init(tfm);
1670 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1671 ctx->state_sz = SHA1_DIGEST_SIZE;
1672 return 0;
1673}
1674
1675struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
1676 .type = SAFEXCEL_ALG_TYPE_AEAD,
1677 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
1678 .alg.aead = {
1679 .setkey = safexcel_aead_setkey,
1680 .encrypt = safexcel_aead_encrypt,
1681 .decrypt = safexcel_aead_decrypt,
1682 .ivsize = AES_BLOCK_SIZE,
1683 .maxauthsize = SHA1_DIGEST_SIZE,
1684 .base = {
1685 .cra_name = "authenc(hmac(sha1),cbc(aes))",
1686 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
1687 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1688 .cra_flags = CRYPTO_ALG_ASYNC |
1689 CRYPTO_ALG_ALLOCATES_MEMORY |
1690 CRYPTO_ALG_KERN_DRIVER_ONLY,
1691 .cra_blocksize = AES_BLOCK_SIZE,
1692 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1693 .cra_alignmask = 0,
1694 .cra_init = safexcel_aead_sha1_cra_init,
1695 .cra_exit = safexcel_aead_cra_exit,
1696 .cra_module = THIS_MODULE,
1697 },
1698 },
1699};
1700
1701static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
1702{
1703 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1704
1705 safexcel_aead_cra_init(tfm);
1706 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1707 ctx->state_sz = SHA256_DIGEST_SIZE;
1708 return 0;
1709}
1710
1711struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
1712 .type = SAFEXCEL_ALG_TYPE_AEAD,
1713 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1714 .alg.aead = {
1715 .setkey = safexcel_aead_setkey,
1716 .encrypt = safexcel_aead_encrypt,
1717 .decrypt = safexcel_aead_decrypt,
1718 .ivsize = AES_BLOCK_SIZE,
1719 .maxauthsize = SHA256_DIGEST_SIZE,
1720 .base = {
1721 .cra_name = "authenc(hmac(sha256),cbc(aes))",
1722 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
1723 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1724 .cra_flags = CRYPTO_ALG_ASYNC |
1725 CRYPTO_ALG_ALLOCATES_MEMORY |
1726 CRYPTO_ALG_KERN_DRIVER_ONLY,
1727 .cra_blocksize = AES_BLOCK_SIZE,
1728 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1729 .cra_alignmask = 0,
1730 .cra_init = safexcel_aead_sha256_cra_init,
1731 .cra_exit = safexcel_aead_cra_exit,
1732 .cra_module = THIS_MODULE,
1733 },
1734 },
1735};
1736
1737static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
1738{
1739 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1740
1741 safexcel_aead_cra_init(tfm);
1742 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1743 ctx->state_sz = SHA256_DIGEST_SIZE;
1744 return 0;
1745}
1746
1747struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
1748 .type = SAFEXCEL_ALG_TYPE_AEAD,
1749 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
1750 .alg.aead = {
1751 .setkey = safexcel_aead_setkey,
1752 .encrypt = safexcel_aead_encrypt,
1753 .decrypt = safexcel_aead_decrypt,
1754 .ivsize = AES_BLOCK_SIZE,
1755 .maxauthsize = SHA224_DIGEST_SIZE,
1756 .base = {
1757 .cra_name = "authenc(hmac(sha224),cbc(aes))",
1758 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
1759 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1760 .cra_flags = CRYPTO_ALG_ASYNC |
1761 CRYPTO_ALG_ALLOCATES_MEMORY |
1762 CRYPTO_ALG_KERN_DRIVER_ONLY,
1763 .cra_blocksize = AES_BLOCK_SIZE,
1764 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1765 .cra_alignmask = 0,
1766 .cra_init = safexcel_aead_sha224_cra_init,
1767 .cra_exit = safexcel_aead_cra_exit,
1768 .cra_module = THIS_MODULE,
1769 },
1770 },
1771};
1772
1773static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
1774{
1775 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1776
1777 safexcel_aead_cra_init(tfm);
1778 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1779 ctx->state_sz = SHA512_DIGEST_SIZE;
1780 return 0;
1781}
1782
1783struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
1784 .type = SAFEXCEL_ALG_TYPE_AEAD,
1785 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1786 .alg.aead = {
1787 .setkey = safexcel_aead_setkey,
1788 .encrypt = safexcel_aead_encrypt,
1789 .decrypt = safexcel_aead_decrypt,
1790 .ivsize = AES_BLOCK_SIZE,
1791 .maxauthsize = SHA512_DIGEST_SIZE,
1792 .base = {
1793 .cra_name = "authenc(hmac(sha512),cbc(aes))",
1794 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
1795 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1796 .cra_flags = CRYPTO_ALG_ASYNC |
1797 CRYPTO_ALG_ALLOCATES_MEMORY |
1798 CRYPTO_ALG_KERN_DRIVER_ONLY,
1799 .cra_blocksize = AES_BLOCK_SIZE,
1800 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1801 .cra_alignmask = 0,
1802 .cra_init = safexcel_aead_sha512_cra_init,
1803 .cra_exit = safexcel_aead_cra_exit,
1804 .cra_module = THIS_MODULE,
1805 },
1806 },
1807};
1808
1809static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
1810{
1811 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1812
1813 safexcel_aead_cra_init(tfm);
1814 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1815 ctx->state_sz = SHA512_DIGEST_SIZE;
1816 return 0;
1817}
1818
1819struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
1820 .type = SAFEXCEL_ALG_TYPE_AEAD,
1821 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
1822 .alg.aead = {
1823 .setkey = safexcel_aead_setkey,
1824 .encrypt = safexcel_aead_encrypt,
1825 .decrypt = safexcel_aead_decrypt,
1826 .ivsize = AES_BLOCK_SIZE,
1827 .maxauthsize = SHA384_DIGEST_SIZE,
1828 .base = {
1829 .cra_name = "authenc(hmac(sha384),cbc(aes))",
1830 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
1831 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1832 .cra_flags = CRYPTO_ALG_ASYNC |
1833 CRYPTO_ALG_ALLOCATES_MEMORY |
1834 CRYPTO_ALG_KERN_DRIVER_ONLY,
1835 .cra_blocksize = AES_BLOCK_SIZE,
1836 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1837 .cra_alignmask = 0,
1838 .cra_init = safexcel_aead_sha384_cra_init,
1839 .cra_exit = safexcel_aead_cra_exit,
1840 .cra_module = THIS_MODULE,
1841 },
1842 },
1843};
1844
static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

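/*
 * The rfc3686(ctr(aes)) AEAD variants below only switch the cipher mode
 * to counter mode ("CTR_LOAD"); everything else is inherited from the
 * CBC variants. As stream-mode transforms they advertise a cra_blocksize
 * of 1 and the 8-byte RFC3686 explicit IV.
 */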
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

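/*
 * XTS keys are two concatenated AES keys of equal size: the first half
 * is the data (cipher) key, the second half the tweak key. Both halves
 * are expanded and stored back to back in ctx->key. When the engine may
 * have cached the context record (TRC), any key change must be detected
 * so the old record can be invalidated first.
 */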
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

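/*
 * For GCM the GHASH hash subkey H = AES-K(0^128) is precomputed in
 * software with the AES library and stored big endian in the ipad area
 * of the context, so the engine does not have to derive it itself.
 */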
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	return 0;
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	safexcel_aead_cra_exit(tfm);
}

static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

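/*
 * CCM's CBC-MAC runs under the AES cipher key itself, so the key is
 * stored twice: once as the cipher key and once (big endian) behind two
 * blocks of intermediate MAC state in the ipad area; state_sz covers
 * both blocks plus the key. The hash algorithm token is selected by key
 * size (XCBC128/192/256).
 */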
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0;
	return 0;
}

static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

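/*
 * iv[0] carries the CCM L' value, i.e. the number of length octets
 * minus one; the CCM spec allows L = 2..8, so anything outside 1..7
 * is rejected before queueing.
 */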
static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
				     const u8 *key)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
	ctx->key_len = CHACHA_KEY_SIZE;
}

static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
					     const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);

	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->ctrinit = 0;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}

struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

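/*
 * For the IPsec ESP variant the 4-byte salt is carried as trailing key
 * material (as in the other rfc*-style transforms in this file); strip
 * it off and keep it as the nonce before programming the 32-byte
 * ChaCha20 key.
 */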
static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
					   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
						unsigned int authsize)
{
	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;
	return 0;
}

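/*
 * Note that the request context doubles as the fallback subrequest:
 * safexcel_aead_fallback_cra_init() sizes the reqsize so that either a
 * struct safexcel_cipher_req or a full fallback aead_request fits in it.
 */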
static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}

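/*
 * Allocate the fallback implementation: type 0 with mask
 * CRYPTO_ALG_ASYNC | CRYPTO_ALG_NEED_FALLBACK selects a synchronous
 * implementation that does not itself require a fallback, and the
 * request size is grown so the fallback subrequest can live inside our
 * own request context.
 */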
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}

static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->ctrinit = 0;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}

static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (len != SM4_KEY_SIZE)
		return -EINVAL;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, SM4_KEY_SIZE);
	ctx->key_len = SM4_KEY_SIZE;

	return 0;
}

static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_ENCRYPT);
}

static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	len -= CTR_RFC3686_NONCE_SIZE;

	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
}

static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

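/*
 * rfc4106 keys carry a 4-byte salt after the AES key proper; it is kept
 * as the nonce and stripped before the normal GCM key schedule runs.
 * CTR_RFC3686_NONCE_SIZE is reused here since both nonces are 4 bytes.
 */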
static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
3517 },
3518};
3519
3520static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3521 unsigned int len)
3522{
3523 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3524 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3525
3526 /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3527 *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3528 /* last 3 bytes of key are the nonce! */
3529 memcpy((u8 *)&ctx->nonce + 1, key + len -
3530 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3531 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3532
3533 len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3534 return safexcel_aead_ccm_setkey(ctfm, key, len);
3535}
3536
3537static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3538 unsigned int authsize)
3539{
3540 /* Borrowed from crypto/ccm.c */
3541 switch (authsize) {
3542 case 8:
3543 case 12:
3544 case 16:
3545 break;
3546 default:
3547 return -EINVAL;
3548 }
3549
3550 return 0;
3551}
3552
3553static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3554{
3555 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3556
3557 /* Borrowed from crypto/ccm.c */
3558 if (req->assoclen != 16 && req->assoclen != 20)
3559 return -EINVAL;
3560
3561 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3562}
3563
3564static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3565{
3566 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3567
3568 /* Borrowed from crypto/ccm.c */
3569 if (req->assoclen != 16 && req->assoclen != 20)
3570 return -EINVAL;
3571
3572 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3573}
3574
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	/* Skip the 8-byte ESP IV trailing the AAD; it is not authenticated */
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
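
/*
 * Minimal usage sketch (illustration only, not part of this driver):
 * reaching the "rfc4309(ccm(aes))" transform registered above through
 * the generic kernel AEAD API. The 19-byte key is an AES-128 key with
 * the 3-byte salt appended, exactly as safexcel_rfc4309_ccm_setkey()
 * expects; request setup and completion handling are omitted. Disabled
 * from the build with #if 0.
 */
#if 0
static int rfc4309_ccm_example(const u8 *key /* 19 bytes */)
{
	struct crypto_aead *tfm;
	int ret;

	tfm = crypto_alloc_aead("rfc4309(ccm(aes))", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_aead_setkey(tfm, key, AES_KEYSIZE_128 +
				 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
	if (!ret)
		ret = crypto_aead_setauthsize(tfm, 16);

	/* aead_request_alloc()/crypto_aead_encrypt() would follow here */

	crypto_free_aead(tfm);
	return ret;
}
#endif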
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
6 */
7
8#include <asm/unaligned.h>
9#include <linux/device.h>
10#include <linux/dma-mapping.h>
11#include <linux/dmapool.h>
12#include <crypto/aead.h>
13#include <crypto/aes.h>
14#include <crypto/authenc.h>
15#include <crypto/chacha.h>
16#include <crypto/ctr.h>
17#include <crypto/internal/des.h>
18#include <crypto/gcm.h>
19#include <crypto/ghash.h>
20#include <crypto/poly1305.h>
21#include <crypto/sha1.h>
22#include <crypto/sha2.h>
23#include <crypto/sm3.h>
24#include <crypto/sm4.h>
25#include <crypto/xts.h>
26#include <crypto/skcipher.h>
27#include <crypto/internal/aead.h>
28#include <crypto/internal/skcipher.h>
29
30#include "safexcel.h"
31
32enum safexcel_cipher_direction {
33 SAFEXCEL_ENCRYPT,
34 SAFEXCEL_DECRYPT,
35};
36
37enum safexcel_cipher_alg {
38 SAFEXCEL_DES,
39 SAFEXCEL_3DES,
40 SAFEXCEL_AES,
41 SAFEXCEL_CHACHA20,
42 SAFEXCEL_SM4,
43};
44
45struct safexcel_cipher_ctx {
46 struct safexcel_context base;
47 struct safexcel_crypto_priv *priv;
48
49 u32 mode;
50 enum safexcel_cipher_alg alg;
51 u8 aead; /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
52 u8 xcm; /* 0=authenc, 1=GCM, 2 reserved for CCM */
53 u8 aadskip;
54 u8 blocksz;
55 u32 ivmask;
56 u32 ctrinit;
57
58 __le32 key[16];
59 u32 nonce;
60 unsigned int key_len, xts;
61
62 /* All the below is AEAD specific */
63 u32 hash_alg;
64 u32 state_sz;
65
66 struct crypto_cipher *hkaes;
67 struct crypto_aead *fback;
68};
69
70struct safexcel_cipher_req {
71 enum safexcel_cipher_direction direction;
72 /* Number of result descriptors associated to the request */
73 unsigned int rdescs;
74 bool needs_inv;
75 int nr_src, nr_dst;
76};
77
78static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
79 struct safexcel_command_desc *cdesc)
80{
81 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
82 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
83 /* 32 bit nonce */
84 cdesc->control_data.token[0] = ctx->nonce;
85 /* 64 bit IV part */
86 memcpy(&cdesc->control_data.token[1], iv, 8);
87 /* 32 bit counter, start at 0 or 1 (big endian!) */
88 cdesc->control_data.token[3] =
89 (__force u32)cpu_to_be32(ctx->ctrinit);
90 return 4;
91 }
92 if (ctx->alg == SAFEXCEL_CHACHA20) {
93 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
94 /* 96 bit nonce part */
95 memcpy(&cdesc->control_data.token[0], &iv[4], 12);
96 /* 32 bit counter */
97 cdesc->control_data.token[3] = *(u32 *)iv;
98 return 4;
99 }
100
101 cdesc->control_data.options |= ctx->ivmask;
102 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
103 return ctx->blocksz / sizeof(u32);
104}
105
106static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
107 struct safexcel_command_desc *cdesc,
108 struct safexcel_token *atoken,
109 u32 length)
110{
111 struct safexcel_token *token;
112 int ivlen;
113
114 ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
115 if (ivlen == 4) {
116 /* No space in cdesc, instruction moves to atoken */
117 cdesc->additional_cdata_size = 1;
118 token = atoken;
119 } else {
120 /* Everything fits in cdesc */
121 token = (struct safexcel_token *)(cdesc->control_data.token + 2);
122 /* Need to pad with NOP */
123 eip197_noop_token(&token[1]);
124 }
125
126 token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
127 token->packet_length = length;
128 token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
129 EIP197_TOKEN_STAT_LAST_HASH;
130 token->instructions = EIP197_TOKEN_INS_LAST |
131 EIP197_TOKEN_INS_TYPE_CRYPTO |
132 EIP197_TOKEN_INS_TYPE_OUTPUT;
133}
134
135static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
136 struct safexcel_command_desc *cdesc)
137{
138 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
139 ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
140 /* 32 bit nonce */
141 cdesc->control_data.token[0] = ctx->nonce;
142 /* 64 bit IV part */
143 memcpy(&cdesc->control_data.token[1], iv, 8);
144 /* 32 bit counter, start at 0 or 1 (big endian!) */
145 cdesc->control_data.token[3] =
146 (__force u32)cpu_to_be32(ctx->ctrinit);
147 return;
148 }
149 if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
150 /* 96 bit IV part */
151 memcpy(&cdesc->control_data.token[0], iv, 12);
152 /* 32 bit counter, start at 0 or 1 (big endian!) */
153 cdesc->control_data.token[3] =
154 (__force u32)cpu_to_be32(ctx->ctrinit);
155 return;
156 }
157 /* CBC */
158 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
159}
160
161static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
162 struct safexcel_command_desc *cdesc,
163 struct safexcel_token *atoken,
164 enum safexcel_cipher_direction direction,
165 u32 cryptlen, u32 assoclen, u32 digestsize)
166{
167 struct safexcel_token *aadref;
168 int atoksize = 2; /* Start with minimum size */
169 int assocadj = assoclen - ctx->aadskip, aadalign;
170
171 /* Always 4 dwords of embedded IV for AEAD modes */
172 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
173
174 if (direction == SAFEXCEL_DECRYPT)
175 cryptlen -= digestsize;
176
177 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
178 /* Construct IV block B0 for the CBC-MAC */
179 u8 *final_iv = (u8 *)cdesc->control_data.token;
180 u8 *cbcmaciv = (u8 *)&atoken[1];
181 __le32 *aadlen = (__le32 *)&atoken[5];
182
183 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
184 /* Length + nonce */
185 cdesc->control_data.token[0] = ctx->nonce;
186 /* Fixup flags byte */
187 *(__le32 *)cbcmaciv =
188 cpu_to_le32(ctx->nonce |
189 ((assocadj > 0) << 6) |
190 ((digestsize - 2) << 2));
191 /* 64 bit IV part */
192 memcpy(&cdesc->control_data.token[1], iv, 8);
193 memcpy(cbcmaciv + 4, iv, 8);
194 /* Start counter at 0 */
195 cdesc->control_data.token[3] = 0;
196 /* Message length */
197 *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
198 } else {
199 /* Variable length IV part */
200 memcpy(final_iv, iv, 15 - iv[0]);
201 memcpy(cbcmaciv, iv, 15 - iv[0]);
202 /* Start variable length counter at 0 */
203 memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
204 memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
205 /* fixup flags byte */
206 cbcmaciv[0] |= ((assocadj > 0) << 6) |
207 ((digestsize - 2) << 2);
208 /* insert lower 2 bytes of message length */
209 cbcmaciv[14] = cryptlen >> 8;
210 cbcmaciv[15] = cryptlen & 255;
211 }
212
213 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
214 atoken->packet_length = AES_BLOCK_SIZE +
215 ((assocadj > 0) << 1);
216 atoken->stat = 0;
217 atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
218 EIP197_TOKEN_INS_TYPE_HASH;
219
220 if (likely(assocadj)) {
221 *aadlen = cpu_to_le32((assocadj >> 8) |
222 (assocadj & 255) << 8);
223 atoken += 6;
224 atoksize += 7;
225 } else {
226 atoken += 5;
227 atoksize += 6;
228 }
229
230 /* Process AAD data */
231 aadref = atoken;
232 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
233 atoken->packet_length = assocadj;
234 atoken->stat = 0;
235 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
236 atoken++;
237
238 /* For CCM only, align AAD data towards hash engine */
239 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
240 aadalign = (assocadj + 2) & 15;
241 atoken->packet_length = assocadj && aadalign ?
242 16 - aadalign :
243 0;
244 if (likely(cryptlen)) {
245 atoken->stat = 0;
246 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
247 } else {
248 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
249 atoken->instructions = EIP197_TOKEN_INS_LAST |
250 EIP197_TOKEN_INS_TYPE_HASH;
251 }
252 } else {
253 safexcel_aead_iv(ctx, iv, cdesc);
254
255 /* Process AAD data */
256 aadref = atoken;
257 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
258 atoken->packet_length = assocadj;
259 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
260 atoken->instructions = EIP197_TOKEN_INS_LAST |
261 EIP197_TOKEN_INS_TYPE_HASH;
262 }
263 atoken++;
264
265 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
266 /* For ESP mode (and not GMAC), skip over the IV */
267 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
268 atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
269 atoken->stat = 0;
270 atoken->instructions = 0;
271 atoken++;
272 atoksize++;
273 } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
274 direction == SAFEXCEL_DECRYPT)) {
275 /* Poly-chacha decryption needs a dummy NOP here ... */
276 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
277 atoken->packet_length = 16; /* According to Op Manual */
278 atoken->stat = 0;
279 atoken->instructions = 0;
280 atoken++;
281 atoksize++;
282 }
283
284 if (ctx->xcm) {
285 /* For GCM and CCM, obtain enc(Y0) */
286 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
287 atoken->packet_length = 0;
288 atoken->stat = 0;
289 atoken->instructions = AES_BLOCK_SIZE;
290 atoken++;
291
292 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
293 atoken->packet_length = AES_BLOCK_SIZE;
294 atoken->stat = 0;
295 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
296 EIP197_TOKEN_INS_TYPE_CRYPTO;
297 atoken++;
298 atoksize += 2;
299 }
300
301 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
302 /* Fixup stat field for AAD direction instruction */
303 aadref->stat = 0;
304
305 /* Process crypto data */
306 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
307 atoken->packet_length = cryptlen;
308
309 if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
310 /* Fixup instruction field for AAD dir instruction */
311 aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
312
313 /* Do not send to crypt engine in case of GMAC */
314 atoken->instructions = EIP197_TOKEN_INS_LAST |
315 EIP197_TOKEN_INS_TYPE_HASH |
316 EIP197_TOKEN_INS_TYPE_OUTPUT;
317 } else {
318 atoken->instructions = EIP197_TOKEN_INS_LAST |
319 EIP197_TOKEN_INS_TYPE_CRYPTO |
320 EIP197_TOKEN_INS_TYPE_HASH |
321 EIP197_TOKEN_INS_TYPE_OUTPUT;
322 }
323
324 cryptlen &= 15;
325 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
326 atoken->stat = 0;
327 /* For CCM only, pad crypto data to the hash engine */
328 atoken++;
329 atoksize++;
330 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
331 atoken->packet_length = 16 - cryptlen;
332 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
333 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
334 } else {
335 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
336 }
337 atoken++;
338 atoksize++;
339 }
340
341 if (direction == SAFEXCEL_ENCRYPT) {
342 /* Append ICV */
343 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
344 atoken->packet_length = digestsize;
345 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
346 EIP197_TOKEN_STAT_LAST_PACKET;
347 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
348 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
349 } else {
350 /* Extract ICV */
351 atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
352 atoken->packet_length = digestsize;
353 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
354 EIP197_TOKEN_STAT_LAST_PACKET;
355 atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
356 atoken++;
357 atoksize++;
358
359 /* Verify ICV */
360 atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
361 atoken->packet_length = digestsize |
362 EIP197_TOKEN_HASH_RESULT_VERIFY;
363 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
364 EIP197_TOKEN_STAT_LAST_PACKET;
365 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
366 }
367
368 /* Fixup length of the token in the command descriptor */
369 cdesc->additional_cdata_size = atoksize;
370}
371
372static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
373 const u8 *key, unsigned int len)
374{
375 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
376 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
377 struct safexcel_crypto_priv *priv = ctx->base.priv;
378 struct crypto_aes_ctx aes;
379 int ret, i;
380
381 ret = aes_expandkey(&aes, key, len);
382 if (ret)
383 return ret;
384
385 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
386 for (i = 0; i < len / sizeof(u32); i++) {
387 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
388 ctx->base.needs_inv = true;
389 break;
390 }
391 }
392 }
393
394 for (i = 0; i < len / sizeof(u32); i++)
395 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
396
397 ctx->key_len = len;
398
399 memzero_explicit(&aes, sizeof(aes));
400 return 0;
401}
402
403static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
404 unsigned int len)
405{
406 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
407 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
408 struct safexcel_crypto_priv *priv = ctx->base.priv;
409 struct crypto_authenc_keys keys;
410 struct crypto_aes_ctx aes;
411 int err = -EINVAL, i;
412 const char *alg;
413
414 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
415 goto badkey;
416
417 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
418 /* Must have at least space for the nonce here */
419 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
420 goto badkey;
421 /* last 4 bytes of key are the nonce! */
422 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
423 CTR_RFC3686_NONCE_SIZE);
424 /* exclude the nonce here */
425 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
426 }
427
428 /* Encryption key */
429 switch (ctx->alg) {
430 case SAFEXCEL_DES:
431 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
432 if (unlikely(err))
433 goto badkey;
434 break;
435 case SAFEXCEL_3DES:
436 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
437 if (unlikely(err))
438 goto badkey;
439 break;
440 case SAFEXCEL_AES:
441 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
442 if (unlikely(err))
443 goto badkey;
444 break;
445 case SAFEXCEL_SM4:
446 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
447 goto badkey;
448 break;
449 default:
450 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
451 goto badkey;
452 }
453
454 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
455 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
456 if (le32_to_cpu(ctx->key[i]) !=
457 ((u32 *)keys.enckey)[i]) {
458 ctx->base.needs_inv = true;
459 break;
460 }
461 }
462 }
463
464 /* Auth key */
465 switch (ctx->hash_alg) {
466 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
467 alg = "safexcel-sha1";
468 break;
469 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
470 alg = "safexcel-sha224";
471 break;
472 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
473 alg = "safexcel-sha256";
474 break;
475 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
476 alg = "safexcel-sha384";
477 break;
478 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
479 alg = "safexcel-sha512";
480 break;
481 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
482 alg = "safexcel-sm3";
483 break;
484 default:
485 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
486 goto badkey;
487 }
488
489 if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
490 alg, ctx->state_sz))
491 goto badkey;
492
493 /* Now copy the keys into the context */
494 for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
495 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
496 ctx->key_len = keys.enckeylen;
497
498 memzero_explicit(&keys, sizeof(keys));
499 return 0;
500
501badkey:
502 memzero_explicit(&keys, sizeof(keys));
503 return err;
504}
505
506static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
507 struct crypto_async_request *async,
508 struct safexcel_cipher_req *sreq,
509 struct safexcel_command_desc *cdesc)
510{
511 struct safexcel_crypto_priv *priv = ctx->base.priv;
512 int ctrl_size = ctx->key_len / sizeof(u32);
513
514 cdesc->control_data.control1 = ctx->mode;
515
516 if (ctx->aead) {
517 /* Take in account the ipad+opad digests */
518 if (ctx->xcm) {
519 ctrl_size += ctx->state_sz / sizeof(u32);
520 cdesc->control_data.control0 =
521 CONTEXT_CONTROL_KEY_EN |
522 CONTEXT_CONTROL_DIGEST_XCM |
523 ctx->hash_alg |
524 CONTEXT_CONTROL_SIZE(ctrl_size);
525 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
526 /* Chacha20-Poly1305 */
527 cdesc->control_data.control0 =
528 CONTEXT_CONTROL_KEY_EN |
529 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
530 (sreq->direction == SAFEXCEL_ENCRYPT ?
531 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
532 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
533 ctx->hash_alg |
534 CONTEXT_CONTROL_SIZE(ctrl_size);
535 return 0;
536 } else {
537 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
538 cdesc->control_data.control0 =
539 CONTEXT_CONTROL_KEY_EN |
540 CONTEXT_CONTROL_DIGEST_HMAC |
541 ctx->hash_alg |
542 CONTEXT_CONTROL_SIZE(ctrl_size);
543 }
544
545 if (sreq->direction == SAFEXCEL_ENCRYPT &&
546 (ctx->xcm == EIP197_XCM_MODE_CCM ||
547 ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
548 cdesc->control_data.control0 |=
549 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
550 else if (sreq->direction == SAFEXCEL_ENCRYPT)
551 cdesc->control_data.control0 |=
552 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
553 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
554 cdesc->control_data.control0 |=
555 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
556 else
557 cdesc->control_data.control0 |=
558 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
559 } else {
560 if (sreq->direction == SAFEXCEL_ENCRYPT)
561 cdesc->control_data.control0 =
562 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
563 CONTEXT_CONTROL_KEY_EN |
564 CONTEXT_CONTROL_SIZE(ctrl_size);
565 else
566 cdesc->control_data.control0 =
567 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
568 CONTEXT_CONTROL_KEY_EN |
569 CONTEXT_CONTROL_SIZE(ctrl_size);
570 }
571
572 if (ctx->alg == SAFEXCEL_DES) {
573 cdesc->control_data.control0 |=
574 CONTEXT_CONTROL_CRYPTO_ALG_DES;
575 } else if (ctx->alg == SAFEXCEL_3DES) {
576 cdesc->control_data.control0 |=
577 CONTEXT_CONTROL_CRYPTO_ALG_3DES;
578 } else if (ctx->alg == SAFEXCEL_AES) {
579 switch (ctx->key_len >> ctx->xts) {
580 case AES_KEYSIZE_128:
581 cdesc->control_data.control0 |=
582 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
583 break;
584 case AES_KEYSIZE_192:
585 cdesc->control_data.control0 |=
586 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
587 break;
588 case AES_KEYSIZE_256:
589 cdesc->control_data.control0 |=
590 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
591 break;
592 default:
593 dev_err(priv->dev, "aes keysize not supported: %u\n",
594 ctx->key_len >> ctx->xts);
595 return -EINVAL;
596 }
597 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
598 cdesc->control_data.control0 |=
599 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
600 } else if (ctx->alg == SAFEXCEL_SM4) {
601 cdesc->control_data.control0 |=
602 CONTEXT_CONTROL_CRYPTO_ALG_SM4;
603 }
604
605 return 0;
606}
607
608static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
609 struct crypto_async_request *async,
610 struct scatterlist *src,
611 struct scatterlist *dst,
612 unsigned int cryptlen,
613 struct safexcel_cipher_req *sreq,
614 bool *should_complete, int *ret)
615{
616 struct skcipher_request *areq = skcipher_request_cast(async);
617 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
618 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
619 struct safexcel_result_desc *rdesc;
620 int ndesc = 0;
621
622 *ret = 0;
623
624 if (unlikely(!sreq->rdescs))
625 return 0;
626
627 while (sreq->rdescs--) {
628 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
629 if (IS_ERR(rdesc)) {
630 dev_err(priv->dev,
631 "cipher: result: could not retrieve the result descriptor\n");
632 *ret = PTR_ERR(rdesc);
633 break;
634 }
635
636 if (likely(!*ret))
637 *ret = safexcel_rdesc_check_errors(priv, rdesc);
638
639 ndesc++;
640 }
641
642 safexcel_complete(priv, ring);
643
644 if (src == dst) {
645 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
646 } else {
647 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
648 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
649 }
650
651 /*
652 * Update IV in req from last crypto output word for CBC modes
653 */
654 if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
655 (sreq->direction == SAFEXCEL_ENCRYPT)) {
656 /* For encrypt take the last output word */
657 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
658 crypto_skcipher_ivsize(skcipher),
659 (cryptlen -
660 crypto_skcipher_ivsize(skcipher)));
661 }
662
663 *should_complete = true;
664
665 return ndesc;
666}
667
668static int safexcel_send_req(struct crypto_async_request *base, int ring,
669 struct safexcel_cipher_req *sreq,
670 struct scatterlist *src, struct scatterlist *dst,
671 unsigned int cryptlen, unsigned int assoclen,
672 unsigned int digestsize, u8 *iv, int *commands,
673 int *results)
674{
675 struct skcipher_request *areq = skcipher_request_cast(base);
676 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
677 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
678 struct safexcel_crypto_priv *priv = ctx->base.priv;
679 struct safexcel_command_desc *cdesc;
680 struct safexcel_command_desc *first_cdesc = NULL;
681 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
682 struct scatterlist *sg;
683 unsigned int totlen;
684 unsigned int totlen_src = cryptlen + assoclen;
685 unsigned int totlen_dst = totlen_src;
686 struct safexcel_token *atoken;
687 int n_cdesc = 0, n_rdesc = 0;
688 int queued, i, ret = 0;
689 bool first = true;
690
691 sreq->nr_src = sg_nents_for_len(src, totlen_src);
692
693 if (ctx->aead) {
694 /*
695 * AEAD has auth tag appended to output for encrypt and
696 * removed from the output for decrypt!
697 */
698 if (sreq->direction == SAFEXCEL_DECRYPT)
699 totlen_dst -= digestsize;
700 else
701 totlen_dst += digestsize;
702
703 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
704 &ctx->base.ipad, ctx->state_sz);
705 if (!ctx->xcm)
706 memcpy(ctx->base.ctxr->data + (ctx->key_len +
707 ctx->state_sz) / sizeof(u32), &ctx->base.opad,
708 ctx->state_sz);
709 } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
710 (sreq->direction == SAFEXCEL_DECRYPT)) {
711 /*
712 * Save IV from last crypto input word for CBC modes in decrypt
713 * direction. Need to do this first in case of inplace operation
714 * as it will be overwritten.
715 */
716 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
717 crypto_skcipher_ivsize(skcipher),
718 (totlen_src -
719 crypto_skcipher_ivsize(skcipher)));
720 }
721
722 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
723
724 /*
725 * Remember actual input length, source buffer length may be
726 * updated in case of inline operation below.
727 */
728 totlen = totlen_src;
729 queued = totlen_src;
730
731 if (src == dst) {
732 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
733 sreq->nr_dst = sreq->nr_src;
734 if (unlikely((totlen_src || totlen_dst) &&
735 (sreq->nr_src <= 0))) {
736 dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
737 max(totlen_src, totlen_dst));
738 return -EINVAL;
739 }
740 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
741 } else {
742 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
743 dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
744 totlen_src);
745 return -EINVAL;
746 }
747 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
748
749 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
750 dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
751 totlen_dst);
752 dma_unmap_sg(priv->dev, src, sreq->nr_src,
753 DMA_TO_DEVICE);
754 return -EINVAL;
755 }
756 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
757 }
758
759 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
760
761 if (!totlen) {
762 /*
763 * The EIP97 cannot deal with zero length input packets!
764 * So stuff a dummy command descriptor indicating a 1 byte
765 * (dummy) input packet, using the context record as source.
766 */
767 first_cdesc = safexcel_add_cdesc(priv, ring,
768 1, 1, ctx->base.ctxr_dma,
769 1, 1, ctx->base.ctxr_dma,
770 &atoken);
771 if (IS_ERR(first_cdesc)) {
772 /* No space left in the command descriptor ring */
773 ret = PTR_ERR(first_cdesc);
774 goto cdesc_rollback;
775 }
776 n_cdesc = 1;
777 goto skip_cdesc;
778 }
779
780 /* command descriptors */
781 for_each_sg(src, sg, sreq->nr_src, i) {
782 int len = sg_dma_len(sg);
783
784 /* Do not overflow the request */
785 if (queued < len)
786 len = queued;
787
788 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
789 !(queued - len),
790 sg_dma_address(sg), len, totlen,
791 ctx->base.ctxr_dma, &atoken);
792 if (IS_ERR(cdesc)) {
793 /* No space left in the command descriptor ring */
794 ret = PTR_ERR(cdesc);
795 goto cdesc_rollback;
796 }
797
798 if (!n_cdesc)
799 first_cdesc = cdesc;
800
801 n_cdesc++;
802 queued -= len;
803 if (!queued)
804 break;
805 }
806skip_cdesc:
807 /* Add context control words and token to first command descriptor */
808 safexcel_context_control(ctx, base, sreq, first_cdesc);
809 if (ctx->aead)
810 safexcel_aead_token(ctx, iv, first_cdesc, atoken,
811 sreq->direction, cryptlen,
812 assoclen, digestsize);
813 else
814 safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
815 cryptlen);
816
817 /* result descriptors */
818 for_each_sg(dst, sg, sreq->nr_dst, i) {
819 bool last = (i == sreq->nr_dst - 1);
820 u32 len = sg_dma_len(sg);
821
822 /* only allow the part of the buffer we know we need */
823 if (len > totlen_dst)
824 len = totlen_dst;
825 if (unlikely(!len))
826 break;
827 totlen_dst -= len;
828
829 /* skip over AAD space in buffer - not written */
830 if (assoclen) {
831 if (assoclen >= len) {
832 assoclen -= len;
833 continue;
834 }
835 rdesc = safexcel_add_rdesc(priv, ring, first, last,
836 sg_dma_address(sg) +
837 assoclen,
838 len - assoclen);
839 assoclen = 0;
840 } else {
841 rdesc = safexcel_add_rdesc(priv, ring, first, last,
842 sg_dma_address(sg),
843 len);
844 }
845 if (IS_ERR(rdesc)) {
846 /* No space left in the result descriptor ring */
847 ret = PTR_ERR(rdesc);
848 goto rdesc_rollback;
849 }
850 if (first) {
851 first_rdesc = rdesc;
852 first = false;
853 }
854 n_rdesc++;
855 }
856
857 if (unlikely(first)) {
858 /*
859 * Special case: AEAD decrypt with only AAD data.
860 * In this case there is NO output data from the engine,
861 * but the engine still needs a result descriptor!
862 * Create a dummy one just for catching the result token.
863 */
864 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
865 if (IS_ERR(rdesc)) {
866 /* No space left in the result descriptor ring */
867 ret = PTR_ERR(rdesc);
868 goto rdesc_rollback;
869 }
870 first_rdesc = rdesc;
871 n_rdesc = 1;
872 }
873
874 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
875
876 *commands = n_cdesc;
877 *results = n_rdesc;
878 return 0;
879
880rdesc_rollback:
881 for (i = 0; i < n_rdesc; i++)
882 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
883cdesc_rollback:
884 for (i = 0; i < n_cdesc; i++)
885 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
886
887 if (src == dst) {
888 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
889 } else {
890 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
891 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
892 }
893
894 return ret;
895}
896
897static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
898 int ring,
899 struct crypto_async_request *base,
900 struct safexcel_cipher_req *sreq,
901 bool *should_complete, int *ret)
902{
903 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
904 struct safexcel_result_desc *rdesc;
905 int ndesc = 0, enq_ret;
906
907 *ret = 0;
908
909 if (unlikely(!sreq->rdescs))
910 return 0;
911
912 while (sreq->rdescs--) {
913 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
914 if (IS_ERR(rdesc)) {
915 dev_err(priv->dev,
916 "cipher: invalidate: could not retrieve the result descriptor\n");
917 *ret = PTR_ERR(rdesc);
918 break;
919 }
920
921 if (likely(!*ret))
922 *ret = safexcel_rdesc_check_errors(priv, rdesc);
923
924 ndesc++;
925 }
926
927 safexcel_complete(priv, ring);
928
929 if (ctx->base.exit_inv) {
930 dma_pool_free(priv->context_pool, ctx->base.ctxr,
931 ctx->base.ctxr_dma);
932
933 *should_complete = true;
934
935 return ndesc;
936 }
937
938 ring = safexcel_select_ring(priv);
939 ctx->base.ring = ring;
940
941 spin_lock_bh(&priv->ring[ring].queue_lock);
942 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
943 spin_unlock_bh(&priv->ring[ring].queue_lock);
944
945 if (enq_ret != -EINPROGRESS)
946 *ret = enq_ret;
947
948 queue_work(priv->ring[ring].workqueue,
949 &priv->ring[ring].work_data.work);
950
951 *should_complete = false;
952
953 return ndesc;
954}
955
956static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
957 int ring,
958 struct crypto_async_request *async,
959 bool *should_complete, int *ret)
960{
961 struct skcipher_request *req = skcipher_request_cast(async);
962 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
963 int err;
964
965 if (sreq->needs_inv) {
966 sreq->needs_inv = false;
967 err = safexcel_handle_inv_result(priv, ring, async, sreq,
968 should_complete, ret);
969 } else {
970 err = safexcel_handle_req_result(priv, ring, async, req->src,
971 req->dst, req->cryptlen, sreq,
972 should_complete, ret);
973 }
974
975 return err;
976}
977
978static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
979 int ring,
980 struct crypto_async_request *async,
981 bool *should_complete, int *ret)
982{
983 struct aead_request *req = aead_request_cast(async);
984 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
985 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
986 int err;
987
988 if (sreq->needs_inv) {
989 sreq->needs_inv = false;
990 err = safexcel_handle_inv_result(priv, ring, async, sreq,
991 should_complete, ret);
992 } else {
993 err = safexcel_handle_req_result(priv, ring, async, req->src,
994 req->dst,
995 req->cryptlen + crypto_aead_authsize(tfm),
996 sreq, should_complete, ret);
997 }
998
999 return err;
1000}
1001
1002static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1003 int ring, int *commands, int *results)
1004{
1005 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1006 struct safexcel_crypto_priv *priv = ctx->base.priv;
1007 int ret;
1008
1009 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1010 if (unlikely(ret))
1011 return ret;
1012
1013 *commands = 1;
1014 *results = 1;
1015
1016 return 0;
1017}
1018
1019static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1020 int *commands, int *results)
1021{
1022 struct skcipher_request *req = skcipher_request_cast(async);
1023 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1024 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1025 struct safexcel_crypto_priv *priv = ctx->base.priv;
1026 int ret;
1027
1028 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1029
1030 if (sreq->needs_inv) {
1031 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1032 } else {
1033 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1034 u8 input_iv[AES_BLOCK_SIZE];
1035
1036 /*
1037 * Save input IV in case of CBC decrypt mode
1038 * Will be overwritten with output IV prior to use!
1039 */
1040 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1041
1042 ret = safexcel_send_req(async, ring, sreq, req->src,
1043 req->dst, req->cryptlen, 0, 0, input_iv,
1044 commands, results);
1045 }
1046
1047 sreq->rdescs = *results;
1048 return ret;
1049}
1050
1051static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1052 int *commands, int *results)
1053{
1054 struct aead_request *req = aead_request_cast(async);
1055 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1056 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1057 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1058 struct safexcel_crypto_priv *priv = ctx->base.priv;
1059 int ret;
1060
1061 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1062
1063 if (sreq->needs_inv)
1064 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1065 else
1066 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1067 req->cryptlen, req->assoclen,
1068 crypto_aead_authsize(tfm), req->iv,
1069 commands, results);
1070 sreq->rdescs = *results;
1071 return ret;
1072}
1073
1074static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1075 struct crypto_async_request *base,
1076 struct safexcel_cipher_req *sreq,
1077 struct safexcel_inv_result *result)
1078{
1079 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1080 struct safexcel_crypto_priv *priv = ctx->base.priv;
1081 int ring = ctx->base.ring;
1082
1083 init_completion(&result->completion);
1084
1085 ctx = crypto_tfm_ctx(base->tfm);
1086 ctx->base.exit_inv = true;
1087 sreq->needs_inv = true;
1088
1089 spin_lock_bh(&priv->ring[ring].queue_lock);
1090 crypto_enqueue_request(&priv->ring[ring].queue, base);
1091 spin_unlock_bh(&priv->ring[ring].queue_lock);
1092
1093 queue_work(priv->ring[ring].workqueue,
1094 &priv->ring[ring].work_data.work);
1095
1096 wait_for_completion(&result->completion);
1097
1098 if (result->error) {
1099 dev_warn(priv->dev,
1100 "cipher: sync: invalidate: completion error %d\n",
1101 result->error);
1102 return result->error;
1103 }
1104
1105 return 0;
1106}
1107
1108static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1109{
1110 EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1111 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1112 struct safexcel_inv_result result = {};
1113
1114 memset(req, 0, sizeof(struct skcipher_request));
1115
1116 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1117 safexcel_inv_complete, &result);
1118 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1119
1120 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1121}
1122
1123static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1124{
1125 EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1126 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1127 struct safexcel_inv_result result = {};
1128
1129 memset(req, 0, sizeof(struct aead_request));
1130
1131 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1132 safexcel_inv_complete, &result);
1133 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1134
1135 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1136}
1137
1138static int safexcel_queue_req(struct crypto_async_request *base,
1139 struct safexcel_cipher_req *sreq,
1140 enum safexcel_cipher_direction dir)
1141{
1142 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1143 struct safexcel_crypto_priv *priv = ctx->base.priv;
1144 int ret, ring;
1145
1146 sreq->needs_inv = false;
1147 sreq->direction = dir;
1148
1149 if (ctx->base.ctxr) {
1150 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1151 sreq->needs_inv = true;
1152 ctx->base.needs_inv = false;
1153 }
1154 } else {
1155 ctx->base.ring = safexcel_select_ring(priv);
1156 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1157 EIP197_GFP_FLAGS(*base),
1158 &ctx->base.ctxr_dma);
1159 if (!ctx->base.ctxr)
1160 return -ENOMEM;
1161 }
1162
1163 ring = ctx->base.ring;
1164
1165 spin_lock_bh(&priv->ring[ring].queue_lock);
1166 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1167 spin_unlock_bh(&priv->ring[ring].queue_lock);
1168
1169 queue_work(priv->ring[ring].workqueue,
1170 &priv->ring[ring].work_data.work);
1171
1172 return ret;
1173}
1174
1175static int safexcel_encrypt(struct skcipher_request *req)
1176{
1177 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1178 SAFEXCEL_ENCRYPT);
1179}
1180
1181static int safexcel_decrypt(struct skcipher_request *req)
1182{
1183 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1184 SAFEXCEL_DECRYPT);
1185}
1186
1187static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1188{
1189 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1190 struct safexcel_alg_template *tmpl =
1191 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1192 alg.skcipher.base);
1193
1194 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1195 sizeof(struct safexcel_cipher_req));
1196
1197 ctx->base.priv = tmpl->priv;
1198
1199 ctx->base.send = safexcel_skcipher_send;
1200 ctx->base.handle_result = safexcel_skcipher_handle_result;
1201 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1202 ctx->ctrinit = 1;
1203 return 0;
1204}
1205
1206static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1207{
1208 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1209
1210 memzero_explicit(ctx->key, sizeof(ctx->key));
1211
1212 /* context not allocated, skip invalidation */
1213 if (!ctx->base.ctxr)
1214 return -ENOMEM;
1215
1216 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1217 return 0;
1218}
1219
1220static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1221{
1222 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1223 struct safexcel_crypto_priv *priv = ctx->base.priv;
1224 int ret;
1225
1226 if (safexcel_cipher_cra_exit(tfm))
1227 return;
1228
1229 if (priv->flags & EIP197_TRC_CACHE) {
1230 ret = safexcel_skcipher_exit_inv(tfm);
1231 if (ret)
1232 dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1233 ret);
1234 } else {
1235 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1236 ctx->base.ctxr_dma);
1237 }
1238}
1239
1240static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1241{
1242 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1243 struct safexcel_crypto_priv *priv = ctx->base.priv;
1244 int ret;
1245
1246 if (safexcel_cipher_cra_exit(tfm))
1247 return;
1248
1249 if (priv->flags & EIP197_TRC_CACHE) {
1250 ret = safexcel_aead_exit_inv(tfm);
1251 if (ret)
1252 dev_warn(priv->dev, "aead: invalidation error %d\n",
1253 ret);
1254 } else {
1255 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1256 ctx->base.ctxr_dma);
1257 }
1258}
1259
1260static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1261{
1262 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1263
1264 safexcel_skcipher_cra_init(tfm);
1265 ctx->alg = SAFEXCEL_AES;
1266 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1267 ctx->blocksz = 0;
1268 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1269 return 0;
1270}
1271
1272struct safexcel_alg_template safexcel_alg_ecb_aes = {
1273 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1274 .algo_mask = SAFEXCEL_ALG_AES,
1275 .alg.skcipher = {
1276 .setkey = safexcel_skcipher_aes_setkey,
1277 .encrypt = safexcel_encrypt,
1278 .decrypt = safexcel_decrypt,
1279 .min_keysize = AES_MIN_KEY_SIZE,
1280 .max_keysize = AES_MAX_KEY_SIZE,
1281 .base = {
1282 .cra_name = "ecb(aes)",
1283 .cra_driver_name = "safexcel-ecb-aes",
1284 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1285 .cra_flags = CRYPTO_ALG_ASYNC |
1286 CRYPTO_ALG_ALLOCATES_MEMORY |
1287 CRYPTO_ALG_KERN_DRIVER_ONLY,
1288 .cra_blocksize = AES_BLOCK_SIZE,
1289 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1290 .cra_alignmask = 0,
1291 .cra_init = safexcel_skcipher_aes_ecb_cra_init,
1292 .cra_exit = safexcel_skcipher_cra_exit,
1293 .cra_module = THIS_MODULE,
1294 },
1295 },
1296};
1297
1298static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1299{
1300 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1301
1302 safexcel_skcipher_cra_init(tfm);
1303 ctx->alg = SAFEXCEL_AES;
1304 ctx->blocksz = AES_BLOCK_SIZE;
1305 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1306 return 0;
1307}
1308
1309struct safexcel_alg_template safexcel_alg_cbc_aes = {
1310 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1311 .algo_mask = SAFEXCEL_ALG_AES,
1312 .alg.skcipher = {
1313 .setkey = safexcel_skcipher_aes_setkey,
1314 .encrypt = safexcel_encrypt,
1315 .decrypt = safexcel_decrypt,
1316 .min_keysize = AES_MIN_KEY_SIZE,
1317 .max_keysize = AES_MAX_KEY_SIZE,
1318 .ivsize = AES_BLOCK_SIZE,
1319 .base = {
1320 .cra_name = "cbc(aes)",
1321 .cra_driver_name = "safexcel-cbc-aes",
1322 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1323 .cra_flags = CRYPTO_ALG_ASYNC |
1324 CRYPTO_ALG_ALLOCATES_MEMORY |
1325 CRYPTO_ALG_KERN_DRIVER_ONLY,
1326 .cra_blocksize = AES_BLOCK_SIZE,
1327 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1328 .cra_alignmask = 0,
1329 .cra_init = safexcel_skcipher_aes_cbc_cra_init,
1330 .cra_exit = safexcel_skcipher_cra_exit,
1331 .cra_module = THIS_MODULE,
1332 },
1333 },
1334};
1335
1336static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1337{
1338 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1339
1340 safexcel_skcipher_cra_init(tfm);
1341 ctx->alg = SAFEXCEL_AES;
1342 ctx->blocksz = AES_BLOCK_SIZE;
1343 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1344 return 0;
1345}
1346
1347struct safexcel_alg_template safexcel_alg_cfb_aes = {
1348 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1349 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1350 .alg.skcipher = {
1351 .setkey = safexcel_skcipher_aes_setkey,
1352 .encrypt = safexcel_encrypt,
1353 .decrypt = safexcel_decrypt,
1354 .min_keysize = AES_MIN_KEY_SIZE,
1355 .max_keysize = AES_MAX_KEY_SIZE,
1356 .ivsize = AES_BLOCK_SIZE,
1357 .base = {
1358 .cra_name = "cfb(aes)",
1359 .cra_driver_name = "safexcel-cfb-aes",
1360 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1361 .cra_flags = CRYPTO_ALG_ASYNC |
1362 CRYPTO_ALG_ALLOCATES_MEMORY |
1363 CRYPTO_ALG_KERN_DRIVER_ONLY,
1364 .cra_blocksize = 1,
1365 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1366 .cra_alignmask = 0,
1367 .cra_init = safexcel_skcipher_aes_cfb_cra_init,
1368 .cra_exit = safexcel_skcipher_cra_exit,
1369 .cra_module = THIS_MODULE,
1370 },
1371 },
1372};
1373
1374static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1375{
1376 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1377
1378 safexcel_skcipher_cra_init(tfm);
1379 ctx->alg = SAFEXCEL_AES;
1380 ctx->blocksz = AES_BLOCK_SIZE;
1381 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1382 return 0;
1383}
1384
1385struct safexcel_alg_template safexcel_alg_ofb_aes = {
1386 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1387 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1388 .alg.skcipher = {
1389 .setkey = safexcel_skcipher_aes_setkey,
1390 .encrypt = safexcel_encrypt,
1391 .decrypt = safexcel_decrypt,
1392 .min_keysize = AES_MIN_KEY_SIZE,
1393 .max_keysize = AES_MAX_KEY_SIZE,
1394 .ivsize = AES_BLOCK_SIZE,
1395 .base = {
1396 .cra_name = "ofb(aes)",
1397 .cra_driver_name = "safexcel-ofb-aes",
1398 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1399 .cra_flags = CRYPTO_ALG_ASYNC |
1400 CRYPTO_ALG_ALLOCATES_MEMORY |
1401 CRYPTO_ALG_KERN_DRIVER_ONLY,
1402 .cra_blocksize = 1,
1403 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1404 .cra_alignmask = 0,
1405 .cra_init = safexcel_skcipher_aes_ofb_cra_init,
1406 .cra_exit = safexcel_skcipher_cra_exit,
1407 .cra_module = THIS_MODULE,
1408 },
1409 },
1410};
1411
1412static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1413 const u8 *key, unsigned int len)
1414{
1415 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1416 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1417 struct safexcel_crypto_priv *priv = ctx->base.priv;
1418 struct crypto_aes_ctx aes;
1419 int ret, i;
1420 unsigned int keylen;
1421
1422 /* last 4 bytes of key are the nonce! */
1423 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1424 /* exclude the nonce here */
1425 keylen = len - CTR_RFC3686_NONCE_SIZE;
1426 ret = aes_expandkey(&aes, key, keylen);
1427 if (ret)
1428 return ret;
1429
1430 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1431 for (i = 0; i < keylen / sizeof(u32); i++) {
1432 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1433 ctx->base.needs_inv = true;
1434 break;
1435 }
1436 }
1437 }
1438
1439 for (i = 0; i < keylen / sizeof(u32); i++)
1440 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1441
1442 ctx->key_len = keylen;
1443
1444 memzero_explicit(&aes, sizeof(aes));
1445 return 0;
1446}
1447
1448static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1449{
1450 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1451
1452 safexcel_skcipher_cra_init(tfm);
1453 ctx->alg = SAFEXCEL_AES;
1454 ctx->blocksz = AES_BLOCK_SIZE;
1455 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1456 return 0;
1457}
1458
1459struct safexcel_alg_template safexcel_alg_ctr_aes = {
1460 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1461 .algo_mask = SAFEXCEL_ALG_AES,
1462 .alg.skcipher = {
1463 .setkey = safexcel_skcipher_aesctr_setkey,
1464 .encrypt = safexcel_encrypt,
1465 .decrypt = safexcel_decrypt,
1466 /* Add nonce size */
1467 .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1468 .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1469 .ivsize = CTR_RFC3686_IV_SIZE,
1470 .base = {
1471 .cra_name = "rfc3686(ctr(aes))",
1472 .cra_driver_name = "safexcel-ctr-aes",
1473 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1474 .cra_flags = CRYPTO_ALG_ASYNC |
1475 CRYPTO_ALG_ALLOCATES_MEMORY |
1476 CRYPTO_ALG_KERN_DRIVER_ONLY,
1477 .cra_blocksize = 1,
1478 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1479 .cra_alignmask = 0,
1480 .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1481 .cra_exit = safexcel_skcipher_cra_exit,
1482 .cra_module = THIS_MODULE,
1483 },
1484 },
1485};
1486
1487static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1488 unsigned int len)
1489{
1490 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1491 struct safexcel_crypto_priv *priv = ctx->base.priv;
1492 int ret;
1493
1494 ret = verify_skcipher_des_key(ctfm, key);
1495 if (ret)
1496 return ret;
1497
1498 /* if context exits and key changed, need to invalidate it */
1499 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1500 if (memcmp(ctx->key, key, len))
1501 ctx->base.needs_inv = true;
1502
1503 memcpy(ctx->key, key, len);
1504 ctx->key_len = len;
1505
1506 return 0;
1507}
1508
1509static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1510{
1511 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1512
1513 safexcel_skcipher_cra_init(tfm);
1514 ctx->alg = SAFEXCEL_DES;
1515 ctx->blocksz = DES_BLOCK_SIZE;
1516 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1517 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1518 return 0;
1519}
1520
1521struct safexcel_alg_template safexcel_alg_cbc_des = {
1522 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1523 .algo_mask = SAFEXCEL_ALG_DES,
1524 .alg.skcipher = {
1525 .setkey = safexcel_des_setkey,
1526 .encrypt = safexcel_encrypt,
1527 .decrypt = safexcel_decrypt,
1528 .min_keysize = DES_KEY_SIZE,
1529 .max_keysize = DES_KEY_SIZE,
1530 .ivsize = DES_BLOCK_SIZE,
1531 .base = {
1532 .cra_name = "cbc(des)",
1533 .cra_driver_name = "safexcel-cbc-des",
1534 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1535 .cra_flags = CRYPTO_ALG_ASYNC |
1536 CRYPTO_ALG_ALLOCATES_MEMORY |
1537 CRYPTO_ALG_KERN_DRIVER_ONLY,
1538 .cra_blocksize = DES_BLOCK_SIZE,
1539 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1540 .cra_alignmask = 0,
1541 .cra_init = safexcel_skcipher_des_cbc_cra_init,
1542 .cra_exit = safexcel_skcipher_cra_exit,
1543 .cra_module = THIS_MODULE,
1544 },
1545 },
1546};
1547
1548static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1549{
1550 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1551
1552 safexcel_skcipher_cra_init(tfm);
1553 ctx->alg = SAFEXCEL_DES;
1554 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1555 ctx->blocksz = 0;
1556 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1557 return 0;
1558}
1559
1560struct safexcel_alg_template safexcel_alg_ecb_des = {
1561 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1562 .algo_mask = SAFEXCEL_ALG_DES,
1563 .alg.skcipher = {
1564 .setkey = safexcel_des_setkey,
1565 .encrypt = safexcel_encrypt,
1566 .decrypt = safexcel_decrypt,
1567 .min_keysize = DES_KEY_SIZE,
1568 .max_keysize = DES_KEY_SIZE,
1569 .base = {
1570 .cra_name = "ecb(des)",
1571 .cra_driver_name = "safexcel-ecb-des",
1572 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1573 .cra_flags = CRYPTO_ALG_ASYNC |
1574 CRYPTO_ALG_ALLOCATES_MEMORY |
1575 CRYPTO_ALG_KERN_DRIVER_ONLY,
1576 .cra_blocksize = DES_BLOCK_SIZE,
1577 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1578 .cra_alignmask = 0,
1579 .cra_init = safexcel_skcipher_des_ecb_cra_init,
1580 .cra_exit = safexcel_skcipher_cra_exit,
1581 .cra_module = THIS_MODULE,
1582 },
1583 },
1584};
1585
1586static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1587 const u8 *key, unsigned int len)
1588{
1589 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1590 struct safexcel_crypto_priv *priv = ctx->base.priv;
1591 int err;
1592
1593 err = verify_skcipher_des3_key(ctfm, key);
1594 if (err)
1595 return err;
1596
1597 /* if context exits and key changed, need to invalidate it */
1598 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1599 if (memcmp(ctx->key, key, len))
1600 ctx->base.needs_inv = true;
1601
1602 memcpy(ctx->key, key, len);
1603 ctx->key_len = len;
1604
1605 return 0;
1606}
1607
1608static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1609{
1610 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1611
1612 safexcel_skcipher_cra_init(tfm);
1613 ctx->alg = SAFEXCEL_3DES;
1614 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1615 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1616 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1617 return 0;
1618}
1619
1620struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1621 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1622 .algo_mask = SAFEXCEL_ALG_DES,
1623 .alg.skcipher = {
1624 .setkey = safexcel_des3_ede_setkey,
1625 .encrypt = safexcel_encrypt,
1626 .decrypt = safexcel_decrypt,
1627 .min_keysize = DES3_EDE_KEY_SIZE,
1628 .max_keysize = DES3_EDE_KEY_SIZE,
1629 .ivsize = DES3_EDE_BLOCK_SIZE,
1630 .base = {
1631 .cra_name = "cbc(des3_ede)",
1632 .cra_driver_name = "safexcel-cbc-des3_ede",
1633 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1634 .cra_flags = CRYPTO_ALG_ASYNC |
1635 CRYPTO_ALG_ALLOCATES_MEMORY |
1636 CRYPTO_ALG_KERN_DRIVER_ONLY,
1637 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1638 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1639 .cra_alignmask = 0,
1640 .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1641 .cra_exit = safexcel_skcipher_cra_exit,
1642 .cra_module = THIS_MODULE,
1643 },
1644 },
1645};
1646
1647static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1648{
1649 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1650
1651 safexcel_skcipher_cra_init(tfm);
1652 ctx->alg = SAFEXCEL_3DES;
1653 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1654 ctx->blocksz = 0;
1655 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1656 return 0;
1657}
1658
1659struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1660 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1661 .algo_mask = SAFEXCEL_ALG_DES,
1662 .alg.skcipher = {
1663 .setkey = safexcel_des3_ede_setkey,
1664 .encrypt = safexcel_encrypt,
1665 .decrypt = safexcel_decrypt,
1666 .min_keysize = DES3_EDE_KEY_SIZE,
1667 .max_keysize = DES3_EDE_KEY_SIZE,
1668 .base = {
1669 .cra_name = "ecb(des3_ede)",
1670 .cra_driver_name = "safexcel-ecb-des3_ede",
1671 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1672 .cra_flags = CRYPTO_ALG_ASYNC |
1673 CRYPTO_ALG_ALLOCATES_MEMORY |
1674 CRYPTO_ALG_KERN_DRIVER_ONLY,
1675 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1676 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1677 .cra_alignmask = 0,
1678 .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1679 .cra_exit = safexcel_skcipher_cra_exit,
1680 .cra_module = THIS_MODULE,
1681 },
1682 },
1683};
1684
1685static int safexcel_aead_encrypt(struct aead_request *req)
1686{
1687 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1688
1689 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1690}
1691
1692static int safexcel_aead_decrypt(struct aead_request *req)
1693{
1694 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1695
1696 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1697}
1698
1699static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
1700{
1701 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1702 struct safexcel_alg_template *tmpl =
1703 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1704 alg.aead.base);
1705
1706 crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
1707 sizeof(struct safexcel_cipher_req));
1708
1709 ctx->base.priv = tmpl->priv;
1710
1711 ctx->alg = SAFEXCEL_AES; /* default */
1712 ctx->blocksz = AES_BLOCK_SIZE;
1713 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1714 ctx->ctrinit = 1;
1715 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
1716 ctx->aead = true;
1717 ctx->base.send = safexcel_aead_send;
1718 ctx->base.handle_result = safexcel_aead_handle_result;
1719 return 0;
1720}
1721
static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

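/*
 * DES3_EDE variants of the above: run the matching AES cra_init first,
 * then override the cipher algorithm, block size and IV option mask.
 */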
static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

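/* Single DES variants, following the same override pattern as 3DES. */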
static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

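/*
 * rfc3686 AES-CTR variants: identical to the CBC versions above except
 * that the crypto mode is overridden to CTR_LOAD; the digest specific
 * setup is inherited from the corresponding CBC cra_init.
 */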
static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

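/*
 * xts(aes): the supplied key is the concatenation of the cipher key and
 * the tweak key, so only half of it feeds each AES key schedule. When
 * the engine's transform record cache may still hold the old context,
 * a change in either half forces a context invalidation first.
 */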
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

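/*
 * XTS needs at least one full cipher block of input, so requests
 * shorter than that are rejected before they ever reach the queue.
 */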
static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

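/*
 * gcm(aes): besides loading the AES key itself, setkey derives the
 * GHASH hash key H = E_K(0^128) by encrypting an all-zero block with
 * a software AES cipher instance and stores it big-endian in the ipad
 * area of the context record.
 */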
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
	if (ret) {
		/* Don't leave the expanded key lying around on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	memset(hashkey, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

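/*
 * The AES cipher allocated here (ctx->hkaes) exists only to compute the
 * GHASH hash key during setkey; it is never used on the data path.
 */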
static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->hkaes);
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->hkaes);
	safexcel_aead_cra_exit(tfm);
}

static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

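/*
 * ccm(aes): CCM uses the same key for the CTR encryption and for the
 * CBC-MAC, so setkey stores it twice: once as the cipher key and once,
 * behind two reserved AES blocks in the ipad area, as the (X)CBC-MAC
 * key for the hash engine.
 */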
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0;
	return 0;
}

static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

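/*
 * iv[0] carries the CCM L' parameter (length-of-length minus one).
 * RFC 3610 only defines L = 2..8, so anything outside 1..7 is invalid.
 */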
static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

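/*
 * ChaCha20 keys are stored verbatim (no key schedule); as with the
 * other ciphers, changing the key while the engine may still cache the
 * old transform record schedules a context invalidation.
 */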
static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
				     const u8 *key)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
	ctx->key_len = CHACHA_KEY_SIZE;
}

static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
					     const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);

	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->ctrinit = 0;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}

struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
					   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
						unsigned int authsize)
{
	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;
	return 0;
}

static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
				      CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}

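/*
 * Common fallback setup: allocate the generic implementation registered
 * under the same cra_name and size the request context large enough to
 * hold either the HW request state or the fallback subrequest,
 * whichever is bigger.
 */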
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}

static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->ctrinit = 0;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}

static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

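/*
 * SM4 block ciphers. The explicit length checks in the wrappers below
 * work around an EIP96 4.3 erratum where a misaligned request length
 * would otherwise go undetected instead of being reported as an error.
 */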
static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (len != SM4_KEY_SIZE)
		return -EINVAL;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, SM4_KEY_SIZE);
	ctx->key_len = SM4_KEY_SIZE;

	return 0;
}

static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_ENCRYPT);
}

static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

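/*
 * rfc3686(ctr(sm4)) key layout as consumed by the setkey below:
 *
 *   [ 16-byte SM4 key | 4-byte nonce ]
 *
 * The nonce is peeled off and kept separately in the context before the
 * remainder is handed to the plain SM4 setkey.
 */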
static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	len -= CTR_RFC3686_NONCE_SIZE;

	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
}

static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

3551static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
3552 unsigned int len)
3553{
3554 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3555 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3556
3557 /* last 4 bytes of key are the nonce! */
3558 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
3559
3560 len -= CTR_RFC3686_NONCE_SIZE;
3561 return safexcel_aead_gcm_setkey(ctfm, key, len);
3562}

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First nonce byte = L - 1 = 3 for RFC4309's 4-byte counter */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the salt! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};