// SPDX-License-Identifier: GPL-2.0
/*
 * Copyright (C) 2017 Marvell
 *
 * Antoine Tenart <antoine.tenart@free-electrons.com>
 */

#include <asm/unaligned.h>
#include <linux/device.h>
#include <linux/dma-mapping.h>
#include <linux/dmapool.h>
#include <crypto/aead.h>
#include <crypto/aes.h>
#include <crypto/authenc.h>
#include <crypto/chacha.h>
#include <crypto/ctr.h>
#include <crypto/internal/des.h>
#include <crypto/gcm.h>
#include <crypto/ghash.h>
#include <crypto/poly1305.h>
#include <crypto/sha1.h>
#include <crypto/sha2.h>
#include <crypto/sm3.h>
#include <crypto/sm4.h>
#include <crypto/xts.h>
#include <crypto/skcipher.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#include "safexcel.h"

enum safexcel_cipher_direction {
        SAFEXCEL_ENCRYPT,
        SAFEXCEL_DECRYPT,
};

enum safexcel_cipher_alg {
        SAFEXCEL_DES,
        SAFEXCEL_3DES,
        SAFEXCEL_AES,
        SAFEXCEL_CHACHA20,
        SAFEXCEL_SM4,
};

struct safexcel_cipher_ctx {
        struct safexcel_context base;
        struct safexcel_crypto_priv *priv;

        u32 mode;
        enum safexcel_cipher_alg alg;
        u8 aead;  /* !=0=AEAD, 2=IPSec ESP AEAD, 3=IPsec ESP GMAC */
        u8 xcm;   /* 0=authenc, 1=GCM, 2 reserved for CCM */
        u8 aadskip;
        u8 blocksz;
        u32 ivmask;
        u32 ctrinit;

        __le32 key[16];
        u32 nonce;
        unsigned int key_len, xts;

        /* All the below is AEAD specific */
        u32 hash_alg;
        u32 state_sz;

        struct crypto_aead *fback;
};

struct safexcel_cipher_req {
        enum safexcel_cipher_direction direction;
        /* Number of result descriptors associated to the request */
        unsigned int rdescs;
        bool needs_inv;
        int nr_src, nr_dst;
};

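/*
 * Load the IV (plus, for CTR and ChaCha20, the nonce and initial counter)
 * into the command descriptor's token area. Returns the number of 32-bit
 * token words consumed, so the caller knows how much token space remains.
 */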
static int safexcel_skcipher_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                struct safexcel_command_desc *cdesc)
{
        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
                /* 32 bit nonce */
                cdesc->control_data.token[0] = ctx->nonce;
                /* 64 bit IV part */
                memcpy(&cdesc->control_data.token[1], iv, 8);
                /* 32 bit counter, start at 0 or 1 (big endian!) */
                cdesc->control_data.token[3] =
                        (__force u32)cpu_to_be32(ctx->ctrinit);
                return 4;
        }
        if (ctx->alg == SAFEXCEL_CHACHA20) {
                cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
                /* 96 bit nonce part */
                memcpy(&cdesc->control_data.token[0], &iv[4], 12);
                /* 32 bit counter */
                cdesc->control_data.token[3] = *(u32 *)iv;
                return 4;
        }

        cdesc->control_data.options |= ctx->ivmask;
        memcpy(cdesc->control_data.token, iv, ctx->blocksz);
        return ctx->blocksz / sizeof(u32);
}

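/*
 * Build the instruction token for a plain skcipher request. When the IV
 * already occupies all four token words, the direction instruction no
 * longer fits inside the command descriptor and is emitted into the
 * additional token buffer (atoken) instead.
 */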
static void safexcel_skcipher_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                    struct safexcel_command_desc *cdesc,
                                    struct safexcel_token *atoken,
                                    u32 length)
{
        struct safexcel_token *token;
        int ivlen;

        ivlen = safexcel_skcipher_iv(ctx, iv, cdesc);
        if (ivlen == 4) {
                /* No space in cdesc, instruction moves to atoken */
                cdesc->additional_cdata_size = 1;
                token = atoken;
        } else {
                /* Everything fits in cdesc */
                token = (struct safexcel_token *)(cdesc->control_data.token + 2);
                /* Need to pad with NOP */
                eip197_noop_token(&token[1]);
        }

        token->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
        token->packet_length = length;
        token->stat = EIP197_TOKEN_STAT_LAST_PACKET |
                      EIP197_TOKEN_STAT_LAST_HASH;
        token->instructions = EIP197_TOKEN_INS_LAST |
                              EIP197_TOKEN_INS_TYPE_CRYPTO |
                              EIP197_TOKEN_INS_TYPE_OUTPUT;
}

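/*
 * AEAD variant of the IV load. Counter modes (CTR, GCM, ChaCha20) and the
 * IPsec ESP flavours embed a nonce plus a big-endian counter seeded with
 * ctx->ctrinit; plain CBC simply copies one cipher block of IV.
 */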
static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
                             struct safexcel_command_desc *cdesc)
{
        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
            ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
                /* 32 bit nonce */
                cdesc->control_data.token[0] = ctx->nonce;
                /* 64 bit IV part */
                memcpy(&cdesc->control_data.token[1], iv, 8);
                /* 32 bit counter, start at 0 or 1 (big endian!) */
                cdesc->control_data.token[3] =
                        (__force u32)cpu_to_be32(ctx->ctrinit);
                return;
        }
        if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
                /* 96 bit IV part */
                memcpy(&cdesc->control_data.token[0], iv, 12);
                /* 32 bit counter, start at 0 or 1 (big endian!) */
                cdesc->control_data.token[3] =
                        (__force u32)cpu_to_be32(ctx->ctrinit);
                return;
        }
        /* CBC */
        memcpy(cdesc->control_data.token, iv, ctx->blocksz);
}

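/*
 * Build the AEAD instruction token: IV load, an optional CCM B0 block,
 * AAD pass-through to the hash engine, the payload direction instruction
 * and finally ICV insertion (encrypt) or retrieval plus verification
 * (decrypt). atoksize counts the extra token dwords emitted so the
 * command descriptor can be fixed up at the end.
 */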
static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
                                struct safexcel_command_desc *cdesc,
                                struct safexcel_token *atoken,
                                enum safexcel_cipher_direction direction,
                                u32 cryptlen, u32 assoclen, u32 digestsize)
{
        struct safexcel_token *aadref;
        int atoksize = 2; /* Start with minimum size */
        int assocadj = assoclen - ctx->aadskip, aadalign;

        /* Always 4 dwords of embedded IV for AEAD modes */
        cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;

        if (direction == SAFEXCEL_DECRYPT)
                cryptlen -= digestsize;

        if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
                /* Construct IV block B0 for the CBC-MAC */
                u8 *final_iv = (u8 *)cdesc->control_data.token;
                u8 *cbcmaciv = (u8 *)&atoken[1];
                __le32 *aadlen = (__le32 *)&atoken[5];

                if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
                        /* Length + nonce */
                        cdesc->control_data.token[0] = ctx->nonce;
                        /* Fixup flags byte */
                        *(__le32 *)cbcmaciv =
                                cpu_to_le32(ctx->nonce |
                                            ((assocadj > 0) << 6) |
                                            ((digestsize - 2) << 2));
                        /* 64 bit IV part */
                        memcpy(&cdesc->control_data.token[1], iv, 8);
                        memcpy(cbcmaciv + 4, iv, 8);
                        /* Start counter at 0 */
                        cdesc->control_data.token[3] = 0;
                        /* Message length */
                        *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
                } else {
                        /* Variable length IV part */
                        memcpy(final_iv, iv, 15 - iv[0]);
                        memcpy(cbcmaciv, iv, 15 - iv[0]);
                        /* Start variable length counter at 0 */
                        memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
                        memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
                        /* fixup flags byte */
                        cbcmaciv[0] |= ((assocadj > 0) << 6) |
                                       ((digestsize - 2) << 2);
                        /* insert lower 2 bytes of message length */
                        cbcmaciv[14] = cryptlen >> 8;
                        cbcmaciv[15] = cryptlen & 255;
                }

                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = AES_BLOCK_SIZE +
                                        ((assocadj > 0) << 1);
                atoken->stat = 0;
                atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
                                       EIP197_TOKEN_INS_TYPE_HASH;

                if (likely(assocadj)) {
                        *aadlen = cpu_to_le32((assocadj >> 8) |
                                              (assocadj & 255) << 8);
                        atoken += 6;
                        atoksize += 7;
                } else {
                        atoken += 5;
                        atoksize += 6;
                }

                /* Process AAD data */
                aadref = atoken;
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = assocadj;
                atoken->stat = 0;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
                atoken++;

                /* For CCM only, align AAD data towards hash engine */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                aadalign = (assocadj + 2) & 15;
                atoken->packet_length = assocadj && aadalign ?
                                                16 - aadalign :
                                                0;
                if (likely(cryptlen)) {
                        atoken->stat = 0;
                        atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
                } else {
                        atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                        atoken->instructions = EIP197_TOKEN_INS_LAST |
                                               EIP197_TOKEN_INS_TYPE_HASH;
                }
        } else {
                safexcel_aead_iv(ctx, iv, cdesc);

                /* Process AAD data */
                aadref = atoken;
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = assocadj;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                atoken->instructions = EIP197_TOKEN_INS_LAST |
                                       EIP197_TOKEN_INS_TYPE_HASH;
        }
        atoken++;

        if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
                /* For ESP mode (and not GMAC), skip over the IV */
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
                atoken->stat = 0;
                atoken->instructions = 0;
                atoken++;
                atoksize++;
        } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
                            direction == SAFEXCEL_DECRYPT)) {
                /* Poly-chacha decryption needs a dummy NOP here ... */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = 16; /* According to Op Manual */
                atoken->stat = 0;
                atoken->instructions = 0;
                atoken++;
                atoksize++;
        }

        if (ctx->xcm) {
                /* For GCM and CCM, obtain enc(Y0) */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
                atoken->packet_length = 0;
                atoken->stat = 0;
                atoken->instructions = AES_BLOCK_SIZE;
                atoken++;

                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = AES_BLOCK_SIZE;
                atoken->stat = 0;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                       EIP197_TOKEN_INS_TYPE_CRYPTO;
                atoken++;
                atoksize += 2;
        }

        if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
                /* Fixup stat field for AAD direction instruction */
                aadref->stat = 0;

                /* Process crypto data */
                atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
                atoken->packet_length = cryptlen;

                if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
                        /* Fixup instruction field for AAD dir instruction */
                        aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;

                        /* Do not send to crypt engine in case of GMAC */
                        atoken->instructions = EIP197_TOKEN_INS_LAST |
                                               EIP197_TOKEN_INS_TYPE_HASH |
                                               EIP197_TOKEN_INS_TYPE_OUTPUT;
                } else {
                        atoken->instructions = EIP197_TOKEN_INS_LAST |
                                               EIP197_TOKEN_INS_TYPE_CRYPTO |
                                               EIP197_TOKEN_INS_TYPE_HASH |
                                               EIP197_TOKEN_INS_TYPE_OUTPUT;
                }

                cryptlen &= 15;
                if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
                        atoken->stat = 0;
                        /* For CCM only, pad crypto data to the hash engine */
                        atoken++;
                        atoksize++;
                        atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                        atoken->packet_length = 16 - cryptlen;
                        atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                        atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
                } else {
                        atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
                }
                atoken++;
                atoksize++;
        }

        if (direction == SAFEXCEL_ENCRYPT) {
                /* Append ICV */
                atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
                atoken->packet_length = digestsize;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
                               EIP197_TOKEN_STAT_LAST_PACKET;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
                                       EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
        } else {
                /* Extract ICV */
                atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
                atoken->packet_length = digestsize;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
                               EIP197_TOKEN_STAT_LAST_PACKET;
                atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
                atoken++;
                atoksize++;

                /* Verify ICV */
                atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
                atoken->packet_length = digestsize |
                                        EIP197_TOKEN_HASH_RESULT_VERIFY;
                atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
                               EIP197_TOKEN_STAT_LAST_PACKET;
                atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
        }

        /* Fixup length of the token in the command descriptor */
        cdesc->additional_cdata_size = atoksize;
}

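/*
 * Set the AES key for a plain skcipher. If the engine's record cache may
 * still hold the old key, flag a context invalidation before writing the
 * new key into the context.
 */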
static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
                                        const u8 *key, unsigned int len)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct crypto_aes_ctx aes;
        int ret, i;

        ret = aes_expandkey(&aes, key, len);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                for (i = 0; i < len / sizeof(u32); i++) {
                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        for (i = 0; i < len / sizeof(u32); i++)
                ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

        ctx->key_len = len;

        memzero_explicit(&aes, sizeof(aes));
        return 0;
}

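/*
 * Set the combined authenc() key. crypto_authenc_extractkeys() splits the
 * blob into authentication and encryption keys; for RFC3686 CTR the
 * trailing 4 nonce bytes are stripped from the encryption key first, and
 * the HMAC ipad/opad state is precomputed through the matching safexcel
 * hash algorithm.
 */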
static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
                                unsigned int len)
{
        struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct crypto_authenc_keys keys;
        struct crypto_aes_ctx aes;
        int err = -EINVAL, i;
        const char *alg;

        if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
                goto badkey;

        if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
                /* Must have at least space for the nonce here */
                if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
                        goto badkey;
                /* last 4 bytes of key are the nonce! */
                ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
                                      CTR_RFC3686_NONCE_SIZE);
                /* exclude the nonce here */
                keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
        }

        /* Encryption key */
        switch (ctx->alg) {
        case SAFEXCEL_DES:
                err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
                if (unlikely(err))
                        goto badkey;
                break;
        case SAFEXCEL_3DES:
                err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
                if (unlikely(err))
                        goto badkey;
                break;
        case SAFEXCEL_AES:
                err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
                if (unlikely(err))
                        goto badkey;
                break;
        case SAFEXCEL_SM4:
                if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
                        goto badkey;
                break;
        default:
                dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
                goto badkey;
        }

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
                        if (le32_to_cpu(ctx->key[i]) !=
                            ((u32 *)keys.enckey)[i]) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        /* Auth key */
        switch (ctx->hash_alg) {
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
                alg = "safexcel-sha1";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
                alg = "safexcel-sha224";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
                alg = "safexcel-sha256";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
                alg = "safexcel-sha384";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
                alg = "safexcel-sha512";
                break;
        case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
                alg = "safexcel-sm3";
                break;
        default:
                dev_err(priv->dev, "aead: unsupported hash algorithm\n");
                goto badkey;
        }

        if (safexcel_hmac_setkey(&ctx->base, keys.authkey, keys.authkeylen,
                                 alg, ctx->state_sz))
                goto badkey;

        /* Now copy the keys into the context */
        for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
                ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
        ctx->key_len = keys.enckeylen;

        memzero_explicit(&keys, sizeof(keys));
        return 0;

badkey:
        memzero_explicit(&keys, sizeof(keys));
        return err;
}

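/*
 * Fill in the two context control words that select the cipher and hash
 * algorithms, the key/digest sizes and the processing order (encrypt
 * then hash versus hash then decrypt, plus the XCM/GMAC special cases).
 */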
static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
                                    struct crypto_async_request *async,
                                    struct safexcel_cipher_req *sreq,
                                    struct safexcel_command_desc *cdesc)
{
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ctrl_size = ctx->key_len / sizeof(u32);

        cdesc->control_data.control1 = ctx->mode;

        if (ctx->aead) {
                /* Take in account the ipad+opad digests */
                if (ctx->xcm) {
                        ctrl_size += ctx->state_sz / sizeof(u32);
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_DIGEST_XCM |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                } else if (ctx->alg == SAFEXCEL_CHACHA20) {
                        /* Chacha20-Poly1305 */
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
                                (sreq->direction == SAFEXCEL_ENCRYPT ?
                                        CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
                                        CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                        return 0;
                } else {
                        ctrl_size += ctx->state_sz / sizeof(u32) * 2;
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_DIGEST_HMAC |
                                ctx->hash_alg |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                }

                if (sreq->direction == SAFEXCEL_ENCRYPT &&
                    (ctx->xcm == EIP197_XCM_MODE_CCM ||
                     ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
                else if (sreq->direction == SAFEXCEL_ENCRYPT)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
                else if (ctx->xcm == EIP197_XCM_MODE_CCM)
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
                else
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
        } else {
                if (sreq->direction == SAFEXCEL_ENCRYPT)
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
                else
                        cdesc->control_data.control0 =
                                CONTEXT_CONTROL_TYPE_CRYPTO_IN |
                                CONTEXT_CONTROL_KEY_EN |
                                CONTEXT_CONTROL_SIZE(ctrl_size);
        }

        if (ctx->alg == SAFEXCEL_DES) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_DES;
        } else if (ctx->alg == SAFEXCEL_3DES) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_3DES;
        } else if (ctx->alg == SAFEXCEL_AES) {
                switch (ctx->key_len >> ctx->xts) {
                case AES_KEYSIZE_128:
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_CRYPTO_ALG_AES128;
                        break;
                case AES_KEYSIZE_192:
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_CRYPTO_ALG_AES192;
                        break;
                case AES_KEYSIZE_256:
                        cdesc->control_data.control0 |=
                                CONTEXT_CONTROL_CRYPTO_ALG_AES256;
                        break;
                default:
                        dev_err(priv->dev, "aes keysize not supported: %u\n",
                                ctx->key_len >> ctx->xts);
                        return -EINVAL;
                }
        } else if (ctx->alg == SAFEXCEL_CHACHA20) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
        } else if (ctx->alg == SAFEXCEL_SM4) {
                cdesc->control_data.control0 |=
                        CONTEXT_CONTROL_CRYPTO_ALG_SM4;
        }

        return 0;
}

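/*
 * Reap the result descriptors of a completed cipher request, unmap the
 * DMA buffers and, for CBC encryption, copy the last output block back
 * into the request IV so that chained calls see the updated IV.
 */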
static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
                                      struct crypto_async_request *async,
                                      struct scatterlist *src,
                                      struct scatterlist *dst,
                                      unsigned int cryptlen,
                                      struct safexcel_cipher_req *sreq,
                                      bool *should_complete, int *ret)
{
        struct skcipher_request *areq = skcipher_request_cast(async);
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
        struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
        struct safexcel_result_desc *rdesc;
        int ndesc = 0;

        *ret = 0;

        if (unlikely(!sreq->rdescs))
                return 0;

        while (sreq->rdescs--) {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: result: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                if (likely(!*ret))
                        *ret = safexcel_rdesc_check_errors(priv, rdesc);

                ndesc++;
        }

        safexcel_complete(priv, ring);

        if (src == dst) {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_BIDIRECTIONAL);
        } else {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_TO_DEVICE);
                if (sreq->nr_dst > 0)
                        dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
                                     DMA_FROM_DEVICE);
        }

        /*
         * Update IV in req from last crypto output word for CBC modes
         */
        if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
            (sreq->direction == SAFEXCEL_ENCRYPT)) {
                /* For encrypt take the last output word */
                sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
                                   crypto_skcipher_ivsize(skcipher),
                                   (cryptlen -
                                    crypto_skcipher_ivsize(skcipher)));
        }

        *should_complete = true;

        return ndesc;
}

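/*
 * Translate one request into chains of command and result descriptors on
 * the given ring. Ordering matters here: for in-place CBC decryption the
 * input IV must be saved before the engine overwrites the buffer, and
 * zero length input needs a one byte dummy command descriptor because
 * the EIP97 cannot process empty packets.
 */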
static int safexcel_send_req(struct crypto_async_request *base, int ring,
                             struct safexcel_cipher_req *sreq,
                             struct scatterlist *src, struct scatterlist *dst,
                             unsigned int cryptlen, unsigned int assoclen,
                             unsigned int digestsize, u8 *iv, int *commands,
                             int *results)
{
        struct skcipher_request *areq = skcipher_request_cast(base);
        struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct safexcel_command_desc *cdesc;
        struct safexcel_command_desc *first_cdesc = NULL;
        struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
        struct scatterlist *sg;
        unsigned int totlen;
        unsigned int totlen_src = cryptlen + assoclen;
        unsigned int totlen_dst = totlen_src;
        struct safexcel_token *atoken;
        int n_cdesc = 0, n_rdesc = 0;
        int queued, i, ret = 0;
        bool first = true;

        sreq->nr_src = sg_nents_for_len(src, totlen_src);

        if (ctx->aead) {
                /*
                 * AEAD has auth tag appended to output for encrypt and
                 * removed from the output for decrypt!
                 */
                if (sreq->direction == SAFEXCEL_DECRYPT)
                        totlen_dst -= digestsize;
                else
                        totlen_dst += digestsize;

                memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
                       &ctx->base.ipad, ctx->state_sz);
                if (!ctx->xcm)
                        memcpy(ctx->base.ctxr->data + (ctx->key_len +
                               ctx->state_sz) / sizeof(u32), &ctx->base.opad,
                               ctx->state_sz);
        } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
                   (sreq->direction == SAFEXCEL_DECRYPT)) {
                /*
                 * Save IV from last crypto input word for CBC modes in decrypt
                 * direction. Need to do this first in case of inplace operation
                 * as it will be overwritten.
                 */
                sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
                                   crypto_skcipher_ivsize(skcipher),
                                   (totlen_src -
                                    crypto_skcipher_ivsize(skcipher)));
        }

        sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);

        /*
         * Remember actual input length, source buffer length may be
         * updated in case of inline operation below.
         */
        totlen = totlen_src;
        queued = totlen_src;

        if (src == dst) {
                sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
                sreq->nr_dst = sreq->nr_src;
                if (unlikely((totlen_src || totlen_dst) &&
                             (sreq->nr_src <= 0))) {
                        dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
                                max(totlen_src, totlen_dst));
                        return -EINVAL;
                }
                if (sreq->nr_src > 0 &&
                    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL))
                        return -EIO;
        } else {
                if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
                        dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
                                totlen_src);
                        return -EINVAL;
                }

                if (sreq->nr_src > 0 &&
                    !dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE))
                        return -EIO;

                if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
                        dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
                                totlen_dst);
                        ret = -EINVAL;
                        goto unmap;
                }

                if (sreq->nr_dst > 0 &&
                    !dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE)) {
                        ret = -EIO;
                        goto unmap;
                }
        }

        memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);

        if (!totlen) {
                /*
                 * The EIP97 cannot deal with zero length input packets!
                 * So stuff a dummy command descriptor indicating a 1 byte
                 * (dummy) input packet, using the context record as source.
                 */
                first_cdesc = safexcel_add_cdesc(priv, ring,
                                                 1, 1, ctx->base.ctxr_dma,
                                                 1, 1, ctx->base.ctxr_dma,
                                                 &atoken);
                if (IS_ERR(first_cdesc)) {
                        /* No space left in the command descriptor ring */
                        ret = PTR_ERR(first_cdesc);
                        goto cdesc_rollback;
                }
                n_cdesc = 1;
                goto skip_cdesc;
        }

        /* command descriptors */
        for_each_sg(src, sg, sreq->nr_src, i) {
                int len = sg_dma_len(sg);

                /* Do not overflow the request */
                if (queued < len)
                        len = queued;

                cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
                                           !(queued - len),
                                           sg_dma_address(sg), len, totlen,
                                           ctx->base.ctxr_dma, &atoken);
                if (IS_ERR(cdesc)) {
                        /* No space left in the command descriptor ring */
                        ret = PTR_ERR(cdesc);
                        goto cdesc_rollback;
                }

                if (!n_cdesc)
                        first_cdesc = cdesc;

                n_cdesc++;
                queued -= len;
                if (!queued)
                        break;
        }
skip_cdesc:
        /* Add context control words and token to first command descriptor */
        safexcel_context_control(ctx, base, sreq, first_cdesc);
        if (ctx->aead)
                safexcel_aead_token(ctx, iv, first_cdesc, atoken,
                                    sreq->direction, cryptlen,
                                    assoclen, digestsize);
        else
                safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
                                        cryptlen);

        /* result descriptors */
        for_each_sg(dst, sg, sreq->nr_dst, i) {
                bool last = (i == sreq->nr_dst - 1);
                u32 len = sg_dma_len(sg);

                /* only allow the part of the buffer we know we need */
                if (len > totlen_dst)
                        len = totlen_dst;
                if (unlikely(!len))
                        break;
                totlen_dst -= len;

                /* skip over AAD space in buffer - not written */
                if (assoclen) {
                        if (assoclen >= len) {
                                assoclen -= len;
                                continue;
                        }
                        rdesc = safexcel_add_rdesc(priv, ring, first, last,
                                                   sg_dma_address(sg) +
                                                   assoclen,
                                                   len - assoclen);
                        assoclen = 0;
                } else {
                        rdesc = safexcel_add_rdesc(priv, ring, first, last,
                                                   sg_dma_address(sg),
                                                   len);
                }
                if (IS_ERR(rdesc)) {
                        /* No space left in the result descriptor ring */
                        ret = PTR_ERR(rdesc);
                        goto rdesc_rollback;
                }
                if (first) {
                        first_rdesc = rdesc;
                        first = false;
                }
                n_rdesc++;
        }

        if (unlikely(first)) {
                /*
                 * Special case: AEAD decrypt with only AAD data.
                 * In this case there is NO output data from the engine,
                 * but the engine still needs a result descriptor!
                 * Create a dummy one just for catching the result token.
                 */
                rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
                if (IS_ERR(rdesc)) {
                        /* No space left in the result descriptor ring */
                        ret = PTR_ERR(rdesc);
                        goto rdesc_rollback;
                }
                first_rdesc = rdesc;
                n_rdesc = 1;
        }

        safexcel_rdr_req_set(priv, ring, first_rdesc, base);

        *commands = n_cdesc;
        *results = n_rdesc;
        return 0;

rdesc_rollback:
        for (i = 0; i < n_rdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
cdesc_rollback:
        for (i = 0; i < n_cdesc; i++)
                safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
unmap:
        if (src == dst) {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_BIDIRECTIONAL);
        } else {
                if (sreq->nr_src > 0)
                        dma_unmap_sg(priv->dev, src, sreq->nr_src,
                                     DMA_TO_DEVICE);
                if (sreq->nr_dst > 0)
                        dma_unmap_sg(priv->dev, dst, sreq->nr_dst,
                                     DMA_FROM_DEVICE);
        }

        return ret;
}

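/*
 * Completion handler for a context invalidation request. Once the stale
 * context record has been flushed from the engine, either free it (on
 * transform exit) or requeue the request that triggered the invalidation.
 */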
static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
                                      int ring,
                                      struct crypto_async_request *base,
                                      struct safexcel_cipher_req *sreq,
                                      bool *should_complete, int *ret)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_result_desc *rdesc;
        int ndesc = 0, enq_ret;

        *ret = 0;

        if (unlikely(!sreq->rdescs))
                return 0;

        while (sreq->rdescs--) {
                rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
                if (IS_ERR(rdesc)) {
                        dev_err(priv->dev,
                                "cipher: invalidate: could not retrieve the result descriptor\n");
                        *ret = PTR_ERR(rdesc);
                        break;
                }

                if (likely(!*ret))
                        *ret = safexcel_rdesc_check_errors(priv, rdesc);

                ndesc++;
        }

        safexcel_complete(priv, ring);

        if (ctx->base.exit_inv) {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);

                *should_complete = true;

                return ndesc;
        }

        ring = safexcel_select_ring(priv);
        ctx->base.ring = ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        if (enq_ret != -EINPROGRESS)
                *ret = enq_ret;

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        *should_complete = false;

        return ndesc;
}

static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
                                           int ring,
                                           struct crypto_async_request *async,
                                           bool *should_complete, int *ret)
{
        struct skcipher_request *req = skcipher_request_cast(async);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        int err;

        if (sreq->needs_inv) {
                sreq->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async, sreq,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async, req->src,
                                                 req->dst, req->cryptlen, sreq,
                                                 should_complete, ret);
        }

        return err;
}

static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
                                       int ring,
                                       struct crypto_async_request *async,
                                       bool *should_complete, int *ret)
{
        struct aead_request *req = aead_request_cast(async);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        int err;

        if (sreq->needs_inv) {
                sreq->needs_inv = false;
                err = safexcel_handle_inv_result(priv, ring, async, sreq,
                                                 should_complete, ret);
        } else {
                err = safexcel_handle_req_result(priv, ring, async, req->src,
                                                 req->dst,
                                                 req->cryptlen + crypto_aead_authsize(tfm),
                                                 sreq, should_complete, ret);
        }

        return err;
}

static int safexcel_cipher_send_inv(struct crypto_async_request *base,
                                    int ring, int *commands, int *results)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
        if (unlikely(ret))
                return ret;

        *commands = 1;
        *results = 1;

        return 0;
}

static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
                                  int *commands, int *results)
{
        struct skcipher_request *req = skcipher_request_cast(async);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

        if (sreq->needs_inv) {
                ret = safexcel_cipher_send_inv(async, ring, commands, results);
        } else {
                struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
                u8 input_iv[AES_BLOCK_SIZE];

                /*
                 * Save input IV in case of CBC decrypt mode
                 * Will be overwritten with output IV prior to use!
                 */
                memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));

                ret = safexcel_send_req(async, ring, sreq, req->src,
                                        req->dst, req->cryptlen, 0, 0, input_iv,
                                        commands, results);
        }

        sreq->rdescs = *results;
        return ret;
}

static int safexcel_aead_send(struct crypto_async_request *async, int ring,
                              int *commands, int *results)
{
        struct aead_request *req = aead_request_cast(async);
        struct crypto_aead *tfm = crypto_aead_reqtfm(req);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);

        if (sreq->needs_inv)
                ret = safexcel_cipher_send_inv(async, ring, commands, results);
        else
                ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
                                        req->cryptlen, req->assoclen,
                                        crypto_aead_authsize(tfm), req->iv,
                                        commands, results);
        sreq->rdescs = *results;
        return ret;
}

static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
                                    struct crypto_async_request *base,
                                    struct safexcel_cipher_req *sreq,
                                    struct crypto_wait *result)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ring = ctx->base.ring;
        int err;

        ctx = crypto_tfm_ctx(base->tfm);
        ctx->base.exit_inv = true;
        sreq->needs_inv = true;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        err = crypto_wait_req(-EINPROGRESS, result);

        if (err) {
                dev_warn(priv->dev,
                         "cipher: sync: invalidate: completion error %d\n",
                         err);
                return err;
        }

        return 0;
}

static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
        struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
        DECLARE_CRYPTO_WAIT(result);

        memset(req, 0, sizeof(struct skcipher_request));

        skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                      crypto_req_done, &result);
        skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
{
        EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
        struct safexcel_cipher_req *sreq = aead_request_ctx(req);
        DECLARE_CRYPTO_WAIT(result);

        memset(req, 0, sizeof(struct aead_request));

        aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
                                  crypto_req_done, &result);
        aead_request_set_tfm(req, __crypto_aead_cast(tfm));

        return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
}

static int safexcel_queue_req(struct crypto_async_request *base,
                              struct safexcel_cipher_req *sreq,
                              enum safexcel_cipher_direction dir)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret, ring;

        sreq->needs_inv = false;
        sreq->direction = dir;

        if (ctx->base.ctxr) {
                if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
                        sreq->needs_inv = true;
                        ctx->base.needs_inv = false;
                }
        } else {
                ctx->base.ring = safexcel_select_ring(priv);
                ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
                                                 EIP197_GFP_FLAGS(*base),
                                                 &ctx->base.ctxr_dma);
                if (!ctx->base.ctxr)
                        return -ENOMEM;
        }

        ring = ctx->base.ring;

        spin_lock_bh(&priv->ring[ring].queue_lock);
        ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
        spin_unlock_bh(&priv->ring[ring].queue_lock);

        queue_work(priv->ring[ring].workqueue,
                   &priv->ring[ring].work_data.work);

        return ret;
}

static int safexcel_encrypt(struct skcipher_request *req)
{
        return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
                                  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt(struct skcipher_request *req)
{
        return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
                                  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(tfm->__crt_alg, struct safexcel_alg_template,
                             alg.skcipher.base);

        crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
                                    sizeof(struct safexcel_cipher_req));

        ctx->base.priv = tmpl->priv;

        ctx->base.send = safexcel_skcipher_send;
        ctx->base.handle_result = safexcel_skcipher_handle_result;
        ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
        ctx->ctrinit = 1;
        return 0;
}

static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        memzero_explicit(ctx->key, sizeof(ctx->key));

        /* context not allocated, skip invalidation */
        if (!ctx->base.ctxr)
                return -ENOMEM;

        memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
        return 0;
}

static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        if (safexcel_cipher_cra_exit(tfm))
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_skcipher_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "skcipher: invalidation error %d\n",
                                 ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        if (safexcel_cipher_cra_exit(tfm))
                return;

        if (priv->flags & EIP197_TRC_CACHE) {
                ret = safexcel_aead_exit_inv(tfm);
                if (ret)
                        dev_warn(priv->dev, "aead: invalidation error %d\n",
                                 ret);
        } else {
                dma_pool_free(priv->context_pool, ctx->base.ctxr,
                              ctx->base.ctxr_dma);
        }
}

static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_AES;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
        ctx->blocksz = 0;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

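/*
 * The template structures below register the hardware implementations
 * with the kernel crypto API. As a purely illustrative sketch (not part
 * of this driver), a kernel user would reach them through the generic
 * API, e.g.:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("ecb(aes)", 0, 0);
 *	if (!IS_ERR(tfm)) {
 *		crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *		...
 *		crypto_free_skcipher(tfm);
 *	}
 *
 * Requesting the "safexcel-ecb-aes" driver name instead of the generic
 * name selects this implementation explicitly.
 */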
struct safexcel_alg_template safexcel_alg_ecb_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(aes)",
                        .cra_driver_name = "safexcel-ecb-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_AES;
        ctx->blocksz = AES_BLOCK_SIZE;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
        return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aes_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = AES_MIN_KEY_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE,
                .ivsize = AES_BLOCK_SIZE,
                .base = {
                        .cra_name = "cbc(aes)",
                        .cra_driver_name = "safexcel-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_cbc_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

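/*
 * rfc3686(ctr(aes)) keys carry a 4 byte nonce appended to the AES key,
 * as defined by RFC 3686. For example, a 20 byte key for AES-128-CTR is
 * laid out as:
 *
 *	key[0..15]  AES key
 *	key[16..19] nonce, stashed in ctx->nonce below
 */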
static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
                                           const u8 *key, unsigned int len)
{
        struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        struct crypto_aes_ctx aes;
        int ret, i;
        unsigned int keylen;

        /* last 4 bytes of key are the nonce! */
        ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
        /* exclude the nonce here */
        keylen = len - CTR_RFC3686_NONCE_SIZE;
        ret = aes_expandkey(&aes, key, keylen);
        if (ret)
                return ret;

        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
                for (i = 0; i < keylen / sizeof(u32); i++) {
                        if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
                                ctx->base.needs_inv = true;
                                break;
                        }
                }
        }

        for (i = 0; i < keylen / sizeof(u32); i++)
                ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

        ctx->key_len = keylen;

        memzero_explicit(&aes, sizeof(aes));
        return 0;
}

static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_AES;
        ctx->blocksz = AES_BLOCK_SIZE;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_aes = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_AES,
        .alg.skcipher = {
                .setkey = safexcel_skcipher_aesctr_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                /* Add nonce size */
                .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
                .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
                .ivsize = CTR_RFC3686_IV_SIZE,
                .base = {
                        .cra_name = "rfc3686(ctr(aes))",
                        .cra_driver_name = "safexcel-ctr-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = 1,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_aes_ctr_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
                               unsigned int len)
{
        struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int ret;

        ret = verify_skcipher_des_key(ctfm, key);
        if (ret)
                return ret;

        /* if context exists and the key changed, we need to invalidate it */
        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
                if (memcmp(ctx->key, key, len))
                        ctx->base.needs_inv = true;

        memcpy(ctx->key, key, len);
        ctx->key_len = len;

        return 0;
}

static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_DES;
        ctx->blocksz = DES_BLOCK_SIZE;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
        return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES_KEY_SIZE,
                .max_keysize = DES_KEY_SIZE,
                .ivsize = DES_BLOCK_SIZE,
                .base = {
                        .cra_name = "cbc(des)",
                        .cra_driver_name = "safexcel-cbc-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des_cbc_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_DES;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
        ctx->blocksz = 0;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES_KEY_SIZE,
                .max_keysize = DES_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(des)",
                        .cra_driver_name = "safexcel-ecb-des",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
                                    const u8 *key, unsigned int len)
{
        struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
        struct safexcel_crypto_priv *priv = ctx->base.priv;
        int err;

        err = verify_skcipher_des3_key(ctfm, key);
        if (err)
                return err;

        /* if context exists and the key changed, we need to invalidate it */
        if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
                if (memcmp(ctx->key, key, len))
                        ctx->base.needs_inv = true;

        memcpy(ctx->key, key, len);
        ctx->key_len = len;

        return 0;
}

static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_3DES;
        ctx->blocksz = DES3_EDE_BLOCK_SIZE;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
        return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des3_ede_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES3_EDE_KEY_SIZE,
                .max_keysize = DES3_EDE_KEY_SIZE,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .base = {
                        .cra_name = "cbc(des3_ede)",
                        .cra_driver_name = "safexcel-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des3_cbc_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_skcipher_cra_init(tfm);
        ctx->alg = SAFEXCEL_3DES;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
        ctx->blocksz = 0;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
        .algo_mask = SAFEXCEL_ALG_DES,
        .alg.skcipher = {
                .setkey = safexcel_des3_ede_setkey,
                .encrypt = safexcel_encrypt,
                .decrypt = safexcel_decrypt,
                .min_keysize = DES3_EDE_KEY_SIZE,
                .max_keysize = DES3_EDE_KEY_SIZE,
                .base = {
                        .cra_name = "ecb(des3_ede)",
                        .cra_driver_name = "safexcel-ecb-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_skcipher_des3_ecb_cra_init,
                        .cra_exit = safexcel_skcipher_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_encrypt(struct aead_request *req)
{
        struct safexcel_cipher_req *creq = aead_request_ctx(req);

        return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_decrypt(struct aead_request *req)
{
        struct safexcel_cipher_req *creq = aead_request_ctx(req);

        return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

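/*
 * Common AEAD transform init: defaults to AES in CBC mode, with the hash
 * algorithm and state size filled in by the per-algorithm init wrappers
 * below.
 */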
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
        struct safexcel_alg_template *tmpl =
                container_of(tfm->__crt_alg, struct safexcel_alg_template,
                             alg.aead.base);

        crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
                                sizeof(struct safexcel_cipher_req));

        ctx->base.priv = tmpl->priv;

        ctx->alg = SAFEXCEL_AES; /* default */
        ctx->blocksz = AES_BLOCK_SIZE;
        ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
        ctx->ctrinit = 1;
        ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
        ctx->aead = true;
        ctx->base.send = safexcel_aead_send;
        ctx->base.handle_result = safexcel_aead_handle_result;
        return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
        ctx->state_sz = SHA1_DIGEST_SIZE;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA1_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha1),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha1_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
        ctx->state_sz = SHA256_DIGEST_SIZE;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha256),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha256_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
        ctx->state_sz = SHA256_DIGEST_SIZE;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha224),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha224_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
        ctx->state_sz = SHA512_DIGEST_SIZE;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA512_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha512),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha512_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_cra_init(tfm);
        ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
        ctx->state_sz = SHA512_DIGEST_SIZE;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = AES_BLOCK_SIZE,
                .maxauthsize = SHA384_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha384),cbc(aes))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = AES_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha384_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_sha1_cra_init(tfm);
        ctx->alg = SAFEXCEL_3DES; /* override default */
        ctx->blocksz = DES3_EDE_BLOCK_SIZE;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA1_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha1_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_sha256_cra_init(tfm);
        ctx->alg = SAFEXCEL_3DES; /* override default */
        ctx->blocksz = DES3_EDE_BLOCK_SIZE;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA256_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha256_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_sha224_cra_init(tfm);
        ctx->alg = SAFEXCEL_3DES; /* override default */
        ctx->blocksz = DES3_EDE_BLOCK_SIZE;
        ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
        return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
        .type = SAFEXCEL_ALG_TYPE_AEAD,
        .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
        .alg.aead = {
                .setkey = safexcel_aead_setkey,
                .encrypt = safexcel_aead_encrypt,
                .decrypt = safexcel_aead_decrypt,
                .ivsize = DES3_EDE_BLOCK_SIZE,
                .maxauthsize = SHA224_DIGEST_SIZE,
                .base = {
                        .cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
                        .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
                        .cra_priority = SAFEXCEL_CRA_PRIORITY,
                        .cra_flags = CRYPTO_ALG_ASYNC |
                                     CRYPTO_ALG_ALLOCATES_MEMORY |
                                     CRYPTO_ALG_KERN_DRIVER_ONLY,
                        .cra_blocksize = DES3_EDE_BLOCK_SIZE,
                        .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
                        .cra_alignmask = 0,
                        .cra_init = safexcel_aead_sha224_des3_cra_init,
                        .cra_exit = safexcel_aead_cra_exit,
                        .cra_module = THIS_MODULE,
                },
        },
};

static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
        struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

        safexcel_aead_sha512_cra_init(tfm);
        ctx->alg = SAFEXCEL_3DES; /* override default */
        ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1963 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1964 return 0;
1965}
1966
1967struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
1968 .type = SAFEXCEL_ALG_TYPE_AEAD,
1969 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
1970 .alg.aead = {
1971 .setkey = safexcel_aead_setkey,
1972 .encrypt = safexcel_aead_encrypt,
1973 .decrypt = safexcel_aead_decrypt,
1974 .ivsize = DES3_EDE_BLOCK_SIZE,
1975 .maxauthsize = SHA512_DIGEST_SIZE,
1976 .base = {
1977 .cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
1978 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
1979 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1980 .cra_flags = CRYPTO_ALG_ASYNC |
1981 CRYPTO_ALG_ALLOCATES_MEMORY |
1982 CRYPTO_ALG_KERN_DRIVER_ONLY,
1983 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1984 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1985 .cra_alignmask = 0,
1986 .cra_init = safexcel_aead_sha512_des3_cra_init,
1987 .cra_exit = safexcel_aead_cra_exit,
1988 .cra_module = THIS_MODULE,
1989 },
1990 },
1991};
1992
1993static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
1994{
1995 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1996
1997 safexcel_aead_sha384_cra_init(tfm);
1998 ctx->alg = SAFEXCEL_3DES; /* override default */
1999 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
2000 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2001 return 0;
2002}
2003
2004struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
2005 .type = SAFEXCEL_ALG_TYPE_AEAD,
2006 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2007 .alg.aead = {
2008 .setkey = safexcel_aead_setkey,
2009 .encrypt = safexcel_aead_encrypt,
2010 .decrypt = safexcel_aead_decrypt,
2011 .ivsize = DES3_EDE_BLOCK_SIZE,
2012 .maxauthsize = SHA384_DIGEST_SIZE,
2013 .base = {
2014 .cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
2015 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
2016 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2017 .cra_flags = CRYPTO_ALG_ASYNC |
2018 CRYPTO_ALG_ALLOCATES_MEMORY |
2019 CRYPTO_ALG_KERN_DRIVER_ONLY,
2020 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
2021 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2022 .cra_alignmask = 0,
2023 .cra_init = safexcel_aead_sha384_des3_cra_init,
2024 .cra_exit = safexcel_aead_cra_exit,
2025 .cra_module = THIS_MODULE,
2026 },
2027 },
2028};
2029
2030static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
2031{
2032 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2033
2034 safexcel_aead_sha1_cra_init(tfm);
2035 ctx->alg = SAFEXCEL_DES; /* override default */
2036 ctx->blocksz = DES_BLOCK_SIZE;
2037 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2038 return 0;
2039}
2040
2041struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
2042 .type = SAFEXCEL_ALG_TYPE_AEAD,
2043 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
2044 .alg.aead = {
2045 .setkey = safexcel_aead_setkey,
2046 .encrypt = safexcel_aead_encrypt,
2047 .decrypt = safexcel_aead_decrypt,
2048 .ivsize = DES_BLOCK_SIZE,
2049 .maxauthsize = SHA1_DIGEST_SIZE,
2050 .base = {
2051 .cra_name = "authenc(hmac(sha1),cbc(des))",
2052 .cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
2053 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2054 .cra_flags = CRYPTO_ALG_ASYNC |
2055 CRYPTO_ALG_ALLOCATES_MEMORY |
2056 CRYPTO_ALG_KERN_DRIVER_ONLY,
2057 .cra_blocksize = DES_BLOCK_SIZE,
2058 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2059 .cra_alignmask = 0,
2060 .cra_init = safexcel_aead_sha1_des_cra_init,
2061 .cra_exit = safexcel_aead_cra_exit,
2062 .cra_module = THIS_MODULE,
2063 },
2064 },
2065};
2066
2067static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
2068{
2069 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2070
2071 safexcel_aead_sha256_cra_init(tfm);
2072 ctx->alg = SAFEXCEL_DES; /* override default */
2073 ctx->blocksz = DES_BLOCK_SIZE;
2074 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2075 return 0;
2076}
2077
2078struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
2079 .type = SAFEXCEL_ALG_TYPE_AEAD,
2080 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2081 .alg.aead = {
2082 .setkey = safexcel_aead_setkey,
2083 .encrypt = safexcel_aead_encrypt,
2084 .decrypt = safexcel_aead_decrypt,
2085 .ivsize = DES_BLOCK_SIZE,
2086 .maxauthsize = SHA256_DIGEST_SIZE,
2087 .base = {
2088 .cra_name = "authenc(hmac(sha256),cbc(des))",
2089 .cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
2090 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2091 .cra_flags = CRYPTO_ALG_ASYNC |
2092 CRYPTO_ALG_ALLOCATES_MEMORY |
2093 CRYPTO_ALG_KERN_DRIVER_ONLY,
2094 .cra_blocksize = DES_BLOCK_SIZE,
2095 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2096 .cra_alignmask = 0,
2097 .cra_init = safexcel_aead_sha256_des_cra_init,
2098 .cra_exit = safexcel_aead_cra_exit,
2099 .cra_module = THIS_MODULE,
2100 },
2101 },
2102};
2103
2104static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
2105{
2106 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2107
2108 safexcel_aead_sha224_cra_init(tfm);
2109 ctx->alg = SAFEXCEL_DES; /* override default */
2110 ctx->blocksz = DES_BLOCK_SIZE;
2111 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2112 return 0;
2113}
2114
2115struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
2116 .type = SAFEXCEL_ALG_TYPE_AEAD,
2117 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
2118 .alg.aead = {
2119 .setkey = safexcel_aead_setkey,
2120 .encrypt = safexcel_aead_encrypt,
2121 .decrypt = safexcel_aead_decrypt,
2122 .ivsize = DES_BLOCK_SIZE,
2123 .maxauthsize = SHA224_DIGEST_SIZE,
2124 .base = {
2125 .cra_name = "authenc(hmac(sha224),cbc(des))",
2126 .cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
2127 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2128 .cra_flags = CRYPTO_ALG_ASYNC |
2129 CRYPTO_ALG_ALLOCATES_MEMORY |
2130 CRYPTO_ALG_KERN_DRIVER_ONLY,
2131 .cra_blocksize = DES_BLOCK_SIZE,
2132 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2133 .cra_alignmask = 0,
2134 .cra_init = safexcel_aead_sha224_des_cra_init,
2135 .cra_exit = safexcel_aead_cra_exit,
2136 .cra_module = THIS_MODULE,
2137 },
2138 },
2139};
2140
2141static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
2142{
2143 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2144
2145 safexcel_aead_sha512_cra_init(tfm);
2146 ctx->alg = SAFEXCEL_DES; /* override default */
2147 ctx->blocksz = DES_BLOCK_SIZE;
2148 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2149 return 0;
2150}
2151
2152struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
2153 .type = SAFEXCEL_ALG_TYPE_AEAD,
2154 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2155 .alg.aead = {
2156 .setkey = safexcel_aead_setkey,
2157 .encrypt = safexcel_aead_encrypt,
2158 .decrypt = safexcel_aead_decrypt,
2159 .ivsize = DES_BLOCK_SIZE,
2160 .maxauthsize = SHA512_DIGEST_SIZE,
2161 .base = {
2162 .cra_name = "authenc(hmac(sha512),cbc(des))",
2163 .cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
2164 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2165 .cra_flags = CRYPTO_ALG_ASYNC |
2166 CRYPTO_ALG_ALLOCATES_MEMORY |
2167 CRYPTO_ALG_KERN_DRIVER_ONLY,
2168 .cra_blocksize = DES_BLOCK_SIZE,
2169 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2170 .cra_alignmask = 0,
2171 .cra_init = safexcel_aead_sha512_des_cra_init,
2172 .cra_exit = safexcel_aead_cra_exit,
2173 .cra_module = THIS_MODULE,
2174 },
2175 },
2176};
2177
2178static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
2179{
2180 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2181
2182 safexcel_aead_sha384_cra_init(tfm);
2183 ctx->alg = SAFEXCEL_DES; /* override default */
2184 ctx->blocksz = DES_BLOCK_SIZE;
2185 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
2186 return 0;
2187}
2188
2189struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
2190 .type = SAFEXCEL_ALG_TYPE_AEAD,
2191 .algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
2192 .alg.aead = {
2193 .setkey = safexcel_aead_setkey,
2194 .encrypt = safexcel_aead_encrypt,
2195 .decrypt = safexcel_aead_decrypt,
2196 .ivsize = DES_BLOCK_SIZE,
2197 .maxauthsize = SHA384_DIGEST_SIZE,
2198 .base = {
2199 .cra_name = "authenc(hmac(sha384),cbc(des))",
2200 .cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
2201 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2202 .cra_flags = CRYPTO_ALG_ASYNC |
2203 CRYPTO_ALG_ALLOCATES_MEMORY |
2204 CRYPTO_ALG_KERN_DRIVER_ONLY,
2205 .cra_blocksize = DES_BLOCK_SIZE,
2206 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2207 .cra_alignmask = 0,
2208 .cra_init = safexcel_aead_sha384_des_cra_init,
2209 .cra_exit = safexcel_aead_cra_exit,
2210 .cra_module = THIS_MODULE,
2211 },
2212 },
2213};
2214
2215static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
2216{
2217 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2218
2219 safexcel_aead_sha1_cra_init(tfm);
2220 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2221 return 0;
2222}
2223
2224struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
2225 .type = SAFEXCEL_ALG_TYPE_AEAD,
2226 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
2227 .alg.aead = {
2228 .setkey = safexcel_aead_setkey,
2229 .encrypt = safexcel_aead_encrypt,
2230 .decrypt = safexcel_aead_decrypt,
2231 .ivsize = CTR_RFC3686_IV_SIZE,
2232 .maxauthsize = SHA1_DIGEST_SIZE,
2233 .base = {
2234 .cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2235 .cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
2236 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2237 .cra_flags = CRYPTO_ALG_ASYNC |
2238 CRYPTO_ALG_ALLOCATES_MEMORY |
2239 CRYPTO_ALG_KERN_DRIVER_ONLY,
2240 .cra_blocksize = 1,
2241 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2242 .cra_alignmask = 0,
2243 .cra_init = safexcel_aead_sha1_ctr_cra_init,
2244 .cra_exit = safexcel_aead_cra_exit,
2245 .cra_module = THIS_MODULE,
2246 },
2247 },
2248};
2249
2250static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
2251{
2252 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2253
2254 safexcel_aead_sha256_cra_init(tfm);
2255 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2256 return 0;
2257}
2258
2259struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
2260 .type = SAFEXCEL_ALG_TYPE_AEAD,
2261 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2262 .alg.aead = {
2263 .setkey = safexcel_aead_setkey,
2264 .encrypt = safexcel_aead_encrypt,
2265 .decrypt = safexcel_aead_decrypt,
2266 .ivsize = CTR_RFC3686_IV_SIZE,
2267 .maxauthsize = SHA256_DIGEST_SIZE,
2268 .base = {
2269 .cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2270 .cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
2271 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2272 .cra_flags = CRYPTO_ALG_ASYNC |
2273 CRYPTO_ALG_ALLOCATES_MEMORY |
2274 CRYPTO_ALG_KERN_DRIVER_ONLY,
2275 .cra_blocksize = 1,
2276 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2277 .cra_alignmask = 0,
2278 .cra_init = safexcel_aead_sha256_ctr_cra_init,
2279 .cra_exit = safexcel_aead_cra_exit,
2280 .cra_module = THIS_MODULE,
2281 },
2282 },
2283};
2284
2285static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
2286{
2287 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2288
2289 safexcel_aead_sha224_cra_init(tfm);
2290 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2291 return 0;
2292}
2293
2294struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
2295 .type = SAFEXCEL_ALG_TYPE_AEAD,
2296 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
2297 .alg.aead = {
2298 .setkey = safexcel_aead_setkey,
2299 .encrypt = safexcel_aead_encrypt,
2300 .decrypt = safexcel_aead_decrypt,
2301 .ivsize = CTR_RFC3686_IV_SIZE,
2302 .maxauthsize = SHA224_DIGEST_SIZE,
2303 .base = {
2304 .cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
2305 .cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
2306 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2307 .cra_flags = CRYPTO_ALG_ASYNC |
2308 CRYPTO_ALG_ALLOCATES_MEMORY |
2309 CRYPTO_ALG_KERN_DRIVER_ONLY,
2310 .cra_blocksize = 1,
2311 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2312 .cra_alignmask = 0,
2313 .cra_init = safexcel_aead_sha224_ctr_cra_init,
2314 .cra_exit = safexcel_aead_cra_exit,
2315 .cra_module = THIS_MODULE,
2316 },
2317 },
2318};
2319
2320static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
2321{
2322 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2323
2324 safexcel_aead_sha512_cra_init(tfm);
2325 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2326 return 0;
2327}
2328
2329struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
2330 .type = SAFEXCEL_ALG_TYPE_AEAD,
2331 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2332 .alg.aead = {
2333 .setkey = safexcel_aead_setkey,
2334 .encrypt = safexcel_aead_encrypt,
2335 .decrypt = safexcel_aead_decrypt,
2336 .ivsize = CTR_RFC3686_IV_SIZE,
2337 .maxauthsize = SHA512_DIGEST_SIZE,
2338 .base = {
2339 .cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2340 .cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
2341 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2342 .cra_flags = CRYPTO_ALG_ASYNC |
2343 CRYPTO_ALG_ALLOCATES_MEMORY |
2344 CRYPTO_ALG_KERN_DRIVER_ONLY,
2345 .cra_blocksize = 1,
2346 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2347 .cra_alignmask = 0,
2348 .cra_init = safexcel_aead_sha512_ctr_cra_init,
2349 .cra_exit = safexcel_aead_cra_exit,
2350 .cra_module = THIS_MODULE,
2351 },
2352 },
2353};
2354
2355static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
2356{
2357 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
2358
2359 safexcel_aead_sha384_cra_init(tfm);
2360 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
2361 return 0;
2362}
2363
2364struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
2365 .type = SAFEXCEL_ALG_TYPE_AEAD,
2366 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
2367 .alg.aead = {
2368 .setkey = safexcel_aead_setkey,
2369 .encrypt = safexcel_aead_encrypt,
2370 .decrypt = safexcel_aead_decrypt,
2371 .ivsize = CTR_RFC3686_IV_SIZE,
2372 .maxauthsize = SHA384_DIGEST_SIZE,
2373 .base = {
2374 .cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2375 .cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
2376 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2377 .cra_flags = CRYPTO_ALG_ASYNC |
2378 CRYPTO_ALG_ALLOCATES_MEMORY |
2379 CRYPTO_ALG_KERN_DRIVER_ONLY,
2380 .cra_blocksize = 1,
2381 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
2382 .cra_alignmask = 0,
2383 .cra_init = safexcel_aead_sha384_ctr_cra_init,
2384 .cra_exit = safexcel_aead_cra_exit,
2385 .cra_module = THIS_MODULE,
2386 },
2387 },
2388};
2389
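/*
 * XTS uses a double-length key: the first half is the data encryption
 * key, the second half the tweak key. Each half is expanded separately
 * and both are written into the context record back to back.
 */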
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

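/*
 * XTS needs at least one full AES block of payload: shorter requests
 * cannot be represented, since ciphertext stealing requires a complete
 * first block to work on.
 */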
static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

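/*
 * For GCM, the GHASH subkey H is derived by encrypting an all-zero
 * block with the cipher key. It is stored big endian in the ipad area
 * of the context record so the engine can load it directly.
 */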
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	memset(hashkey, 0, AES_BLOCK_SIZE);
	aes_encrypt(&aes, (u8 *)hashkey, (u8 *)hashkey);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->base.ipad.be[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->base.ipad.be[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	return 0;
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	safexcel_aead_cra_exit(tfm);
}

static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

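/*
 * For CCM, the AES key doubles as the CBC-MAC key: it is stored once as
 * the cipher key and once more, big endian, behind two AES blocks of
 * MAC state reserved at the start of the ipad area.
 */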
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->base.ipad.be[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0;
	return 0;
}

static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

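/*
 * The first IV byte is the CCM L' parameter (the length-field size
 * minus one); only values 1-7 are meaningful, so anything else is
 * rejected before the request is queued.
 */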
static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

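/* Key handling shared by the ChaCha20 skcipher and AEAD variants */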
static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
				     const u8 *key)
{
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
	ctx->key_len = CHACHA_KEY_SIZE;
}

static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
					     const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);

	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->ctrinit = 0;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}

struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
					   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;
	return 0;
}

static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}

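/*
 * Set up a software fallback instance of the same algorithm and size
 * the request context for whichever is larger: the driver's own request
 * state or the fallback's subrequest.
 */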
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}

static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->ctrinit = 0;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}

static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

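/* ESP variant: skip the explicit IV bytes when hashing the AAD */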
static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->base.priv;

	if (len != SM4_KEY_SIZE)
		return -EINVAL;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, SM4_KEY_SIZE);
	ctx->key_len = SM4_KEY_SIZE;

	return 0;
}

static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_ENCRYPT);
}

static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

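/* Split the RFC3686 nonce off the key before programming the SM4 key */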
static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	len -= CTR_RFC3686_NONCE_SIZE;

	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
}

static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* If input length > 0 only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
	ctx->state_sz = SM3_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_fallback_setkey,
		.setauthsize = safexcel_aead_fallback_setauthsize,
		.encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
		.decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

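/* RFC4106/RFC4543: the key carries a 4 byte salt, used here as the nonce */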
static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

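/*
 * RFC4309 uses a 3 byte salt plus a fixed 4 byte counter, hence the L'
 * value stored in the first nonce byte is always 3.
 */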
3520static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
3521 unsigned int len)
3522{
3523 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
3524 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3525
3526 /* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
3527 *(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
3528 /* last 3 bytes of key are the nonce! */
3529 memcpy((u8 *)&ctx->nonce + 1, key + len -
3530 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
3531 EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);
3532
3533 len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
3534 return safexcel_aead_ccm_setkey(ctfm, key, len);
3535}
3536
3537static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
3538 unsigned int authsize)
3539{
3540 /* Borrowed from crypto/ccm.c */
3541 switch (authsize) {
3542 case 8:
3543 case 12:
3544 case 16:
3545 break;
3546 default:
3547 return -EINVAL;
3548 }
3549
3550 return 0;
3551}
3552
3553static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
3554{
3555 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3556
3557 /* Borrowed from crypto/ccm.c */
3558 if (req->assoclen != 16 && req->assoclen != 20)
3559 return -EINVAL;
3560
3561 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
3562}
3563
3564static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
3565{
3566 struct safexcel_cipher_req *creq = aead_request_ctx(req);
3567
3568 /* Borrowed from crypto/ccm.c */
3569 if (req->assoclen != 16 && req->assoclen != 20)
3570 return -EINVAL;
3571
3572 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3573}
3574
static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
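
/*
 * Usage note (a minimal sketch, not part of this driver): the templates
 * above are registered with the generic crypto API by the safexcel core,
 * so a kernel user reaches them indirectly through the usual AEAD calls.
 * The key/keylen/err names below are illustrative only:
 *
 *	struct crypto_aead *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_aead("rfc4309(ccm(aes))", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_aead_setkey(tfm, key, keylen); // keylen includes the 3 salt bytes
 *	err = crypto_aead_setauthsize(tfm, 16);
 *
 * Requests are then built with aead_request_set_*() and complete
 * asynchronously via safexcel_queue_req().
 */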
135
136static void safexcel_aead_iv(struct safexcel_cipher_ctx *ctx, u8 *iv,
137 struct safexcel_command_desc *cdesc)
138{
139 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD ||
140 ctx->aead & EIP197_AEAD_TYPE_IPSEC_ESP) { /* _ESP and _ESP_GMAC */
141 /* 32 bit nonce */
142 cdesc->control_data.token[0] = ctx->nonce;
143 /* 64 bit IV part */
144 memcpy(&cdesc->control_data.token[1], iv, 8);
145 /* 32 bit counter, start at 0 or 1 (big endian!) */
146 cdesc->control_data.token[3] =
147 (__force u32)cpu_to_be32(ctx->ctrinit);
148 return;
149 }
150 if (ctx->xcm == EIP197_XCM_MODE_GCM || ctx->alg == SAFEXCEL_CHACHA20) {
151 /* 96 bit IV part */
152 memcpy(&cdesc->control_data.token[0], iv, 12);
153 /* 32 bit counter, start at 0 or 1 (big endian!) */
154 cdesc->control_data.token[3] =
155 (__force u32)cpu_to_be32(ctx->ctrinit);
156 return;
157 }
158 /* CBC */
159 memcpy(cdesc->control_data.token, iv, ctx->blocksz);
160}
161
162static void safexcel_aead_token(struct safexcel_cipher_ctx *ctx, u8 *iv,
163 struct safexcel_command_desc *cdesc,
164 struct safexcel_token *atoken,
165 enum safexcel_cipher_direction direction,
166 u32 cryptlen, u32 assoclen, u32 digestsize)
167{
168 struct safexcel_token *aadref;
169 int atoksize = 2; /* Start with minimum size */
170 int assocadj = assoclen - ctx->aadskip, aadalign;
171
172 /* Always 4 dwords of embedded IV for AEAD modes */
173 cdesc->control_data.options |= EIP197_OPTION_4_TOKEN_IV_CMD;
174
175 if (direction == SAFEXCEL_DECRYPT)
176 cryptlen -= digestsize;
177
178 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM)) {
179 /* Construct IV block B0 for the CBC-MAC */
180 u8 *final_iv = (u8 *)cdesc->control_data.token;
181 u8 *cbcmaciv = (u8 *)&atoken[1];
182 __le32 *aadlen = (__le32 *)&atoken[5];
183
184 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
185 /* Length + nonce */
186 cdesc->control_data.token[0] = ctx->nonce;
187 /* Fixup flags byte */
188 *(__le32 *)cbcmaciv =
189 cpu_to_le32(ctx->nonce |
190 ((assocadj > 0) << 6) |
191 ((digestsize - 2) << 2));
192 /* 64 bit IV part */
193 memcpy(&cdesc->control_data.token[1], iv, 8);
194 memcpy(cbcmaciv + 4, iv, 8);
195 /* Start counter at 0 */
196 cdesc->control_data.token[3] = 0;
197 /* Message length */
198 *(__be32 *)(cbcmaciv + 12) = cpu_to_be32(cryptlen);
199 } else {
200 /* Variable length IV part */
201 memcpy(final_iv, iv, 15 - iv[0]);
202 memcpy(cbcmaciv, iv, 15 - iv[0]);
203 /* Start variable length counter at 0 */
204 memset(final_iv + 15 - iv[0], 0, iv[0] + 1);
205 memset(cbcmaciv + 15 - iv[0], 0, iv[0] - 1);
206 /* fixup flags byte */
207 cbcmaciv[0] |= ((assocadj > 0) << 6) |
208 ((digestsize - 2) << 2);
209 /* insert lower 2 bytes of message length */
210 cbcmaciv[14] = cryptlen >> 8;
211 cbcmaciv[15] = cryptlen & 255;
212 }
213
214 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
215 atoken->packet_length = AES_BLOCK_SIZE +
216 ((assocadj > 0) << 1);
217 atoken->stat = 0;
218 atoken->instructions = EIP197_TOKEN_INS_ORIGIN_TOKEN |
219 EIP197_TOKEN_INS_TYPE_HASH;
220
221 if (likely(assocadj)) {
222 *aadlen = cpu_to_le32((assocadj >> 8) |
223 (assocadj & 255) << 8);
224 atoken += 6;
225 atoksize += 7;
226 } else {
227 atoken += 5;
228 atoksize += 6;
229 }
230
231 /* Process AAD data */
232 aadref = atoken;
233 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
234 atoken->packet_length = assocadj;
235 atoken->stat = 0;
236 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
237 atoken++;
238
239 /* For CCM only, align AAD data towards hash engine */
240 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
241 aadalign = (assocadj + 2) & 15;
242 atoken->packet_length = assocadj && aadalign ?
243 16 - aadalign :
244 0;
245 if (likely(cryptlen)) {
246 atoken->stat = 0;
247 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
248 } else {
249 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
250 atoken->instructions = EIP197_TOKEN_INS_LAST |
251 EIP197_TOKEN_INS_TYPE_HASH;
252 }
253 } else {
254 safexcel_aead_iv(ctx, iv, cdesc);
255
256 /* Process AAD data */
257 aadref = atoken;
258 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
259 atoken->packet_length = assocadj;
260 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
261 atoken->instructions = EIP197_TOKEN_INS_LAST |
262 EIP197_TOKEN_INS_TYPE_HASH;
263 }
264 atoken++;
265
266 if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
267 /* For ESP mode (and not GMAC), skip over the IV */
268 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
269 atoken->packet_length = EIP197_AEAD_IPSEC_IV_SIZE;
270 atoken->stat = 0;
271 atoken->instructions = 0;
272 atoken++;
273 atoksize++;
274 } else if (unlikely(ctx->alg == SAFEXCEL_CHACHA20 &&
275 direction == SAFEXCEL_DECRYPT)) {
276 /* Poly-chacha decryption needs a dummy NOP here ... */
277 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
278 atoken->packet_length = 16; /* According to Op Manual */
279 atoken->stat = 0;
280 atoken->instructions = 0;
281 atoken++;
282 atoksize++;
283 }
284
285 if (ctx->xcm) {
286 /* For GCM and CCM, obtain enc(Y0) */
287 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT_REMRES;
288 atoken->packet_length = 0;
289 atoken->stat = 0;
290 atoken->instructions = AES_BLOCK_SIZE;
291 atoken++;
292
293 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
294 atoken->packet_length = AES_BLOCK_SIZE;
295 atoken->stat = 0;
296 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
297 EIP197_TOKEN_INS_TYPE_CRYPTO;
298 atoken++;
299 atoksize += 2;
300 }
301
302 if (likely(cryptlen || ctx->alg == SAFEXCEL_CHACHA20)) {
303 /* Fixup stat field for AAD direction instruction */
304 aadref->stat = 0;
305
306 /* Process crypto data */
307 atoken->opcode = EIP197_TOKEN_OPCODE_DIRECTION;
308 atoken->packet_length = cryptlen;
309
310 if (unlikely(ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC)) {
311 /* Fixup instruction field for AAD dir instruction */
312 aadref->instructions = EIP197_TOKEN_INS_TYPE_HASH;
313
314 /* Do not send to crypt engine in case of GMAC */
315 atoken->instructions = EIP197_TOKEN_INS_LAST |
316 EIP197_TOKEN_INS_TYPE_HASH |
317 EIP197_TOKEN_INS_TYPE_OUTPUT;
318 } else {
319 atoken->instructions = EIP197_TOKEN_INS_LAST |
320 EIP197_TOKEN_INS_TYPE_CRYPTO |
321 EIP197_TOKEN_INS_TYPE_HASH |
322 EIP197_TOKEN_INS_TYPE_OUTPUT;
323 }
324
325 cryptlen &= 15;
326 if (unlikely(ctx->xcm == EIP197_XCM_MODE_CCM && cryptlen)) {
327 atoken->stat = 0;
328 /* For CCM only, pad crypto data to the hash engine */
329 atoken++;
330 atoksize++;
331 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
332 atoken->packet_length = 16 - cryptlen;
333 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
334 atoken->instructions = EIP197_TOKEN_INS_TYPE_HASH;
335 } else {
336 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH;
337 }
338 atoken++;
339 atoksize++;
340 }
341
342 if (direction == SAFEXCEL_ENCRYPT) {
343 /* Append ICV */
344 atoken->opcode = EIP197_TOKEN_OPCODE_INSERT;
345 atoken->packet_length = digestsize;
346 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
347 EIP197_TOKEN_STAT_LAST_PACKET;
348 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
349 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
350 } else {
351 /* Extract ICV */
352 atoken->opcode = EIP197_TOKEN_OPCODE_RETRIEVE;
353 atoken->packet_length = digestsize;
354 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
355 EIP197_TOKEN_STAT_LAST_PACKET;
356 atoken->instructions = EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
357 atoken++;
358 atoksize++;
359
360 /* Verify ICV */
361 atoken->opcode = EIP197_TOKEN_OPCODE_VERIFY;
362 atoken->packet_length = digestsize |
363 EIP197_TOKEN_HASH_RESULT_VERIFY;
364 atoken->stat = EIP197_TOKEN_STAT_LAST_HASH |
365 EIP197_TOKEN_STAT_LAST_PACKET;
366 atoken->instructions = EIP197_TOKEN_INS_TYPE_OUTPUT;
367 }
368
369 /* Fixup length of the token in the command descriptor */
370 cdesc->additional_cdata_size = atoksize;
371}
372
373static int safexcel_skcipher_aes_setkey(struct crypto_skcipher *ctfm,
374 const u8 *key, unsigned int len)
375{
376 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
377 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
378 struct safexcel_crypto_priv *priv = ctx->priv;
379 struct crypto_aes_ctx aes;
380 int ret, i;
381
382 ret = aes_expandkey(&aes, key, len);
383 if (ret)
384 return ret;
385
386 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
387 for (i = 0; i < len / sizeof(u32); i++) {
388 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
389 ctx->base.needs_inv = true;
390 break;
391 }
392 }
393 }
394
395 for (i = 0; i < len / sizeof(u32); i++)
396 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
397
398 ctx->key_len = len;
399
400 memzero_explicit(&aes, sizeof(aes));
401 return 0;
402}
403
404static int safexcel_aead_setkey(struct crypto_aead *ctfm, const u8 *key,
405 unsigned int len)
406{
407 struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
408 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
409 struct safexcel_ahash_export_state istate, ostate;
410 struct safexcel_crypto_priv *priv = ctx->priv;
411 struct crypto_authenc_keys keys;
412 struct crypto_aes_ctx aes;
413 int err = -EINVAL, i;
414
415 if (unlikely(crypto_authenc_extractkeys(&keys, key, len)))
416 goto badkey;
417
418 if (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD) {
419 /* Must have at least space for the nonce here */
420 if (unlikely(keys.enckeylen < CTR_RFC3686_NONCE_SIZE))
421 goto badkey;
422 /* last 4 bytes of key are the nonce! */
423 ctx->nonce = *(u32 *)(keys.enckey + keys.enckeylen -
424 CTR_RFC3686_NONCE_SIZE);
425 /* exclude the nonce here */
426 keys.enckeylen -= CTR_RFC3686_NONCE_SIZE;
427 }
428
429 /* Encryption key */
430 switch (ctx->alg) {
431 case SAFEXCEL_DES:
432 err = verify_aead_des_key(ctfm, keys.enckey, keys.enckeylen);
433 if (unlikely(err))
434 goto badkey;
435 break;
436 case SAFEXCEL_3DES:
437 err = verify_aead_des3_key(ctfm, keys.enckey, keys.enckeylen);
438 if (unlikely(err))
439 goto badkey;
440 break;
441 case SAFEXCEL_AES:
442 err = aes_expandkey(&aes, keys.enckey, keys.enckeylen);
443 if (unlikely(err))
444 goto badkey;
445 break;
446 case SAFEXCEL_SM4:
447 if (unlikely(keys.enckeylen != SM4_KEY_SIZE))
448 goto badkey;
449 break;
450 default:
451 dev_err(priv->dev, "aead: unsupported cipher algorithm\n");
452 goto badkey;
453 }
454
455 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
456 for (i = 0; i < keys.enckeylen / sizeof(u32); i++) {
457 if (le32_to_cpu(ctx->key[i]) !=
458 ((u32 *)keys.enckey)[i]) {
459 ctx->base.needs_inv = true;
460 break;
461 }
462 }
463 }
464
465 /* Auth key */
466 switch (ctx->hash_alg) {
467 case CONTEXT_CONTROL_CRYPTO_ALG_SHA1:
468 if (safexcel_hmac_setkey("safexcel-sha1", keys.authkey,
469 keys.authkeylen, &istate, &ostate))
470 goto badkey;
471 break;
472 case CONTEXT_CONTROL_CRYPTO_ALG_SHA224:
473 if (safexcel_hmac_setkey("safexcel-sha224", keys.authkey,
474 keys.authkeylen, &istate, &ostate))
475 goto badkey;
476 break;
477 case CONTEXT_CONTROL_CRYPTO_ALG_SHA256:
478 if (safexcel_hmac_setkey("safexcel-sha256", keys.authkey,
479 keys.authkeylen, &istate, &ostate))
480 goto badkey;
481 break;
482 case CONTEXT_CONTROL_CRYPTO_ALG_SHA384:
483 if (safexcel_hmac_setkey("safexcel-sha384", keys.authkey,
484 keys.authkeylen, &istate, &ostate))
485 goto badkey;
486 break;
487 case CONTEXT_CONTROL_CRYPTO_ALG_SHA512:
488 if (safexcel_hmac_setkey("safexcel-sha512", keys.authkey,
489 keys.authkeylen, &istate, &ostate))
490 goto badkey;
491 break;
492 case CONTEXT_CONTROL_CRYPTO_ALG_SM3:
493 if (safexcel_hmac_setkey("safexcel-sm3", keys.authkey,
494 keys.authkeylen, &istate, &ostate))
495 goto badkey;
496 break;
497 default:
498 dev_err(priv->dev, "aead: unsupported hash algorithm\n");
499 goto badkey;
500 }
501
502 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma &&
503 (memcmp(ctx->ipad, istate.state, ctx->state_sz) ||
504 memcmp(ctx->opad, ostate.state, ctx->state_sz)))
505 ctx->base.needs_inv = true;
506
507 /* Now copy the keys into the context */
508 for (i = 0; i < keys.enckeylen / sizeof(u32); i++)
509 ctx->key[i] = cpu_to_le32(((u32 *)keys.enckey)[i]);
510 ctx->key_len = keys.enckeylen;
511
512 memcpy(ctx->ipad, &istate.state, ctx->state_sz);
513 memcpy(ctx->opad, &ostate.state, ctx->state_sz);
514
515 memzero_explicit(&keys, sizeof(keys));
516 return 0;
517
518badkey:
519 memzero_explicit(&keys, sizeof(keys));
520 return err;
521}
522
523static int safexcel_context_control(struct safexcel_cipher_ctx *ctx,
524 struct crypto_async_request *async,
525 struct safexcel_cipher_req *sreq,
526 struct safexcel_command_desc *cdesc)
527{
528 struct safexcel_crypto_priv *priv = ctx->priv;
529 int ctrl_size = ctx->key_len / sizeof(u32);
530
531 cdesc->control_data.control1 = ctx->mode;
532
533 if (ctx->aead) {
534 /* Take in account the ipad+opad digests */
535 if (ctx->xcm) {
536 ctrl_size += ctx->state_sz / sizeof(u32);
537 cdesc->control_data.control0 =
538 CONTEXT_CONTROL_KEY_EN |
539 CONTEXT_CONTROL_DIGEST_XCM |
540 ctx->hash_alg |
541 CONTEXT_CONTROL_SIZE(ctrl_size);
542 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
543 /* Chacha20-Poly1305 */
544 cdesc->control_data.control0 =
545 CONTEXT_CONTROL_KEY_EN |
546 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20 |
547 (sreq->direction == SAFEXCEL_ENCRYPT ?
548 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT :
549 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN) |
550 ctx->hash_alg |
551 CONTEXT_CONTROL_SIZE(ctrl_size);
552 return 0;
553 } else {
554 ctrl_size += ctx->state_sz / sizeof(u32) * 2;
555 cdesc->control_data.control0 =
556 CONTEXT_CONTROL_KEY_EN |
557 CONTEXT_CONTROL_DIGEST_HMAC |
558 ctx->hash_alg |
559 CONTEXT_CONTROL_SIZE(ctrl_size);
560 }
561
562 if (sreq->direction == SAFEXCEL_ENCRYPT &&
563 (ctx->xcm == EIP197_XCM_MODE_CCM ||
564 ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP_GMAC))
565 cdesc->control_data.control0 |=
566 CONTEXT_CONTROL_TYPE_HASH_ENCRYPT_OUT;
567 else if (sreq->direction == SAFEXCEL_ENCRYPT)
568 cdesc->control_data.control0 |=
569 CONTEXT_CONTROL_TYPE_ENCRYPT_HASH_OUT;
570 else if (ctx->xcm == EIP197_XCM_MODE_CCM)
571 cdesc->control_data.control0 |=
572 CONTEXT_CONTROL_TYPE_DECRYPT_HASH_IN;
573 else
574 cdesc->control_data.control0 |=
575 CONTEXT_CONTROL_TYPE_HASH_DECRYPT_IN;
576 } else {
577 if (sreq->direction == SAFEXCEL_ENCRYPT)
578 cdesc->control_data.control0 =
579 CONTEXT_CONTROL_TYPE_CRYPTO_OUT |
580 CONTEXT_CONTROL_KEY_EN |
581 CONTEXT_CONTROL_SIZE(ctrl_size);
582 else
583 cdesc->control_data.control0 =
584 CONTEXT_CONTROL_TYPE_CRYPTO_IN |
585 CONTEXT_CONTROL_KEY_EN |
586 CONTEXT_CONTROL_SIZE(ctrl_size);
587 }
588
589 if (ctx->alg == SAFEXCEL_DES) {
590 cdesc->control_data.control0 |=
591 CONTEXT_CONTROL_CRYPTO_ALG_DES;
592 } else if (ctx->alg == SAFEXCEL_3DES) {
593 cdesc->control_data.control0 |=
594 CONTEXT_CONTROL_CRYPTO_ALG_3DES;
595 } else if (ctx->alg == SAFEXCEL_AES) {
596 switch (ctx->key_len >> ctx->xts) {
597 case AES_KEYSIZE_128:
598 cdesc->control_data.control0 |=
599 CONTEXT_CONTROL_CRYPTO_ALG_AES128;
600 break;
601 case AES_KEYSIZE_192:
602 cdesc->control_data.control0 |=
603 CONTEXT_CONTROL_CRYPTO_ALG_AES192;
604 break;
605 case AES_KEYSIZE_256:
606 cdesc->control_data.control0 |=
607 CONTEXT_CONTROL_CRYPTO_ALG_AES256;
608 break;
609 default:
610 dev_err(priv->dev, "aes keysize not supported: %u\n",
611 ctx->key_len >> ctx->xts);
612 return -EINVAL;
613 }
614 } else if (ctx->alg == SAFEXCEL_CHACHA20) {
615 cdesc->control_data.control0 |=
616 CONTEXT_CONTROL_CRYPTO_ALG_CHACHA20;
617 } else if (ctx->alg == SAFEXCEL_SM4) {
618 cdesc->control_data.control0 |=
619 CONTEXT_CONTROL_CRYPTO_ALG_SM4;
620 }
621
622 return 0;
623}
624
625static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv, int ring,
626 struct crypto_async_request *async,
627 struct scatterlist *src,
628 struct scatterlist *dst,
629 unsigned int cryptlen,
630 struct safexcel_cipher_req *sreq,
631 bool *should_complete, int *ret)
632{
633 struct skcipher_request *areq = skcipher_request_cast(async);
634 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
635 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(skcipher);
636 struct safexcel_result_desc *rdesc;
637 int ndesc = 0;
638
639 *ret = 0;
640
641 if (unlikely(!sreq->rdescs))
642 return 0;
643
644 while (sreq->rdescs--) {
645 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
646 if (IS_ERR(rdesc)) {
647 dev_err(priv->dev,
648 "cipher: result: could not retrieve the result descriptor\n");
649 *ret = PTR_ERR(rdesc);
650 break;
651 }
652
653 if (likely(!*ret))
654 *ret = safexcel_rdesc_check_errors(priv, rdesc);
655
656 ndesc++;
657 }
658
659 safexcel_complete(priv, ring);
660
661 if (src == dst) {
662 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
663 } else {
664 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
665 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
666 }
667
668 /*
669 * Update IV in req from last crypto output word for CBC modes
670 */
671 if ((!ctx->aead) && (ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
672 (sreq->direction == SAFEXCEL_ENCRYPT)) {
673 /* For encrypt take the last output word */
674 sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
675 crypto_skcipher_ivsize(skcipher),
676 (cryptlen -
677 crypto_skcipher_ivsize(skcipher)));
678 }
679
680 *should_complete = true;
681
682 return ndesc;
683}
684
685static int safexcel_send_req(struct crypto_async_request *base, int ring,
686 struct safexcel_cipher_req *sreq,
687 struct scatterlist *src, struct scatterlist *dst,
688 unsigned int cryptlen, unsigned int assoclen,
689 unsigned int digestsize, u8 *iv, int *commands,
690 int *results)
691{
692 struct skcipher_request *areq = skcipher_request_cast(base);
693 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(areq);
694 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
695 struct safexcel_crypto_priv *priv = ctx->priv;
696 struct safexcel_command_desc *cdesc;
697 struct safexcel_command_desc *first_cdesc = NULL;
698 struct safexcel_result_desc *rdesc, *first_rdesc = NULL;
699 struct scatterlist *sg;
700 unsigned int totlen;
701 unsigned int totlen_src = cryptlen + assoclen;
702 unsigned int totlen_dst = totlen_src;
703 struct safexcel_token *atoken;
704 int n_cdesc = 0, n_rdesc = 0;
705 int queued, i, ret = 0;
706 bool first = true;
707
708 sreq->nr_src = sg_nents_for_len(src, totlen_src);
709
710 if (ctx->aead) {
711 /*
712 * AEAD has auth tag appended to output for encrypt and
713 * removed from the output for decrypt!
714 */
715 if (sreq->direction == SAFEXCEL_DECRYPT)
716 totlen_dst -= digestsize;
717 else
718 totlen_dst += digestsize;
719
720 memcpy(ctx->base.ctxr->data + ctx->key_len / sizeof(u32),
721 ctx->ipad, ctx->state_sz);
722 if (!ctx->xcm)
723 memcpy(ctx->base.ctxr->data + (ctx->key_len +
724 ctx->state_sz) / sizeof(u32), ctx->opad,
725 ctx->state_sz);
726 } else if ((ctx->mode == CONTEXT_CONTROL_CRYPTO_MODE_CBC) &&
727 (sreq->direction == SAFEXCEL_DECRYPT)) {
728 /*
729 * Save IV from last crypto input word for CBC modes in decrypt
730 * direction. Need to do this first in case of inplace operation
731 * as it will be overwritten.
732 */
733 sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
734 crypto_skcipher_ivsize(skcipher),
735 (totlen_src -
736 crypto_skcipher_ivsize(skcipher)));
737 }
738
739 sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
740
741 /*
742 * Remember actual input length, source buffer length may be
743 * updated in case of inline operation below.
744 */
745 totlen = totlen_src;
746 queued = totlen_src;
747
748 if (src == dst) {
749 sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
750 sreq->nr_dst = sreq->nr_src;
751 if (unlikely((totlen_src || totlen_dst) &&
752 (sreq->nr_src <= 0))) {
753 dev_err(priv->dev, "In-place buffer not large enough (need %d bytes)!",
754 max(totlen_src, totlen_dst));
755 return -EINVAL;
756 }
757 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
758 } else {
759 if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
760 dev_err(priv->dev, "Source buffer not large enough (need %d bytes)!",
761 totlen_src);
762 return -EINVAL;
763 }
764 dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
765
766 if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
767 dev_err(priv->dev, "Dest buffer not large enough (need %d bytes)!",
768 totlen_dst);
769 dma_unmap_sg(priv->dev, src, sreq->nr_src,
770 DMA_TO_DEVICE);
771 return -EINVAL;
772 }
773 dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
774 }
775
776 memcpy(ctx->base.ctxr->data, ctx->key, ctx->key_len);
777
778 if (!totlen) {
779 /*
780 * The EIP97 cannot deal with zero length input packets!
781 * So stuff a dummy command descriptor indicating a 1 byte
782 * (dummy) input packet, using the context record as source.
783 */
784 first_cdesc = safexcel_add_cdesc(priv, ring,
785 1, 1, ctx->base.ctxr_dma,
786 1, 1, ctx->base.ctxr_dma,
787 &atoken);
788 if (IS_ERR(first_cdesc)) {
789 /* No space left in the command descriptor ring */
790 ret = PTR_ERR(first_cdesc);
791 goto cdesc_rollback;
792 }
793 n_cdesc = 1;
794 goto skip_cdesc;
795 }
796
797 /* command descriptors */
798 for_each_sg(src, sg, sreq->nr_src, i) {
799 int len = sg_dma_len(sg);
800
801 /* Do not overflow the request */
802 if (queued < len)
803 len = queued;
804
805 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
806 !(queued - len),
807 sg_dma_address(sg), len, totlen,
808 ctx->base.ctxr_dma, &atoken);
809 if (IS_ERR(cdesc)) {
810 /* No space left in the command descriptor ring */
811 ret = PTR_ERR(cdesc);
812 goto cdesc_rollback;
813 }
814
815 if (!n_cdesc)
816 first_cdesc = cdesc;
817
818 n_cdesc++;
819 queued -= len;
820 if (!queued)
821 break;
822 }
823skip_cdesc:
824 /* Add context control words and token to first command descriptor */
825 safexcel_context_control(ctx, base, sreq, first_cdesc);
826 if (ctx->aead)
827 safexcel_aead_token(ctx, iv, first_cdesc, atoken,
828 sreq->direction, cryptlen,
829 assoclen, digestsize);
830 else
831 safexcel_skcipher_token(ctx, iv, first_cdesc, atoken,
832 cryptlen);
833
834 /* result descriptors */
835 for_each_sg(dst, sg, sreq->nr_dst, i) {
836 bool last = (i == sreq->nr_dst - 1);
837 u32 len = sg_dma_len(sg);
838
839 /* only allow the part of the buffer we know we need */
840 if (len > totlen_dst)
841 len = totlen_dst;
842 if (unlikely(!len))
843 break;
844 totlen_dst -= len;
845
846 /* skip over AAD space in buffer - not written */
847 if (assoclen) {
848 if (assoclen >= len) {
849 assoclen -= len;
850 continue;
851 }
852 rdesc = safexcel_add_rdesc(priv, ring, first, last,
853 sg_dma_address(sg) +
854 assoclen,
855 len - assoclen);
856 assoclen = 0;
857 } else {
858 rdesc = safexcel_add_rdesc(priv, ring, first, last,
859 sg_dma_address(sg),
860 len);
861 }
862 if (IS_ERR(rdesc)) {
863 /* No space left in the result descriptor ring */
864 ret = PTR_ERR(rdesc);
865 goto rdesc_rollback;
866 }
867 if (first) {
868 first_rdesc = rdesc;
869 first = false;
870 }
871 n_rdesc++;
872 }
873
874 if (unlikely(first)) {
875 /*
876 * Special case: AEAD decrypt with only AAD data.
877 * In this case there is NO output data from the engine,
878 * but the engine still needs a result descriptor!
879 * Create a dummy one just for catching the result token.
880 */
881 rdesc = safexcel_add_rdesc(priv, ring, true, true, 0, 0);
882 if (IS_ERR(rdesc)) {
883 /* No space left in the result descriptor ring */
884 ret = PTR_ERR(rdesc);
885 goto rdesc_rollback;
886 }
887 first_rdesc = rdesc;
888 n_rdesc = 1;
889 }
890
891 safexcel_rdr_req_set(priv, ring, first_rdesc, base);
892
893 *commands = n_cdesc;
894 *results = n_rdesc;
895 return 0;
896
897rdesc_rollback:
898 for (i = 0; i < n_rdesc; i++)
899 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].rdr);
900cdesc_rollback:
901 for (i = 0; i < n_cdesc; i++)
902 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
903
904 if (src == dst) {
905 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
906 } else {
907 dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
908 dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
909 }
910
911 return ret;
912}
913
914static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
915 int ring,
916 struct crypto_async_request *base,
917 struct safexcel_cipher_req *sreq,
918 bool *should_complete, int *ret)
919{
920 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
921 struct safexcel_result_desc *rdesc;
922 int ndesc = 0, enq_ret;
923
924 *ret = 0;
925
926 if (unlikely(!sreq->rdescs))
927 return 0;
928
929 while (sreq->rdescs--) {
930 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
931 if (IS_ERR(rdesc)) {
932 dev_err(priv->dev,
933 "cipher: invalidate: could not retrieve the result descriptor\n");
934 *ret = PTR_ERR(rdesc);
935 break;
936 }
937
938 if (likely(!*ret))
939 *ret = safexcel_rdesc_check_errors(priv, rdesc);
940
941 ndesc++;
942 }
943
944 safexcel_complete(priv, ring);
945
946 if (ctx->base.exit_inv) {
947 dma_pool_free(priv->context_pool, ctx->base.ctxr,
948 ctx->base.ctxr_dma);
949
950 *should_complete = true;
951
952 return ndesc;
953 }
954
955 ring = safexcel_select_ring(priv);
956 ctx->base.ring = ring;
957
958 spin_lock_bh(&priv->ring[ring].queue_lock);
959 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
960 spin_unlock_bh(&priv->ring[ring].queue_lock);
961
962 if (enq_ret != -EINPROGRESS)
963 *ret = enq_ret;
964
965 queue_work(priv->ring[ring].workqueue,
966 &priv->ring[ring].work_data.work);
967
968 *should_complete = false;
969
970 return ndesc;
971}
972
973static int safexcel_skcipher_handle_result(struct safexcel_crypto_priv *priv,
974 int ring,
975 struct crypto_async_request *async,
976 bool *should_complete, int *ret)
977{
978 struct skcipher_request *req = skcipher_request_cast(async);
979 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
980 int err;
981
982 if (sreq->needs_inv) {
983 sreq->needs_inv = false;
984 err = safexcel_handle_inv_result(priv, ring, async, sreq,
985 should_complete, ret);
986 } else {
987 err = safexcel_handle_req_result(priv, ring, async, req->src,
988 req->dst, req->cryptlen, sreq,
989 should_complete, ret);
990 }
991
992 return err;
993}
994
995static int safexcel_aead_handle_result(struct safexcel_crypto_priv *priv,
996 int ring,
997 struct crypto_async_request *async,
998 bool *should_complete, int *ret)
999{
1000 struct aead_request *req = aead_request_cast(async);
1001 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1002 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1003 int err;
1004
1005 if (sreq->needs_inv) {
1006 sreq->needs_inv = false;
1007 err = safexcel_handle_inv_result(priv, ring, async, sreq,
1008 should_complete, ret);
1009 } else {
1010 err = safexcel_handle_req_result(priv, ring, async, req->src,
1011 req->dst,
1012 req->cryptlen + crypto_aead_authsize(tfm),
1013 sreq, should_complete, ret);
1014 }
1015
1016 return err;
1017}
1018
1019static int safexcel_cipher_send_inv(struct crypto_async_request *base,
1020 int ring, int *commands, int *results)
1021{
1022 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1023 struct safexcel_crypto_priv *priv = ctx->priv;
1024 int ret;
1025
1026 ret = safexcel_invalidate_cache(base, priv, ctx->base.ctxr_dma, ring);
1027 if (unlikely(ret))
1028 return ret;
1029
1030 *commands = 1;
1031 *results = 1;
1032
1033 return 0;
1034}
1035
1036static int safexcel_skcipher_send(struct crypto_async_request *async, int ring,
1037 int *commands, int *results)
1038{
1039 struct skcipher_request *req = skcipher_request_cast(async);
1040 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1041 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1042 struct safexcel_crypto_priv *priv = ctx->priv;
1043 int ret;
1044
1045 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1046
1047 if (sreq->needs_inv) {
1048 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1049 } else {
1050 struct crypto_skcipher *skcipher = crypto_skcipher_reqtfm(req);
1051 u8 input_iv[AES_BLOCK_SIZE];
1052
1053 /*
1054 * Save input IV in case of CBC decrypt mode
1055 * Will be overwritten with output IV prior to use!
1056 */
1057 memcpy(input_iv, req->iv, crypto_skcipher_ivsize(skcipher));
1058
1059 ret = safexcel_send_req(async, ring, sreq, req->src,
1060 req->dst, req->cryptlen, 0, 0, input_iv,
1061 commands, results);
1062 }
1063
1064 sreq->rdescs = *results;
1065 return ret;
1066}
1067
1068static int safexcel_aead_send(struct crypto_async_request *async, int ring,
1069 int *commands, int *results)
1070{
1071 struct aead_request *req = aead_request_cast(async);
1072 struct crypto_aead *tfm = crypto_aead_reqtfm(req);
1073 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(req->base.tfm);
1074 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1075 struct safexcel_crypto_priv *priv = ctx->priv;
1076 int ret;
1077
1078 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
1079
1080 if (sreq->needs_inv)
1081 ret = safexcel_cipher_send_inv(async, ring, commands, results);
1082 else
1083 ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
1084 req->cryptlen, req->assoclen,
1085 crypto_aead_authsize(tfm), req->iv,
1086 commands, results);
1087 sreq->rdescs = *results;
1088 return ret;
1089}
1090
1091static int safexcel_cipher_exit_inv(struct crypto_tfm *tfm,
1092 struct crypto_async_request *base,
1093 struct safexcel_cipher_req *sreq,
1094 struct safexcel_inv_result *result)
1095{
1096 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1097 struct safexcel_crypto_priv *priv = ctx->priv;
1098 int ring = ctx->base.ring;
1099
1100 init_completion(&result->completion);
1101
1102 ctx = crypto_tfm_ctx(base->tfm);
1103 ctx->base.exit_inv = true;
1104 sreq->needs_inv = true;
1105
1106 spin_lock_bh(&priv->ring[ring].queue_lock);
1107 crypto_enqueue_request(&priv->ring[ring].queue, base);
1108 spin_unlock_bh(&priv->ring[ring].queue_lock);
1109
1110 queue_work(priv->ring[ring].workqueue,
1111 &priv->ring[ring].work_data.work);
1112
1113 wait_for_completion(&result->completion);
1114
1115 if (result->error) {
1116 dev_warn(priv->dev,
1117 "cipher: sync: invalidate: completion error %d\n",
1118 result->error);
1119 return result->error;
1120 }
1121
1122 return 0;
1123}
1124
1125static int safexcel_skcipher_exit_inv(struct crypto_tfm *tfm)
1126{
1127 EIP197_REQUEST_ON_STACK(req, skcipher, EIP197_SKCIPHER_REQ_SIZE);
1128 struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
1129 struct safexcel_inv_result result = {};
1130
1131 memset(req, 0, sizeof(struct skcipher_request));
1132
1133 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1134 safexcel_inv_complete, &result);
1135 skcipher_request_set_tfm(req, __crypto_skcipher_cast(tfm));
1136
1137 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1138}
1139
1140static int safexcel_aead_exit_inv(struct crypto_tfm *tfm)
1141{
1142 EIP197_REQUEST_ON_STACK(req, aead, EIP197_AEAD_REQ_SIZE);
1143 struct safexcel_cipher_req *sreq = aead_request_ctx(req);
1144 struct safexcel_inv_result result = {};
1145
1146 memset(req, 0, sizeof(struct aead_request));
1147
1148 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1149 safexcel_inv_complete, &result);
1150 aead_request_set_tfm(req, __crypto_aead_cast(tfm));
1151
1152 return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
1153}
1154
1155static int safexcel_queue_req(struct crypto_async_request *base,
1156 struct safexcel_cipher_req *sreq,
1157 enum safexcel_cipher_direction dir)
1158{
1159 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(base->tfm);
1160 struct safexcel_crypto_priv *priv = ctx->priv;
1161 int ret, ring;
1162
1163 sreq->needs_inv = false;
1164 sreq->direction = dir;
1165
1166 if (ctx->base.ctxr) {
1167 if (priv->flags & EIP197_TRC_CACHE && ctx->base.needs_inv) {
1168 sreq->needs_inv = true;
1169 ctx->base.needs_inv = false;
1170 }
1171 } else {
1172 ctx->base.ring = safexcel_select_ring(priv);
1173 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
1174 EIP197_GFP_FLAGS(*base),
1175 &ctx->base.ctxr_dma);
1176 if (!ctx->base.ctxr)
1177 return -ENOMEM;
1178 }
1179
1180 ring = ctx->base.ring;
1181
1182 spin_lock_bh(&priv->ring[ring].queue_lock);
1183 ret = crypto_enqueue_request(&priv->ring[ring].queue, base);
1184 spin_unlock_bh(&priv->ring[ring].queue_lock);
1185
1186 queue_work(priv->ring[ring].workqueue,
1187 &priv->ring[ring].work_data.work);
1188
1189 return ret;
1190}
1191
1192static int safexcel_encrypt(struct skcipher_request *req)
1193{
1194 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1195 SAFEXCEL_ENCRYPT);
1196}
1197
1198static int safexcel_decrypt(struct skcipher_request *req)
1199{
1200 return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
1201 SAFEXCEL_DECRYPT);
1202}
1203
1204static int safexcel_skcipher_cra_init(struct crypto_tfm *tfm)
1205{
1206 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1207 struct safexcel_alg_template *tmpl =
1208 container_of(tfm->__crt_alg, struct safexcel_alg_template,
1209 alg.skcipher.base);
1210
1211 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
1212 sizeof(struct safexcel_cipher_req));
1213
1214 ctx->priv = tmpl->priv;
1215
1216 ctx->base.send = safexcel_skcipher_send;
1217 ctx->base.handle_result = safexcel_skcipher_handle_result;
1218 ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
1219 ctx->ctrinit = 1;
1220 return 0;
1221}
1222
1223static int safexcel_cipher_cra_exit(struct crypto_tfm *tfm)
1224{
1225 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1226
1227 memzero_explicit(ctx->key, sizeof(ctx->key));
1228
1229 /* context not allocated, skip invalidation */
1230 if (!ctx->base.ctxr)
1231 return -ENOMEM;
1232
1233 memzero_explicit(ctx->base.ctxr->data, sizeof(ctx->base.ctxr->data));
1234 return 0;
1235}
1236
1237static void safexcel_skcipher_cra_exit(struct crypto_tfm *tfm)
1238{
1239 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1240 struct safexcel_crypto_priv *priv = ctx->priv;
1241 int ret;
1242
1243 if (safexcel_cipher_cra_exit(tfm))
1244 return;
1245
1246 if (priv->flags & EIP197_TRC_CACHE) {
1247 ret = safexcel_skcipher_exit_inv(tfm);
1248 if (ret)
1249 dev_warn(priv->dev, "skcipher: invalidation error %d\n",
1250 ret);
1251 } else {
1252 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1253 ctx->base.ctxr_dma);
1254 }
1255}
1256
1257static void safexcel_aead_cra_exit(struct crypto_tfm *tfm)
1258{
1259 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1260 struct safexcel_crypto_priv *priv = ctx->priv;
1261 int ret;
1262
1263 if (safexcel_cipher_cra_exit(tfm))
1264 return;
1265
1266 if (priv->flags & EIP197_TRC_CACHE) {
1267 ret = safexcel_aead_exit_inv(tfm);
1268 if (ret)
1269 dev_warn(priv->dev, "aead: invalidation error %d\n",
1270 ret);
1271 } else {
1272 dma_pool_free(priv->context_pool, ctx->base.ctxr,
1273 ctx->base.ctxr_dma);
1274 }
1275}
1276
1277static int safexcel_skcipher_aes_ecb_cra_init(struct crypto_tfm *tfm)
1278{
1279 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1280
1281 safexcel_skcipher_cra_init(tfm);
1282 ctx->alg = SAFEXCEL_AES;
1283 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1284 ctx->blocksz = 0;
1285 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1286 return 0;
1287}
1288
1289struct safexcel_alg_template safexcel_alg_ecb_aes = {
1290 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1291 .algo_mask = SAFEXCEL_ALG_AES,
1292 .alg.skcipher = {
1293 .setkey = safexcel_skcipher_aes_setkey,
1294 .encrypt = safexcel_encrypt,
1295 .decrypt = safexcel_decrypt,
1296 .min_keysize = AES_MIN_KEY_SIZE,
1297 .max_keysize = AES_MAX_KEY_SIZE,
1298 .base = {
1299 .cra_name = "ecb(aes)",
1300 .cra_driver_name = "safexcel-ecb-aes",
1301 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1302 .cra_flags = CRYPTO_ALG_ASYNC |
1303 CRYPTO_ALG_ALLOCATES_MEMORY |
1304 CRYPTO_ALG_KERN_DRIVER_ONLY,
1305 .cra_blocksize = AES_BLOCK_SIZE,
1306 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1307 .cra_alignmask = 0,
1308 .cra_init = safexcel_skcipher_aes_ecb_cra_init,
1309 .cra_exit = safexcel_skcipher_cra_exit,
1310 .cra_module = THIS_MODULE,
1311 },
1312 },
1313};
1314
1315static int safexcel_skcipher_aes_cbc_cra_init(struct crypto_tfm *tfm)
1316{
1317 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1318
1319 safexcel_skcipher_cra_init(tfm);
1320 ctx->alg = SAFEXCEL_AES;
1321 ctx->blocksz = AES_BLOCK_SIZE;
1322 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1323 return 0;
1324}
1325
1326struct safexcel_alg_template safexcel_alg_cbc_aes = {
1327 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1328 .algo_mask = SAFEXCEL_ALG_AES,
1329 .alg.skcipher = {
1330 .setkey = safexcel_skcipher_aes_setkey,
1331 .encrypt = safexcel_encrypt,
1332 .decrypt = safexcel_decrypt,
1333 .min_keysize = AES_MIN_KEY_SIZE,
1334 .max_keysize = AES_MAX_KEY_SIZE,
1335 .ivsize = AES_BLOCK_SIZE,
1336 .base = {
1337 .cra_name = "cbc(aes)",
1338 .cra_driver_name = "safexcel-cbc-aes",
1339 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1340 .cra_flags = CRYPTO_ALG_ASYNC |
1341 CRYPTO_ALG_ALLOCATES_MEMORY |
1342 CRYPTO_ALG_KERN_DRIVER_ONLY,
1343 .cra_blocksize = AES_BLOCK_SIZE,
1344 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1345 .cra_alignmask = 0,
1346 .cra_init = safexcel_skcipher_aes_cbc_cra_init,
1347 .cra_exit = safexcel_skcipher_cra_exit,
1348 .cra_module = THIS_MODULE,
1349 },
1350 },
1351};
1352
1353static int safexcel_skcipher_aes_cfb_cra_init(struct crypto_tfm *tfm)
1354{
1355 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1356
1357 safexcel_skcipher_cra_init(tfm);
1358 ctx->alg = SAFEXCEL_AES;
1359 ctx->blocksz = AES_BLOCK_SIZE;
1360 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
1361 return 0;
1362}
1363
1364struct safexcel_alg_template safexcel_alg_cfb_aes = {
1365 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1366 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1367 .alg.skcipher = {
1368 .setkey = safexcel_skcipher_aes_setkey,
1369 .encrypt = safexcel_encrypt,
1370 .decrypt = safexcel_decrypt,
1371 .min_keysize = AES_MIN_KEY_SIZE,
1372 .max_keysize = AES_MAX_KEY_SIZE,
1373 .ivsize = AES_BLOCK_SIZE,
1374 .base = {
1375 .cra_name = "cfb(aes)",
1376 .cra_driver_name = "safexcel-cfb-aes",
1377 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1378 .cra_flags = CRYPTO_ALG_ASYNC |
1379 CRYPTO_ALG_ALLOCATES_MEMORY |
1380 CRYPTO_ALG_KERN_DRIVER_ONLY,
1381 .cra_blocksize = 1,
1382 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1383 .cra_alignmask = 0,
1384 .cra_init = safexcel_skcipher_aes_cfb_cra_init,
1385 .cra_exit = safexcel_skcipher_cra_exit,
1386 .cra_module = THIS_MODULE,
1387 },
1388 },
1389};
1390
1391static int safexcel_skcipher_aes_ofb_cra_init(struct crypto_tfm *tfm)
1392{
1393 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1394
1395 safexcel_skcipher_cra_init(tfm);
1396 ctx->alg = SAFEXCEL_AES;
1397 ctx->blocksz = AES_BLOCK_SIZE;
1398 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
1399 return 0;
1400}
1401
1402struct safexcel_alg_template safexcel_alg_ofb_aes = {
1403 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1404 .algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XFB,
1405 .alg.skcipher = {
1406 .setkey = safexcel_skcipher_aes_setkey,
1407 .encrypt = safexcel_encrypt,
1408 .decrypt = safexcel_decrypt,
1409 .min_keysize = AES_MIN_KEY_SIZE,
1410 .max_keysize = AES_MAX_KEY_SIZE,
1411 .ivsize = AES_BLOCK_SIZE,
1412 .base = {
1413 .cra_name = "ofb(aes)",
1414 .cra_driver_name = "safexcel-ofb-aes",
1415 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1416 .cra_flags = CRYPTO_ALG_ASYNC |
1417 CRYPTO_ALG_ALLOCATES_MEMORY |
1418 CRYPTO_ALG_KERN_DRIVER_ONLY,
1419 .cra_blocksize = 1,
1420 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1421 .cra_alignmask = 0,
1422 .cra_init = safexcel_skcipher_aes_ofb_cra_init,
1423 .cra_exit = safexcel_skcipher_cra_exit,
1424 .cra_module = THIS_MODULE,
1425 },
1426 },
1427};
1428
1429static int safexcel_skcipher_aesctr_setkey(struct crypto_skcipher *ctfm,
1430 const u8 *key, unsigned int len)
1431{
1432 struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
1433 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1434 struct safexcel_crypto_priv *priv = ctx->priv;
1435 struct crypto_aes_ctx aes;
1436 int ret, i;
1437 unsigned int keylen;
1438
1439 /* last 4 bytes of key are the nonce! */
1440 ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
1441 /* exclude the nonce here */
1442 keylen = len - CTR_RFC3686_NONCE_SIZE;
1443 ret = aes_expandkey(&aes, key, keylen);
1444 if (ret)
1445 return ret;
1446
1447 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
1448 for (i = 0; i < keylen / sizeof(u32); i++) {
1449 if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
1450 ctx->base.needs_inv = true;
1451 break;
1452 }
1453 }
1454 }
1455
1456 for (i = 0; i < keylen / sizeof(u32); i++)
1457 ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
1458
1459 ctx->key_len = keylen;
1460
1461 memzero_explicit(&aes, sizeof(aes));
1462 return 0;
1463}
1464
1465static int safexcel_skcipher_aes_ctr_cra_init(struct crypto_tfm *tfm)
1466{
1467 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1468
1469 safexcel_skcipher_cra_init(tfm);
1470 ctx->alg = SAFEXCEL_AES;
1471 ctx->blocksz = AES_BLOCK_SIZE;
1472 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
1473 return 0;
1474}
1475
1476struct safexcel_alg_template safexcel_alg_ctr_aes = {
1477 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1478 .algo_mask = SAFEXCEL_ALG_AES,
1479 .alg.skcipher = {
1480 .setkey = safexcel_skcipher_aesctr_setkey,
1481 .encrypt = safexcel_encrypt,
1482 .decrypt = safexcel_decrypt,
1483 /* Add nonce size */
1484 .min_keysize = AES_MIN_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1485 .max_keysize = AES_MAX_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
1486 .ivsize = CTR_RFC3686_IV_SIZE,
1487 .base = {
1488 .cra_name = "rfc3686(ctr(aes))",
1489 .cra_driver_name = "safexcel-ctr-aes",
1490 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1491 .cra_flags = CRYPTO_ALG_ASYNC |
1492 CRYPTO_ALG_ALLOCATES_MEMORY |
1493 CRYPTO_ALG_KERN_DRIVER_ONLY,
1494 .cra_blocksize = 1,
1495 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1496 .cra_alignmask = 0,
1497 .cra_init = safexcel_skcipher_aes_ctr_cra_init,
1498 .cra_exit = safexcel_skcipher_cra_exit,
1499 .cra_module = THIS_MODULE,
1500 },
1501 },
1502};
1503
1504static int safexcel_des_setkey(struct crypto_skcipher *ctfm, const u8 *key,
1505 unsigned int len)
1506{
1507 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1508 struct safexcel_crypto_priv *priv = ctx->priv;
1509 int ret;
1510
1511 ret = verify_skcipher_des_key(ctfm, key);
1512 if (ret)
1513 return ret;
1514
1515 /* if context exits and key changed, need to invalidate it */
1516 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1517 if (memcmp(ctx->key, key, len))
1518 ctx->base.needs_inv = true;
1519
1520 memcpy(ctx->key, key, len);
1521 ctx->key_len = len;
1522
1523 return 0;
1524}
1525
1526static int safexcel_skcipher_des_cbc_cra_init(struct crypto_tfm *tfm)
1527{
1528 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1529
1530 safexcel_skcipher_cra_init(tfm);
1531 ctx->alg = SAFEXCEL_DES;
1532 ctx->blocksz = DES_BLOCK_SIZE;
1533 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1534 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1535 return 0;
1536}
1537
1538struct safexcel_alg_template safexcel_alg_cbc_des = {
1539 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1540 .algo_mask = SAFEXCEL_ALG_DES,
1541 .alg.skcipher = {
1542 .setkey = safexcel_des_setkey,
1543 .encrypt = safexcel_encrypt,
1544 .decrypt = safexcel_decrypt,
1545 .min_keysize = DES_KEY_SIZE,
1546 .max_keysize = DES_KEY_SIZE,
1547 .ivsize = DES_BLOCK_SIZE,
1548 .base = {
1549 .cra_name = "cbc(des)",
1550 .cra_driver_name = "safexcel-cbc-des",
1551 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1552 .cra_flags = CRYPTO_ALG_ASYNC |
1553 CRYPTO_ALG_ALLOCATES_MEMORY |
1554 CRYPTO_ALG_KERN_DRIVER_ONLY,
1555 .cra_blocksize = DES_BLOCK_SIZE,
1556 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1557 .cra_alignmask = 0,
1558 .cra_init = safexcel_skcipher_des_cbc_cra_init,
1559 .cra_exit = safexcel_skcipher_cra_exit,
1560 .cra_module = THIS_MODULE,
1561 },
1562 },
1563};
1564
1565static int safexcel_skcipher_des_ecb_cra_init(struct crypto_tfm *tfm)
1566{
1567 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1568
1569 safexcel_skcipher_cra_init(tfm);
1570 ctx->alg = SAFEXCEL_DES;
1571 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1572 ctx->blocksz = 0;
1573 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1574 return 0;
1575}
1576
1577struct safexcel_alg_template safexcel_alg_ecb_des = {
1578 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1579 .algo_mask = SAFEXCEL_ALG_DES,
1580 .alg.skcipher = {
1581 .setkey = safexcel_des_setkey,
1582 .encrypt = safexcel_encrypt,
1583 .decrypt = safexcel_decrypt,
1584 .min_keysize = DES_KEY_SIZE,
1585 .max_keysize = DES_KEY_SIZE,
1586 .base = {
1587 .cra_name = "ecb(des)",
1588 .cra_driver_name = "safexcel-ecb-des",
1589 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1590 .cra_flags = CRYPTO_ALG_ASYNC |
1591 CRYPTO_ALG_ALLOCATES_MEMORY |
1592 CRYPTO_ALG_KERN_DRIVER_ONLY,
1593 .cra_blocksize = DES_BLOCK_SIZE,
1594 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1595 .cra_alignmask = 0,
1596 .cra_init = safexcel_skcipher_des_ecb_cra_init,
1597 .cra_exit = safexcel_skcipher_cra_exit,
1598 .cra_module = THIS_MODULE,
1599 },
1600 },
1601};
1602
1603static int safexcel_des3_ede_setkey(struct crypto_skcipher *ctfm,
1604 const u8 *key, unsigned int len)
1605{
1606 struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);
1607 struct safexcel_crypto_priv *priv = ctx->priv;
1608 int err;
1609
1610 err = verify_skcipher_des3_key(ctfm, key);
1611 if (err)
1612 return err;
1613
1614 /* if context exits and key changed, need to invalidate it */
1615 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
1616 if (memcmp(ctx->key, key, len))
1617 ctx->base.needs_inv = true;
1618
1619 memcpy(ctx->key, key, len);
1620 ctx->key_len = len;
1621
1622 return 0;
1623}
1624
1625static int safexcel_skcipher_des3_cbc_cra_init(struct crypto_tfm *tfm)
1626{
1627 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1628
1629 safexcel_skcipher_cra_init(tfm);
1630 ctx->alg = SAFEXCEL_3DES;
1631 ctx->blocksz = DES3_EDE_BLOCK_SIZE;
1632 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1633 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
1634 return 0;
1635}
1636
1637struct safexcel_alg_template safexcel_alg_cbc_des3_ede = {
1638 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1639 .algo_mask = SAFEXCEL_ALG_DES,
1640 .alg.skcipher = {
1641 .setkey = safexcel_des3_ede_setkey,
1642 .encrypt = safexcel_encrypt,
1643 .decrypt = safexcel_decrypt,
1644 .min_keysize = DES3_EDE_KEY_SIZE,
1645 .max_keysize = DES3_EDE_KEY_SIZE,
1646 .ivsize = DES3_EDE_BLOCK_SIZE,
1647 .base = {
1648 .cra_name = "cbc(des3_ede)",
1649 .cra_driver_name = "safexcel-cbc-des3_ede",
1650 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1651 .cra_flags = CRYPTO_ALG_ASYNC |
1652 CRYPTO_ALG_ALLOCATES_MEMORY |
1653 CRYPTO_ALG_KERN_DRIVER_ONLY,
1654 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1655 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1656 .cra_alignmask = 0,
1657 .cra_init = safexcel_skcipher_des3_cbc_cra_init,
1658 .cra_exit = safexcel_skcipher_cra_exit,
1659 .cra_module = THIS_MODULE,
1660 },
1661 },
1662};
1663
1664static int safexcel_skcipher_des3_ecb_cra_init(struct crypto_tfm *tfm)
1665{
1666 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
1667
1668 safexcel_skcipher_cra_init(tfm);
1669 ctx->alg = SAFEXCEL_3DES;
1670 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
1671 ctx->blocksz = 0;
1672 ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
1673 return 0;
1674}
1675
1676struct safexcel_alg_template safexcel_alg_ecb_des3_ede = {
1677 .type = SAFEXCEL_ALG_TYPE_SKCIPHER,
1678 .algo_mask = SAFEXCEL_ALG_DES,
1679 .alg.skcipher = {
1680 .setkey = safexcel_des3_ede_setkey,
1681 .encrypt = safexcel_encrypt,
1682 .decrypt = safexcel_decrypt,
1683 .min_keysize = DES3_EDE_KEY_SIZE,
1684 .max_keysize = DES3_EDE_KEY_SIZE,
1685 .base = {
1686 .cra_name = "ecb(des3_ede)",
1687 .cra_driver_name = "safexcel-ecb-des3_ede",
1688 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1689 .cra_flags = CRYPTO_ALG_ASYNC |
1690 CRYPTO_ALG_ALLOCATES_MEMORY |
1691 CRYPTO_ALG_KERN_DRIVER_ONLY,
1692 .cra_blocksize = DES3_EDE_BLOCK_SIZE,
1693 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
1694 .cra_alignmask = 0,
1695 .cra_init = safexcel_skcipher_des3_ecb_cra_init,
1696 .cra_exit = safexcel_skcipher_cra_exit,
1697 .cra_module = THIS_MODULE,
1698 },
1699 },
1700};
1701
1702static int safexcel_aead_encrypt(struct aead_request *req)
1703{
1704 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1705
1706 return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
1707}
1708
1709static int safexcel_aead_decrypt(struct aead_request *req)
1710{
1711 struct safexcel_cipher_req *creq = aead_request_ctx(req);
1712
1713 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
1714}
1715
static int safexcel_aead_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_alg_template *tmpl =
		container_of(tfm->__crt_alg, struct safexcel_alg_template,
			     alg.aead.base);

	crypto_aead_set_reqsize(__crypto_aead_cast(tfm),
				sizeof(struct safexcel_cipher_req));

	ctx->priv = tmpl->priv;

	ctx->alg = SAFEXCEL_AES; /* default */
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_4_TOKEN_IV_CMD;
	ctx->ctrinit = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC; /* default */
	ctx->aead = true;
	ctx->base.send = safexcel_aead_send;
	ctx->base.handle_result = safexcel_aead_handle_result;
	return 0;
}

static int safexcel_aead_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
	ctx->state_sz = SHA256_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
	ctx->state_sz = SHA512_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(aes))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_3DES; /* override default */
	ctx->blocksz = DES3_EDE_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des3_ede = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES3_EDE_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des3_ede))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des3_ede",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des3_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_des_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->alg = SAFEXCEL_DES; /* override default */
	ctx->blocksz = DES_BLOCK_SIZE;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_cbc_des = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_DES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = DES_BLOCK_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),cbc(des))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-cbc-des",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_des_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha1_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha1_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha1_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha256_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha256_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha256_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA256_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha256-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha256_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha224_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha224_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha224_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_256,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA224_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha224),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha224-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha224_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha512_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha512_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha512_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA512_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha512-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha512_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sha384_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sha384_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD; /* override default */
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha384_ctr_aes = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_SHA2_512,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA384_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha384-ctr-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sha384_ctr_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

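/*
 * XTS takes a double-length key: the first half is the cipher key, the
 * second half the tweak key. Both halves are expanded and stored back
 * to back in the context key area.
 */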
static int safexcel_skcipher_aesxts_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;
	unsigned int keylen;

	/* Check for illegal XTS keys */
	ret = xts_verify_key(ctfm, key, len);
	if (ret)
		return ret;

	/* Only half of the key data is cipher key */
	keylen = (len >> 1);
	ret = aes_expandkey(&aes, key, keylen);
	if (ret)
		return ret;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	/* The other half is the tweak key */
	ret = aes_expandkey(&aes, (u8 *)(key + keylen), keylen);
	if (ret) {
		/* Don't leave the expanded cipher key half on the stack */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < keylen / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i + keylen / sizeof(u32)]) !=
			    aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < keylen / sizeof(u32); i++)
		ctx->key[i + keylen / sizeof(u32)] =
			cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = keylen << 1;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_skcipher_aes_xts_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_AES;
	ctx->blocksz = AES_BLOCK_SIZE;
	ctx->xts = 1;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XTS;
	return 0;
}

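/* XTS needs at least one full block of input; reject anything shorter */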
static int safexcel_encrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_decrypt_xts(struct skcipher_request *req)
{
	if (req->cryptlen < XTS_BLOCK_SIZE)
		return -EINVAL;
	return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_xts_aes = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_AES_XTS,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_aesxts_setkey,
		.encrypt = safexcel_encrypt_xts,
		.decrypt = safexcel_decrypt_xts,
		/* XTS actually uses 2 AES keys glued together */
		.min_keysize = AES_MIN_KEY_SIZE * 2,
		.max_keysize = AES_MAX_KEY_SIZE * 2,
		.ivsize = XTS_BLOCK_SIZE,
		.base = {
			.cra_name = "xts(aes)",
			.cra_driver_name = "safexcel-xts-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = XTS_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_aes_xts_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

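/*
 * GCM key load: stores the AES key and derives the GHASH subkey H by
 * encrypting an all-zero block, using the software AES cipher
 * allocated at init time.
 */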
static int safexcel_aead_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	u32 hashkey[AES_BLOCK_SIZE >> 2];
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++)
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);

	ctx->key_len = len;

	/* Compute hash key by encrypting zeroes with cipher key */
	crypto_cipher_clear_flags(ctx->hkaes, CRYPTO_TFM_REQ_MASK);
	crypto_cipher_set_flags(ctx->hkaes, crypto_aead_get_flags(ctfm) &
				CRYPTO_TFM_REQ_MASK);
	ret = crypto_cipher_setkey(ctx->hkaes, key, len);
	if (ret) {
		/* Don't leave the expanded key on the stack on error */
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	memset(hashkey, 0, AES_BLOCK_SIZE);
	crypto_cipher_encrypt_one(ctx->hkaes, (u8 *)hashkey, (u8 *)hashkey);

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
			if (be32_to_cpu(ctx->ipad[i]) != hashkey[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
		ctx->ipad[i] = cpu_to_be32(hashkey[i]);

	memzero_explicit(hashkey, AES_BLOCK_SIZE);
	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

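/*
 * GCM init: GHASH itself runs on the engine; the software AES cipher
 * is only needed to derive the hash subkey at setkey time.
 */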
static int safexcel_aead_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_GHASH;
	ctx->state_sz = GHASH_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_GCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */

	ctx->hkaes = crypto_alloc_cipher("aes", 0, 0);
	return PTR_ERR_OR_ZERO(ctx->hkaes);
}

static void safexcel_aead_gcm_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->hkaes);
	safexcel_aead_cra_exit(tfm);
}

static int safexcel_aead_gcm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	return crypto_gcm_check_authsize(authsize);
}

struct safexcel_alg_template safexcel_alg_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_aead_gcm_setkey,
		.setauthsize = safexcel_aead_gcm_setauthsize,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = GCM_AES_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "gcm(aes)",
			.cra_driver_name = "safexcel-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

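/*
 * CCM reuses the AES key for both the CTR cipher and the CBC-MAC; a
 * copy of it is stored behind the first two AES blocks of hash state
 * so the engine can run its XCBC-style MAC.
 */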
static int safexcel_aead_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				    unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;
	struct crypto_aes_ctx aes;
	int ret, i;

	ret = aes_expandkey(&aes, key, len);
	if (ret) {
		memzero_explicit(&aes, sizeof(aes));
		return ret;
	}

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma) {
		for (i = 0; i < len / sizeof(u32); i++) {
			if (le32_to_cpu(ctx->key[i]) != aes.key_enc[i]) {
				ctx->base.needs_inv = true;
				break;
			}
		}
	}

	for (i = 0; i < len / sizeof(u32); i++) {
		ctx->key[i] = cpu_to_le32(aes.key_enc[i]);
		ctx->ipad[i + 2 * AES_BLOCK_SIZE / sizeof(u32)] =
			cpu_to_be32(aes.key_enc[i]);
	}

	ctx->key_len = len;
	ctx->state_sz = 2 * AES_BLOCK_SIZE + len;

	if (len == AES_KEYSIZE_192)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
	else if (len == AES_KEYSIZE_256)
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
	else
		ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;

	memzero_explicit(&aes, sizeof(aes));
	return 0;
}

static int safexcel_aead_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
	ctx->state_sz = 3 * AES_BLOCK_SIZE;
	ctx->xcm = EIP197_XCM_MODE_CCM;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_XCM; /* override default */
	ctx->ctrinit = 0;
	return 0;
}

static int safexcel_aead_ccm_setauthsize(struct crypto_aead *tfm,
					 unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

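/*
 * CCM: iv[0] holds L' = L - 1, with L the size of the length field;
 * only 2 <= L <= 8 is valid, hence the 1..7 check below.
 */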
static int safexcel_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	if (req->iv[0] < 1 || req->iv[0] > 7)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}

struct safexcel_alg_template safexcel_alg_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_aead_ccm_setkey,
		.setauthsize = safexcel_aead_ccm_setauthsize,
		.encrypt = safexcel_ccm_encrypt,
		.decrypt = safexcel_ccm_decrypt,
		.ivsize = AES_BLOCK_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "ccm(aes)",
			.cra_driver_name = "safexcel-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

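/*
 * Common ChaCha20 key load, shared by the plain skcipher and the AEAD
 * variants; invalidates a cached context record on key change.
 */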
static void safexcel_chacha20_setkey(struct safexcel_cipher_ctx *ctx,
				     const u8 *key)
{
	struct safexcel_crypto_priv *priv = ctx->priv;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, CHACHA_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, CHACHA_KEY_SIZE);
	ctx->key_len = CHACHA_KEY_SIZE;
}

static int safexcel_skcipher_chacha20_setkey(struct crypto_skcipher *ctfm,
					     const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_skcipher_ctx(ctfm);

	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_skcipher_chacha20_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->ctrinit = 0;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32;
	return 0;
}

struct safexcel_alg_template safexcel_alg_chacha20 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_CHACHA20,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_chacha20_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = CHACHA_KEY_SIZE,
		.max_keysize = CHACHA_KEY_SIZE,
		.ivsize = CHACHA_IV_SIZE,
		.base = {
			.cra_name = "chacha20",
			.cra_driver_name = "safexcel-chacha20",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_chacha20_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

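/*
 * For the rfc7539esp variant the 4 byte salt (nonce) is appended to
 * the key material, so it is split off before the length check.
 */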
static int safexcel_aead_chachapoly_setkey(struct crypto_aead *ctfm,
					   const u8 *key, unsigned int len)
{
	struct safexcel_cipher_ctx *ctx = crypto_aead_ctx(ctfm);

	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP &&
	    len > EIP197_AEAD_IPSEC_NONCE_SIZE) {
		/* ESP variant has nonce appended to key */
		len -= EIP197_AEAD_IPSEC_NONCE_SIZE;
		ctx->nonce = *(u32 *)(key + len);
	}
	if (len != CHACHA_KEY_SIZE)
		return -EINVAL;

	safexcel_chacha20_setkey(ctx, key);

	return 0;
}

static int safexcel_aead_chachapoly_setauthsize(struct crypto_aead *tfm,
						unsigned int authsize)
{
	if (authsize != POLY1305_DIGEST_SIZE)
		return -EINVAL;
	return 0;
}

static int safexcel_aead_chachapoly_crypt(struct aead_request *req,
					  enum safexcel_cipher_direction dir)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);
	u32 key[CHACHA_KEY_SIZE / sizeof(u32) + 1];
	int ret = 0;

	/*
	 * Instead of wasting time detecting umpteen silly corner cases,
	 * just dump all "small" requests to the fallback implementation.
	 * HW would not be faster on such small requests anyway.
	 */
	if (likely((ctx->aead != EIP197_AEAD_TYPE_IPSEC_ESP ||
		    req->assoclen >= EIP197_AEAD_IPSEC_IV_SIZE) &&
		   req->cryptlen > POLY1305_DIGEST_SIZE)) {
		return safexcel_queue_req(&req->base, creq, dir);
	}

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	memcpy(key, ctx->key, CHACHA_KEY_SIZE);
	if (ctx->aead == EIP197_AEAD_TYPE_IPSEC_ESP) {
		/* ESP variant has nonce appended to the key */
		key[CHACHA_KEY_SIZE / sizeof(u32)] = ctx->nonce;
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE +
					 EIP197_AEAD_IPSEC_NONCE_SIZE);
	} else {
		ret = crypto_aead_setkey(ctx->fback, (u8 *)key,
					 CHACHA_KEY_SIZE);
	}
	if (ret) {
		crypto_aead_clear_flags(aead, CRYPTO_TFM_REQ_MASK);
		crypto_aead_set_flags(aead, crypto_aead_get_flags(ctx->fback) &
					    CRYPTO_TFM_REQ_MASK);
		return ret;
	}

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_chachapoly_encrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_chachapoly_decrypt(struct aead_request *req)
{
	return safexcel_aead_chachapoly_crypt(req, SAFEXCEL_DECRYPT);
}

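/*
 * Shared init for AEADs that need a software fallback: allocates it
 * and sizes the request ctx to hold either a HW request or a fallback
 * subrequest, whichever is larger.
 */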
static int safexcel_aead_fallback_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_aead *aead = __crypto_aead_cast(tfm);
	struct aead_alg *alg = crypto_aead_alg(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);

	/* Allocate fallback implementation */
	ctx->fback = crypto_alloc_aead(alg->base.cra_name, 0,
				       CRYPTO_ALG_ASYNC |
				       CRYPTO_ALG_NEED_FALLBACK);
	if (IS_ERR(ctx->fback))
		return PTR_ERR(ctx->fback);

	crypto_aead_set_reqsize(aead, max(sizeof(struct safexcel_cipher_req),
					  sizeof(struct aead_request) +
					  crypto_aead_reqsize(ctx->fback)));

	return 0;
}

static int safexcel_aead_chachapoly_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_fallback_cra_init(tfm);
	ctx->alg = SAFEXCEL_CHACHA20;
	ctx->mode = CONTEXT_CONTROL_CHACHA20_MODE_256_32 |
		    CONTEXT_CONTROL_CHACHA20_MODE_CALC_OTK;
	ctx->ctrinit = 0;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_POLY1305;
	ctx->state_sz = 0; /* Precomputed by HW */
	return 0;
}

static void safexcel_aead_fallback_cra_exit(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_aead(ctx->fback);
	safexcel_aead_cra_exit(tfm);
}

struct safexcel_alg_template safexcel_alg_chachapoly = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapoly_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_chachapolyesp_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_chachapoly_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_chachapoly_esp = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_CHACHA20 | SAFEXCEL_ALG_POLY1305,
	.alg.aead = {
		.setkey = safexcel_aead_chachapoly_setkey,
		.setauthsize = safexcel_aead_chachapoly_setauthsize,
		.encrypt = safexcel_aead_chachapoly_encrypt,
		.decrypt = safexcel_aead_chachapoly_decrypt,
		.ivsize = CHACHAPOLY_IV_SIZE - EIP197_AEAD_IPSEC_NONCE_SIZE,
		.maxauthsize = POLY1305_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc7539esp(chacha20,poly1305)",
			.cra_driver_name = "safexcel-chacha20-poly1305-esp",
			/* +1 to put it above HW chacha + SW poly */
			.cra_priority = SAFEXCEL_CRA_PRIORITY + 1,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY |
				     CRYPTO_ALG_NEED_FALLBACK,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_chachapolyesp_cra_init,
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_setkey(struct crypto_skcipher *ctfm,
					const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct safexcel_crypto_priv *priv = ctx->priv;

	if (len != SM4_KEY_SIZE)
		return -EINVAL;

	if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr_dma)
		if (memcmp(ctx->key, key, SM4_KEY_SIZE))
			ctx->base.needs_inv = true;

	memcpy(ctx->key, key, SM4_KEY_SIZE);
	ctx->key_len = SM4_KEY_SIZE;

	return 0;
}

static int safexcel_sm4_blk_encrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_ENCRYPT);
}

static int safexcel_sm4_blk_decrypt(struct skcipher_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else
		return safexcel_queue_req(&req->base, skcipher_request_ctx(req),
					  SAFEXCEL_DECRYPT);
}

static int safexcel_skcipher_sm4_ecb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_ECB;
	ctx->blocksz = 0;
	ctx->ivmask = EIP197_OPTION_2_TOKEN_IV_CMD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ecb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.base = {
			.cra_name = "ecb(sm4)",
			.cra_driver_name = "safexcel-ecb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ecb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cbc_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CBC;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_sm4_blk_encrypt,
		.decrypt = safexcel_sm4_blk_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cbc(sm4)",
			.cra_driver_name = "safexcel-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cbc_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_ofb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_OFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ofb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "ofb(sm4)",
			.cra_driver_name = "safexcel-ofb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ofb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4_cfb_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CFB;
	return 0;
}

struct safexcel_alg_template safexcel_alg_cfb_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_AES_XFB,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		.min_keysize = SM4_KEY_SIZE,
		.max_keysize = SM4_KEY_SIZE,
		.ivsize = SM4_BLOCK_SIZE,
		.base = {
			.cra_name = "cfb(sm4)",
			.cra_driver_name = "safexcel-cfb-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_cfb_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_skcipher_sm4ctr_setkey(struct crypto_skcipher *ctfm,
					   const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_skcipher_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);
	/* exclude the nonce here */
	len -= CTR_RFC3686_NONCE_SIZE;

	return safexcel_skcipher_sm4_setkey(ctfm, key, len);
}

static int safexcel_skcipher_sm4_ctr_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_skcipher_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_SKCIPHER,
	.algo_mask = SAFEXCEL_ALG_SM4,
	.alg.skcipher = {
		.setkey = safexcel_skcipher_sm4ctr_setkey,
		.encrypt = safexcel_encrypt,
		.decrypt = safexcel_decrypt,
		/* Add nonce size */
		.min_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.max_keysize = SM4_KEY_SIZE + CTR_RFC3686_NONCE_SIZE,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.base = {
			.cra_name = "rfc3686(ctr(sm4))",
			.cra_driver_name = "safexcel-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_skcipher_sm4_ctr_cra_init,
			.cra_exit = safexcel_skcipher_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4_blk_encrypt(struct aead_request *req)
{
	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4_blk_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;

	return safexcel_queue_req(&req->base, aead_request_ctx(req),
				  SAFEXCEL_DECRYPT);
}

static int safexcel_aead_sm4cbc_sha1_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_cra_init(tfm);
	ctx->alg = SAFEXCEL_SM4;
	ctx->blocksz = SM4_BLOCK_SIZE;
	ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
	ctx->state_sz = SHA1_DIGEST_SIZE;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_cbc_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_sm4_blk_encrypt,
		.decrypt = safexcel_aead_sm4_blk_decrypt,
		.ivsize = SM4_BLOCK_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),cbc(sm4))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-cbc-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = SM4_BLOCK_SIZE,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4cbc_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_fallback_setkey(struct crypto_aead *ctfm,
					 const u8 *key, unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setkey(ctx->fback, (u8 *)key, len) ?:
	       safexcel_aead_setkey(ctfm, key, len);
}

static int safexcel_aead_fallback_setauthsize(struct crypto_aead *ctfm,
					      unsigned int authsize)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* Keep fallback cipher synchronized */
	return crypto_aead_setauthsize(ctx->fback, authsize);
}

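/*
 * Pass the request through to the software fallback transform,
 * preserving callback, buffers and AAD length.
 */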
static int safexcel_aead_fallback_crypt(struct aead_request *req,
					enum safexcel_cipher_direction dir)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(req);
	struct crypto_tfm *tfm = crypto_aead_tfm(aead);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	struct aead_request *subreq = aead_request_ctx(req);

	aead_request_set_tfm(subreq, ctx->fback);
	aead_request_set_callback(subreq, req->base.flags, req->base.complete,
				  req->base.data);
	aead_request_set_crypt(subreq, req->src, req->dst, req->cryptlen,
			       req->iv);
	aead_request_set_ad(subreq, req->assoclen);

	return (dir == SAFEXCEL_ENCRYPT) ?
		crypto_aead_encrypt(subreq) :
		crypto_aead_decrypt(subreq);
}

static int safexcel_aead_sm4cbc_sm3_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if (req->cryptlen & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen || req->assoclen) /* non-empty input only */
		return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);

	/* HW cannot do full (AAD+payload) zero length, use fallback */
	return safexcel_aead_fallback_crypt(req, SAFEXCEL_ENCRYPT);
}

static int safexcel_aead_sm4cbc_sm3_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);

	/* Workaround for HW bug: EIP96 4.3 does not report blocksize error */
	if ((req->cryptlen - crypto_aead_authsize(tfm)) & (SM4_BLOCK_SIZE - 1))
		return -EINVAL;
	else if (req->cryptlen > crypto_aead_authsize(tfm) || req->assoclen)
		/* Non-empty input only */
3452 return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
3453
3454 /* HW cannot do full (AAD+payload) zero length, use fallback */
3455 return safexcel_aead_fallback_crypt(req, SAFEXCEL_DECRYPT);
3456}
3457
3458static int safexcel_aead_sm4cbc_sm3_cra_init(struct crypto_tfm *tfm)
3459{
3460 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3461
3462 safexcel_aead_fallback_cra_init(tfm);
3463 ctx->alg = SAFEXCEL_SM4;
3464 ctx->blocksz = SM4_BLOCK_SIZE;
3465 ctx->hash_alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
3466 ctx->state_sz = SM3_DIGEST_SIZE;
3467 return 0;
3468}
3469
3470struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_cbc_sm4 = {
3471 .type = SAFEXCEL_ALG_TYPE_AEAD,
3472 .algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
3473 .alg.aead = {
3474 .setkey = safexcel_aead_fallback_setkey,
3475 .setauthsize = safexcel_aead_fallback_setauthsize,
3476 .encrypt = safexcel_aead_sm4cbc_sm3_encrypt,
3477 .decrypt = safexcel_aead_sm4cbc_sm3_decrypt,
3478 .ivsize = SM4_BLOCK_SIZE,
3479 .maxauthsize = SM3_DIGEST_SIZE,
3480 .base = {
3481 .cra_name = "authenc(hmac(sm3),cbc(sm4))",
3482 .cra_driver_name = "safexcel-authenc-hmac-sm3-cbc-sm4",
3483 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3484 .cra_flags = CRYPTO_ALG_ASYNC |
3485 CRYPTO_ALG_ALLOCATES_MEMORY |
3486 CRYPTO_ALG_KERN_DRIVER_ONLY |
3487 CRYPTO_ALG_NEED_FALLBACK,
3488 .cra_blocksize = SM4_BLOCK_SIZE,
3489 .cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
3490 .cra_alignmask = 0,
3491 .cra_init = safexcel_aead_sm4cbc_sm3_cra_init,
3492 .cra_exit = safexcel_aead_fallback_cra_exit,
3493 .cra_module = THIS_MODULE,
3494 },
3495 },
3496};
3497
3498static int safexcel_aead_sm4ctr_sha1_cra_init(struct crypto_tfm *tfm)
3499{
3500 struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
3501
3502 safexcel_aead_sm4cbc_sha1_cra_init(tfm);
3503 ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
3504 return 0;
3505}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sha1_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SHA1,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SHA1_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sha1),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sha1-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sha1_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_aead_sm4ctr_sm3_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	safexcel_aead_sm4cbc_sm3_cra_init(tfm);
	ctx->mode = CONTEXT_CONTROL_CRYPTO_MODE_CTR_LOAD;
	return 0;
}

struct safexcel_alg_template safexcel_alg_authenc_hmac_sm3_ctr_sm4 = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_SM4 | SAFEXCEL_ALG_SM3,
	.alg.aead = {
		.setkey = safexcel_aead_setkey,
		.encrypt = safexcel_aead_encrypt,
		.decrypt = safexcel_aead_decrypt,
		.ivsize = CTR_RFC3686_IV_SIZE,
		.maxauthsize = SM3_DIGEST_SIZE,
		.base = {
			.cra_name = "authenc(hmac(sm3),rfc3686(ctr(sm4)))",
			.cra_driver_name = "safexcel-authenc-hmac-sm3-ctr-sm4",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_aead_sm4ctr_sm3_cra_init,
			/* cra_init allocates the fallback tfm, so free it */
			.cra_exit = safexcel_aead_fallback_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4106_gcm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* last 4 bytes of key are the nonce! */
	ctx->nonce = *(u32 *)(key + len - CTR_RFC3686_NONCE_SIZE);

	len -= CTR_RFC3686_NONCE_SIZE;
	return safexcel_aead_gcm_setkey(ctfm, key, len);
}
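
/*
 * RFC 4106 key layout (standard, not driver-specific): the blob passed
 * to setkey is the AES key with a 4-byte salt appended, so a 20-byte
 * blob is a 128-bit AES key plus salt. The salt and the 8-byte
 * per-packet explicit IV together form the 12-byte GCM nonce:
 *
 *	key blob  = aes_key[16/24/32] || salt[4]
 *	GCM nonce = salt[4] || explicit_iv[8]
 */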

static int safexcel_rfc4106_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	return crypto_rfc4106_check_authsize(authsize);
}

static int safexcel_rfc4106_encrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_encrypt(req);
}

static int safexcel_rfc4106_decrypt(struct aead_request *req)
{
	return crypto_ipsec_check_assoclen(req->assoclen) ?:
	       safexcel_aead_decrypt(req);
}

static int safexcel_rfc4106_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}
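
/*
 * Why aadskip: with the kernel's rfc4106 convention the caller appends
 * the 8-byte explicit IV to the associated data, so assoclen is 16
 * (SPI + 32-bit sequence number + IV) or 20 (with a 64-bit extended
 * sequence number). Only the leading ESP header bytes must be
 * authenticated, so the engine is told to skip the trailing IV-sized
 * chunk of the AAD.
 */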

struct safexcel_alg_template safexcel_alg_rfc4106_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4106_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4106_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4106(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4106-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4106_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};
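
/*
 * Minimal usage sketch (illustration only, not part of the driver;
 * error handling trimmed). It drives the "rfc4106(gcm(aes))" AEAD
 * registered above with a hypothetical 20-byte key blob key_blob
 * (16-byte AES key + 4-byte salt) and an in-place buffer laid out as
 * AAD || payload || tag:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *	struct crypto_aead *tfm;
 *	struct aead_request *req;
 *	struct scatterlist sg[1];
 *	u8 iv[GCM_RFC4106_IV_SIZE], buf[16 + 16 + 16];
 *
 *	tfm = crypto_alloc_aead("rfc4106(gcm(aes))", 0, 0);
 *	crypto_aead_setkey(tfm, key_blob, 20);
 *	crypto_aead_setauthsize(tfm, 16);
 *	req = aead_request_alloc(tfm, GFP_KERNEL);
 *	sg_init_one(sg, buf, sizeof(buf));
 *	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				  crypto_req_done, &wait);
 *	aead_request_set_crypt(req, sg, sg, 16, iv); // 16 payload bytes
 *	aead_request_set_ad(req, 16);                // SPI + seq + IV
 *	crypto_wait_req(crypto_aead_encrypt(req), &wait);
 */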

static int safexcel_rfc4543_gcm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	if (authsize != GHASH_DIGEST_SIZE)
		return -EINVAL;

	return 0;
}
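
/*
 * RFC 4543 (GMAC) note: this construction provides authentication only,
 * so every input byte is treated as associated data, and the spec
 * permits no tag truncation; hence the single valid authsize of
 * 16 bytes enforced above.
 */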

static int safexcel_rfc4543_gcm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_gcm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP_GMAC;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4543_gcm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_GHASH,
	.alg.aead = {
		.setkey = safexcel_rfc4106_gcm_setkey,
		.setauthsize = safexcel_rfc4543_gcm_setauthsize,
		.encrypt = safexcel_rfc4106_encrypt,
		.decrypt = safexcel_rfc4106_decrypt,
		.ivsize = GCM_RFC4543_IV_SIZE,
		.maxauthsize = GHASH_DIGEST_SIZE,
		.base = {
			.cra_name = "rfc4543(gcm(aes))",
			.cra_driver_name = "safexcel-rfc4543-gcm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4543_gcm_cra_init,
			.cra_exit = safexcel_aead_gcm_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};

static int safexcel_rfc4309_ccm_setkey(struct crypto_aead *ctfm, const u8 *key,
				       unsigned int len)
{
	struct crypto_tfm *tfm = crypto_aead_tfm(ctfm);
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);

	/* First byte of the nonce = L = always 3 for RFC4309 (4 byte ctr) */
	*(u8 *)&ctx->nonce = EIP197_AEAD_IPSEC_COUNTER_SIZE - 1;
	/* last 3 bytes of key are the nonce! */
	memcpy((u8 *)&ctx->nonce + 1, key + len -
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE,
	       EIP197_AEAD_IPSEC_CCM_NONCE_SIZE);

	len -= EIP197_AEAD_IPSEC_CCM_NONCE_SIZE;
	return safexcel_aead_ccm_setkey(ctfm, key, len);
}
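
/*
 * RFC 4309 layout (standard CCM/ESP conventions): the key blob carries
 * a 3-byte salt after the AES key, and the CCM counter blocks use a
 * 4-byte block counter (L = 4, encoded as L - 1 = 3 in the flags byte):
 *
 *	key blob      = aes_key[16/24/32] || salt[3]
 *	CCM nonce     = salt[3] || explicit_iv[8]	(11 bytes = 15 - L)
 *	counter block = flags(3) || nonce[11] || counter[4]
 */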

static int safexcel_rfc4309_ccm_setauthsize(struct crypto_aead *tfm,
					    unsigned int authsize)
{
	/* Borrowed from crypto/ccm.c */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int safexcel_rfc4309_ccm_encrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
}

static int safexcel_rfc4309_ccm_decrypt(struct aead_request *req)
{
	struct safexcel_cipher_req *creq = aead_request_ctx(req);

	/* Borrowed from crypto/ccm.c */
	if (req->assoclen != 16 && req->assoclen != 20)
		return -EINVAL;

	return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
}
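
/*
 * The two accepted assoclen values mirror the rfc4106 case: 16 bytes
 * for SPI + 32-bit sequence number + 8-byte IV, or 20 bytes when a
 * 64-bit extended sequence number is used. Anything else cannot be a
 * valid ESP header and is rejected before touching the hardware.
 */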

static int safexcel_rfc4309_ccm_cra_init(struct crypto_tfm *tfm)
{
	struct safexcel_cipher_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = safexcel_aead_ccm_cra_init(tfm);
	ctx->aead = EIP197_AEAD_TYPE_IPSEC_ESP;
	ctx->aadskip = EIP197_AEAD_IPSEC_IV_SIZE;
	return ret;
}

struct safexcel_alg_template safexcel_alg_rfc4309_ccm = {
	.type = SAFEXCEL_ALG_TYPE_AEAD,
	.algo_mask = SAFEXCEL_ALG_AES | SAFEXCEL_ALG_CBC_MAC_ALL,
	.alg.aead = {
		.setkey = safexcel_rfc4309_ccm_setkey,
		.setauthsize = safexcel_rfc4309_ccm_setauthsize,
		.encrypt = safexcel_rfc4309_ccm_encrypt,
		.decrypt = safexcel_rfc4309_ccm_decrypt,
		.ivsize = EIP197_AEAD_IPSEC_IV_SIZE,
		.maxauthsize = AES_BLOCK_SIZE,
		.base = {
			.cra_name = "rfc4309(ccm(aes))",
			.cra_driver_name = "safexcel-rfc4309-ccm-aes",
			.cra_priority = SAFEXCEL_CRA_PRIORITY,
			.cra_flags = CRYPTO_ALG_ASYNC |
				     CRYPTO_ALG_ALLOCATES_MEMORY |
				     CRYPTO_ALG_KERN_DRIVER_ONLY,
			.cra_blocksize = 1,
			.cra_ctxsize = sizeof(struct safexcel_cipher_ctx),
			.cra_alignmask = 0,
			.cra_init = safexcel_rfc4309_ccm_cra_init,
			.cra_exit = safexcel_aead_cra_exit,
			.cra_module = THIS_MODULE,
		},
	},
};