Loading...
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
6 */
7
8#include <crypto/aes.h>
9#include <crypto/hmac.h>
10#include <crypto/md5.h>
11#include <crypto/sha1.h>
12#include <crypto/sha2.h>
13#include <crypto/sha3.h>
14#include <crypto/skcipher.h>
15#include <crypto/sm3.h>
16#include <crypto/internal/cipher.h>
17#include <linux/device.h>
18#include <linux/dma-mapping.h>
19#include <linux/dmapool.h>
20
21#include "safexcel.h"
22
23struct safexcel_ahash_ctx {
24 struct safexcel_context base;
25
26 u32 alg;
27 u8 key_sz;
28 bool cbcmac;
29 bool do_fallback;
30 bool fb_init_done;
31 bool fb_do_setkey;
32
33 struct crypto_aes_ctx *aes;
34 struct crypto_ahash *fback;
35 struct crypto_shash *shpre;
36 struct shash_desc *shdesc;
37};
38
39struct safexcel_ahash_req {
40 bool last_req;
41 bool finish;
42 bool hmac;
43 bool needs_inv;
44 bool hmac_zlen;
45 bool len_is_le;
46 bool not_first;
47 bool xcbcmac;
48
49 int nents;
50 dma_addr_t result_dma;
51
52 u32 digest;
53
54 u8 state_sz; /* expected state size, only set once */
55 u8 block_sz; /* block size, only set once */
56 u8 digest_sz; /* output digest size, only set once */
57 __le32 state[SHA3_512_BLOCK_SIZE /
58 sizeof(__le32)] __aligned(sizeof(__le32));
59
60 u64 len;
61 u64 processed;
62
63 u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
64 dma_addr_t cache_dma;
65 unsigned int cache_sz;
66
67 u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
68};
69
70static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
71{
72 return req->len - req->processed;
73}
74
75static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
76 u32 input_length, u32 result_length,
77 bool cbcmac)
78{
79 struct safexcel_token *token =
80 (struct safexcel_token *)cdesc->control_data.token;
81
82 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
83 token[0].packet_length = input_length;
84 token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
85
86 input_length &= 15;
87 if (unlikely(cbcmac && input_length)) {
88 token[0].stat = 0;
89 token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
90 token[1].packet_length = 16 - input_length;
91 token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
92 token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
93 } else {
94 token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
95 eip197_noop_token(&token[1]);
96 }
97
98 token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
99 token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
100 EIP197_TOKEN_STAT_LAST_PACKET;
101 token[2].packet_length = result_length;
102 token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
103 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
104
105 eip197_noop_token(&token[3]);
106}
107
108static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
109 struct safexcel_ahash_req *req,
110 struct safexcel_command_desc *cdesc)
111{
112 struct safexcel_crypto_priv *priv = ctx->base.priv;
113 u64 count = 0;
114
115 cdesc->control_data.control0 = ctx->alg;
116 cdesc->control_data.control1 = 0;
117
118 /*
119 * Copy the input digest if needed, and setup the context
120 * fields. Do this now as we need it to setup the first command
121 * descriptor.
122 */
123 if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
124 if (req->xcbcmac)
125 memcpy(ctx->base.ctxr->data, &ctx->base.ipad, ctx->key_sz);
126 else
127 memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
128
129 if (!req->finish && req->xcbcmac)
130 cdesc->control_data.control0 |=
131 CONTEXT_CONTROL_DIGEST_XCM |
132 CONTEXT_CONTROL_TYPE_HASH_OUT |
133 CONTEXT_CONTROL_NO_FINISH_HASH |
134 CONTEXT_CONTROL_SIZE(req->state_sz /
135 sizeof(u32));
136 else
137 cdesc->control_data.control0 |=
138 CONTEXT_CONTROL_DIGEST_XCM |
139 CONTEXT_CONTROL_TYPE_HASH_OUT |
140 CONTEXT_CONTROL_SIZE(req->state_sz /
141 sizeof(u32));
142 return;
143 } else if (!req->processed) {
144 /* First - and possibly only - block of basic hash only */
145 if (req->finish)
146 cdesc->control_data.control0 |= req->digest |
147 CONTEXT_CONTROL_TYPE_HASH_OUT |
148 CONTEXT_CONTROL_RESTART_HASH |
149 /* ensure its not 0! */
150 CONTEXT_CONTROL_SIZE(1);
151 else
152 cdesc->control_data.control0 |= req->digest |
153 CONTEXT_CONTROL_TYPE_HASH_OUT |
154 CONTEXT_CONTROL_RESTART_HASH |
155 CONTEXT_CONTROL_NO_FINISH_HASH |
156 /* ensure its not 0! */
157 CONTEXT_CONTROL_SIZE(1);
158 return;
159 }
160
161 /* Hash continuation or HMAC, setup (inner) digest from state */
162 memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
163
164 if (req->finish) {
165 /* Compute digest count for hash/HMAC finish operations */
166 if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
167 req->hmac_zlen || (req->processed != req->block_sz)) {
168 count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
169
170 /* This is a hardware limitation, as the
171 * counter must fit into an u32. This represents
172 * a fairly big amount of input data, so we
173 * shouldn't see this.
174 */
175 if (unlikely(count & 0xffffffff00000000ULL)) {
176 dev_warn(priv->dev,
177 "Input data is too big\n");
178 return;
179 }
180 }
181
182 if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
183 /* Special case: zero length HMAC */
184 req->hmac_zlen ||
185 /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
186 (req->processed != req->block_sz)) {
187 /* Basic hash continue operation, need digest + cnt */
188 cdesc->control_data.control0 |=
189 CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
190 CONTEXT_CONTROL_TYPE_HASH_OUT |
191 CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
192 /* For zero-len HMAC, don't finalize, already padded! */
193 if (req->hmac_zlen)
194 cdesc->control_data.control0 |=
195 CONTEXT_CONTROL_NO_FINISH_HASH;
196 cdesc->control_data.control1 |=
197 CONTEXT_CONTROL_DIGEST_CNT;
198 ctx->base.ctxr->data[req->state_sz >> 2] =
199 cpu_to_le32(count);
200 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
201
202 /* Clear zero-length HMAC flag for next operation! */
203 req->hmac_zlen = false;
204 } else { /* HMAC */
205 /* Need outer digest for HMAC finalization */
206 memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
207 &ctx->base.opad, req->state_sz);
208
209 /* Single pass HMAC - no digest count */
210 cdesc->control_data.control0 |=
211 CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
212 CONTEXT_CONTROL_TYPE_HASH_OUT |
213 CONTEXT_CONTROL_DIGEST_HMAC;
214 }
215 } else { /* Hash continuation, do not finish yet */
216 cdesc->control_data.control0 |=
217 CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
218 CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
219 CONTEXT_CONTROL_TYPE_HASH_OUT |
220 CONTEXT_CONTROL_NO_FINISH_HASH;
221 }
222}
223
224static int safexcel_ahash_enqueue(struct ahash_request *areq);
225
226static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
227 int ring,
228 struct crypto_async_request *async,
229 bool *should_complete, int *ret)
230{
231 struct safexcel_result_desc *rdesc;
232 struct ahash_request *areq = ahash_request_cast(async);
233 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
234 struct safexcel_ahash_req *sreq = ahash_request_ctx_dma(areq);
235 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
236 u64 cache_len;
237
238 *ret = 0;
239
240 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
241 if (IS_ERR(rdesc)) {
242 dev_err(priv->dev,
243 "hash: result: could not retrieve the result descriptor\n");
244 *ret = PTR_ERR(rdesc);
245 } else {
246 *ret = safexcel_rdesc_check_errors(priv, rdesc);
247 }
248
249 safexcel_complete(priv, ring);
250
251 if (sreq->nents) {
252 dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
253 sreq->nents = 0;
254 }
255
256 if (sreq->result_dma) {
257 dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
258 DMA_FROM_DEVICE);
259 sreq->result_dma = 0;
260 }
261
262 if (sreq->cache_dma) {
263 dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
264 DMA_TO_DEVICE);
265 sreq->cache_dma = 0;
266 sreq->cache_sz = 0;
267 }
268
269 if (sreq->finish) {
270 if (sreq->hmac &&
271 (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
272 /* Faking HMAC using hash - need to do outer hash */
273 memcpy(sreq->cache, sreq->state,
274 crypto_ahash_digestsize(ahash));
275
276 memcpy(sreq->state, &ctx->base.opad, sreq->digest_sz);
277
278 sreq->len = sreq->block_sz +
279 crypto_ahash_digestsize(ahash);
280 sreq->processed = sreq->block_sz;
281 sreq->hmac = 0;
282
283 if (priv->flags & EIP197_TRC_CACHE)
284 ctx->base.needs_inv = true;
285 areq->nbytes = 0;
286 safexcel_ahash_enqueue(areq);
287
288 *should_complete = false; /* Not done yet */
289 return 1;
290 }
291
292 if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
293 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
294 /* Undo final XOR with 0xffffffff ...*/
295 *(__le32 *)areq->result = ~sreq->state[0];
296 } else {
297 memcpy(areq->result, sreq->state,
298 crypto_ahash_digestsize(ahash));
299 }
300 }
301
302 cache_len = safexcel_queued_len(sreq);
303 if (cache_len)
304 memcpy(sreq->cache, sreq->cache_next, cache_len);
305
306 *should_complete = true;
307
308 return 1;
309}
310
311static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
312 int *commands, int *results)
313{
314 struct ahash_request *areq = ahash_request_cast(async);
315 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
316 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
317 struct safexcel_crypto_priv *priv = ctx->base.priv;
318 struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
319 struct safexcel_result_desc *rdesc;
320 struct scatterlist *sg;
321 struct safexcel_token *dmmy;
322 int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
323 u64 queued, len;
324
325 queued = safexcel_queued_len(req);
326 if (queued <= HASH_CACHE_SIZE)
327 cache_len = queued;
328 else
329 cache_len = queued - areq->nbytes;
330
331 if (!req->finish && !req->last_req) {
332 /* If this is not the last request and the queued data does not
333 * fit into full cache blocks, cache it for the next send call.
334 */
335 extra = queued & (HASH_CACHE_SIZE - 1);
336
337 /* If this is not the last request and the queued data
338 * is a multiple of a block, cache the last one for now.
339 */
340 if (!extra)
341 extra = HASH_CACHE_SIZE;
342
343 sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
344 req->cache_next, extra,
345 areq->nbytes - extra);
346
347 queued -= extra;
348
349 if (!queued) {
350 *commands = 0;
351 *results = 0;
352 return 0;
353 }
354
355 extra = 0;
356 }
357
358 if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
359 if (unlikely(cache_len < AES_BLOCK_SIZE)) {
360 /*
361 * Cache contains less than 1 full block, complete.
362 */
363 extra = AES_BLOCK_SIZE - cache_len;
364 if (queued > cache_len) {
365 /* More data follows: borrow bytes */
366 u64 tmp = queued - cache_len;
367
368 skip = min_t(u64, tmp, extra);
369 sg_pcopy_to_buffer(areq->src,
370 sg_nents(areq->src),
371 req->cache + cache_len,
372 skip, 0);
373 }
374 extra -= skip;
375 memset(req->cache + cache_len + skip, 0, extra);
376 if (!ctx->cbcmac && extra) {
377 // 10- padding for XCBCMAC & CMAC
378 req->cache[cache_len + skip] = 0x80;
379 // HW will use K2 iso K3 - compensate!
380 for (i = 0; i < AES_BLOCK_SIZE / 4; i++) {
381 u32 *cache = (void *)req->cache;
382 u32 *ipad = ctx->base.ipad.word;
383 u32 x;
384
385 x = ipad[i] ^ ipad[i + 4];
386 cache[i] ^= swab32(x);
387 }
388 }
389 cache_len = AES_BLOCK_SIZE;
390 queued = queued + extra;
391 }
392
393 /* XCBC continue: XOR previous result into 1st word */
394 crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
395 }
396
397 len = queued;
398 /* Add a command descriptor for the cached data, if any */
399 if (cache_len) {
400 req->cache_dma = dma_map_single(priv->dev, req->cache,
401 cache_len, DMA_TO_DEVICE);
402 if (dma_mapping_error(priv->dev, req->cache_dma))
403 return -EINVAL;
404
405 req->cache_sz = cache_len;
406 first_cdesc = safexcel_add_cdesc(priv, ring, 1,
407 (cache_len == len),
408 req->cache_dma, cache_len,
409 len, ctx->base.ctxr_dma,
410 &dmmy);
411 if (IS_ERR(first_cdesc)) {
412 ret = PTR_ERR(first_cdesc);
413 goto unmap_cache;
414 }
415 n_cdesc++;
416
417 queued -= cache_len;
418 if (!queued)
419 goto send_command;
420 }
421
422 /* Now handle the current ahash request buffer(s) */
423 req->nents = dma_map_sg(priv->dev, areq->src,
424 sg_nents_for_len(areq->src,
425 areq->nbytes),
426 DMA_TO_DEVICE);
427 if (!req->nents) {
428 ret = -ENOMEM;
429 goto cdesc_rollback;
430 }
431
432 for_each_sg(areq->src, sg, req->nents, i) {
433 int sglen = sg_dma_len(sg);
434
435 if (unlikely(sglen <= skip)) {
436 skip -= sglen;
437 continue;
438 }
439
440 /* Do not overflow the request */
441 if ((queued + skip) <= sglen)
442 sglen = queued;
443 else
444 sglen -= skip;
445
446 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
447 !(queued - sglen),
448 sg_dma_address(sg) + skip, sglen,
449 len, ctx->base.ctxr_dma, &dmmy);
450 if (IS_ERR(cdesc)) {
451 ret = PTR_ERR(cdesc);
452 goto unmap_sg;
453 }
454
455 if (!n_cdesc)
456 first_cdesc = cdesc;
457 n_cdesc++;
458
459 queued -= sglen;
460 if (!queued)
461 break;
462 skip = 0;
463 }
464
465send_command:
466 /* Setup the context options */
467 safexcel_context_control(ctx, req, first_cdesc);
468
469 /* Add the token */
470 safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
471
472 req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
473 DMA_FROM_DEVICE);
474 if (dma_mapping_error(priv->dev, req->result_dma)) {
475 ret = -EINVAL;
476 goto unmap_sg;
477 }
478
479 /* Add a result descriptor */
480 rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
481 req->digest_sz);
482 if (IS_ERR(rdesc)) {
483 ret = PTR_ERR(rdesc);
484 goto unmap_result;
485 }
486
487 safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
488
489 req->processed += len - extra;
490
491 *commands = n_cdesc;
492 *results = 1;
493 return 0;
494
495unmap_result:
496 dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
497 DMA_FROM_DEVICE);
498unmap_sg:
499 if (req->nents) {
500 dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
501 req->nents = 0;
502 }
503cdesc_rollback:
504 for (i = 0; i < n_cdesc; i++)
505 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
506unmap_cache:
507 if (req->cache_dma) {
508 dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
509 DMA_TO_DEVICE);
510 req->cache_dma = 0;
511 req->cache_sz = 0;
512 }
513
514 return ret;
515}
516
517static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
518 int ring,
519 struct crypto_async_request *async,
520 bool *should_complete, int *ret)
521{
522 struct safexcel_result_desc *rdesc;
523 struct ahash_request *areq = ahash_request_cast(async);
524 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
525 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
526 int enq_ret;
527
528 *ret = 0;
529
530 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
531 if (IS_ERR(rdesc)) {
532 dev_err(priv->dev,
533 "hash: invalidate: could not retrieve the result descriptor\n");
534 *ret = PTR_ERR(rdesc);
535 } else {
536 *ret = safexcel_rdesc_check_errors(priv, rdesc);
537 }
538
539 safexcel_complete(priv, ring);
540
541 if (ctx->base.exit_inv) {
542 dma_pool_free(priv->context_pool, ctx->base.ctxr,
543 ctx->base.ctxr_dma);
544
545 *should_complete = true;
546 return 1;
547 }
548
549 ring = safexcel_select_ring(priv);
550 ctx->base.ring = ring;
551
552 spin_lock_bh(&priv->ring[ring].queue_lock);
553 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
554 spin_unlock_bh(&priv->ring[ring].queue_lock);
555
556 if (enq_ret != -EINPROGRESS)
557 *ret = enq_ret;
558
559 queue_work(priv->ring[ring].workqueue,
560 &priv->ring[ring].work_data.work);
561
562 *should_complete = false;
563
564 return 1;
565}
566
567static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
568 struct crypto_async_request *async,
569 bool *should_complete, int *ret)
570{
571 struct ahash_request *areq = ahash_request_cast(async);
572 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
573 int err;
574
575 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
576
577 if (req->needs_inv) {
578 req->needs_inv = false;
579 err = safexcel_handle_inv_result(priv, ring, async,
580 should_complete, ret);
581 } else {
582 err = safexcel_handle_req_result(priv, ring, async,
583 should_complete, ret);
584 }
585
586 return err;
587}
588
589static int safexcel_ahash_send_inv(struct crypto_async_request *async,
590 int ring, int *commands, int *results)
591{
592 struct ahash_request *areq = ahash_request_cast(async);
593 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
594 int ret;
595
596 ret = safexcel_invalidate_cache(async, ctx->base.priv,
597 ctx->base.ctxr_dma, ring);
598 if (unlikely(ret))
599 return ret;
600
601 *commands = 1;
602 *results = 1;
603
604 return 0;
605}
606
607static int safexcel_ahash_send(struct crypto_async_request *async,
608 int ring, int *commands, int *results)
609{
610 struct ahash_request *areq = ahash_request_cast(async);
611 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
612 int ret;
613
614 if (req->needs_inv)
615 ret = safexcel_ahash_send_inv(async, ring, commands, results);
616 else
617 ret = safexcel_ahash_send_req(async, ring, commands, results);
618
619 return ret;
620}
621
622static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
623{
624 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
625 struct safexcel_crypto_priv *priv = ctx->base.priv;
626 EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
627 struct safexcel_ahash_req *rctx = ahash_request_ctx_dma(req);
628 DECLARE_CRYPTO_WAIT(result);
629 int ring = ctx->base.ring;
630 int err;
631
632 memset(req, 0, EIP197_AHASH_REQ_SIZE);
633
634 /* create invalidation request */
635 init_completion(&result.completion);
636 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
637 crypto_req_done, &result);
638
639 ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
640 ctx = crypto_tfm_ctx(req->base.tfm);
641 ctx->base.exit_inv = true;
642 rctx->needs_inv = true;
643
644 spin_lock_bh(&priv->ring[ring].queue_lock);
645 crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
646 spin_unlock_bh(&priv->ring[ring].queue_lock);
647
648 queue_work(priv->ring[ring].workqueue,
649 &priv->ring[ring].work_data.work);
650
651 err = crypto_wait_req(-EINPROGRESS, &result);
652
653 if (err) {
654 dev_warn(priv->dev, "hash: completion error (%d)\n", err);
655 return err;
656 }
657
658 return 0;
659}
660
661/* safexcel_ahash_cache: cache data until at least one request can be sent to
662 * the engine, aka. when there is at least 1 block size in the pipe.
663 */
664static int safexcel_ahash_cache(struct ahash_request *areq)
665{
666 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
667 u64 cache_len;
668
669 /* cache_len: everything accepted by the driver but not sent yet,
670 * tot sz handled by update() - last req sz - tot sz handled by send()
671 */
672 cache_len = safexcel_queued_len(req);
673
674 /*
675 * In case there isn't enough bytes to proceed (less than a
676 * block size), cache the data until we have enough.
677 */
678 if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
679 sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
680 req->cache + cache_len,
681 areq->nbytes, 0);
682 return 0;
683 }
684
685 /* We couldn't cache all the data */
686 return -E2BIG;
687}
688
689static int safexcel_ahash_enqueue(struct ahash_request *areq)
690{
691 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
692 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
693 struct safexcel_crypto_priv *priv = ctx->base.priv;
694 int ret, ring;
695
696 req->needs_inv = false;
697
698 if (ctx->base.ctxr) {
699 if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
700 /* invalidate for *any* non-XCBC continuation */
701 ((req->not_first && !req->xcbcmac) ||
702 /* invalidate if (i)digest changed */
703 memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
704 /* invalidate for HMAC finish with odigest changed */
705 (req->finish && req->hmac &&
706 memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
707 &ctx->base.opad, req->state_sz))))
708 /*
709 * We're still setting needs_inv here, even though it is
710 * cleared right away, because the needs_inv flag can be
711 * set in other functions and we want to keep the same
712 * logic.
713 */
714 ctx->base.needs_inv = true;
715
716 if (ctx->base.needs_inv) {
717 ctx->base.needs_inv = false;
718 req->needs_inv = true;
719 }
720 } else {
721 ctx->base.ring = safexcel_select_ring(priv);
722 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
723 EIP197_GFP_FLAGS(areq->base),
724 &ctx->base.ctxr_dma);
725 if (!ctx->base.ctxr)
726 return -ENOMEM;
727 }
728 req->not_first = true;
729
730 ring = ctx->base.ring;
731
732 spin_lock_bh(&priv->ring[ring].queue_lock);
733 ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
734 spin_unlock_bh(&priv->ring[ring].queue_lock);
735
736 queue_work(priv->ring[ring].workqueue,
737 &priv->ring[ring].work_data.work);
738
739 return ret;
740}
741
742static int safexcel_ahash_update(struct ahash_request *areq)
743{
744 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
745 int ret;
746
747 /* If the request is 0 length, do nothing */
748 if (!areq->nbytes)
749 return 0;
750
751 /* Add request to the cache if it fits */
752 ret = safexcel_ahash_cache(areq);
753
754 /* Update total request length */
755 req->len += areq->nbytes;
756
757 /* If not all data could fit into the cache, go process the excess.
758 * Also go process immediately for an HMAC IV precompute, which
759 * will never be finished at all, but needs to be processed anyway.
760 */
761 if ((ret && !req->finish) || req->last_req)
762 return safexcel_ahash_enqueue(areq);
763
764 return 0;
765}
766
767static int safexcel_ahash_final(struct ahash_request *areq)
768{
769 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
770 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
771
772 req->finish = true;
773
774 if (unlikely(!req->len && !areq->nbytes)) {
775 /*
776 * If we have an overall 0 length *hash* request:
777 * The HW cannot do 0 length hash, so we provide the correct
778 * result directly here.
779 */
780 if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
781 memcpy(areq->result, md5_zero_message_hash,
782 MD5_DIGEST_SIZE);
783 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
784 memcpy(areq->result, sha1_zero_message_hash,
785 SHA1_DIGEST_SIZE);
786 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
787 memcpy(areq->result, sha224_zero_message_hash,
788 SHA224_DIGEST_SIZE);
789 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
790 memcpy(areq->result, sha256_zero_message_hash,
791 SHA256_DIGEST_SIZE);
792 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
793 memcpy(areq->result, sha384_zero_message_hash,
794 SHA384_DIGEST_SIZE);
795 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
796 memcpy(areq->result, sha512_zero_message_hash,
797 SHA512_DIGEST_SIZE);
798 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
799 memcpy(areq->result,
800 EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
801 }
802
803 return 0;
804 } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
805 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
806 req->len == sizeof(u32) && !areq->nbytes)) {
807 /* Zero length CRC32 */
808 memcpy(areq->result, &ctx->base.ipad, sizeof(u32));
809 return 0;
810 } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
811 !areq->nbytes)) {
812 /* Zero length CBC MAC */
813 memset(areq->result, 0, AES_BLOCK_SIZE);
814 return 0;
815 } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
816 !areq->nbytes)) {
817 /* Zero length (X)CBC/CMAC */
818 int i;
819
820 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++) {
821 u32 *result = (void *)areq->result;
822
823 /* K3 */
824 result[i] = swab32(ctx->base.ipad.word[i + 4]);
825 }
826 areq->result[0] ^= 0x80; // 10- padding
827 aes_encrypt(ctx->aes, areq->result, areq->result);
828 return 0;
829 } else if (unlikely(req->hmac &&
830 (req->len == req->block_sz) &&
831 !areq->nbytes)) {
832 /*
833 * If we have an overall 0 length *HMAC* request:
834 * For HMAC, we need to finalize the inner digest
835 * and then perform the outer hash.
836 */
837
838 /* generate pad block in the cache */
839 /* start with a hash block of all zeroes */
840 memset(req->cache, 0, req->block_sz);
841 /* set the first byte to 0x80 to 'append a 1 bit' */
842 req->cache[0] = 0x80;
843 /* add the length in bits in the last 2 bytes */
844 if (req->len_is_le) {
845 /* Little endian length word (e.g. MD5) */
846 req->cache[req->block_sz-8] = (req->block_sz << 3) &
847 255;
848 req->cache[req->block_sz-7] = (req->block_sz >> 5);
849 } else {
850 /* Big endian length word (e.g. any SHA) */
851 req->cache[req->block_sz-2] = (req->block_sz >> 5);
852 req->cache[req->block_sz-1] = (req->block_sz << 3) &
853 255;
854 }
855
856 req->len += req->block_sz; /* plus 1 hash block */
857
858 /* Set special zero-length HMAC flag */
859 req->hmac_zlen = true;
860
861 /* Finalize HMAC */
862 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
863 } else if (req->hmac) {
864 /* Finalize HMAC */
865 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
866 }
867
868 return safexcel_ahash_enqueue(areq);
869}
870
871static int safexcel_ahash_finup(struct ahash_request *areq)
872{
873 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
874
875 req->finish = true;
876
877 safexcel_ahash_update(areq);
878 return safexcel_ahash_final(areq);
879}
880
881static int safexcel_ahash_export(struct ahash_request *areq, void *out)
882{
883 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
884 struct safexcel_ahash_export_state *export = out;
885
886 export->len = req->len;
887 export->processed = req->processed;
888
889 export->digest = req->digest;
890
891 memcpy(export->state, req->state, req->state_sz);
892 memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
893
894 return 0;
895}
896
897static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
898{
899 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
900 const struct safexcel_ahash_export_state *export = in;
901 int ret;
902
903 ret = crypto_ahash_init(areq);
904 if (ret)
905 return ret;
906
907 req->len = export->len;
908 req->processed = export->processed;
909
910 req->digest = export->digest;
911
912 memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
913 memcpy(req->state, export->state, req->state_sz);
914
915 return 0;
916}
917
918static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
919{
920 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
921 struct safexcel_alg_template *tmpl =
922 container_of(__crypto_ahash_alg(tfm->__crt_alg),
923 struct safexcel_alg_template, alg.ahash);
924
925 ctx->base.priv = tmpl->priv;
926 ctx->base.send = safexcel_ahash_send;
927 ctx->base.handle_result = safexcel_handle_result;
928 ctx->fb_do_setkey = false;
929
930 crypto_ahash_set_reqsize_dma(__crypto_ahash_cast(tfm),
931 sizeof(struct safexcel_ahash_req));
932 return 0;
933}
934
935static int safexcel_sha1_init(struct ahash_request *areq)
936{
937 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
938 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
939
940 memset(req, 0, sizeof(*req));
941
942 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
943 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
944 req->state_sz = SHA1_DIGEST_SIZE;
945 req->digest_sz = SHA1_DIGEST_SIZE;
946 req->block_sz = SHA1_BLOCK_SIZE;
947
948 return 0;
949}
950
951static int safexcel_sha1_digest(struct ahash_request *areq)
952{
953 int ret = safexcel_sha1_init(areq);
954
955 if (ret)
956 return ret;
957
958 return safexcel_ahash_finup(areq);
959}
960
961static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
962{
963 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
964 struct safexcel_crypto_priv *priv = ctx->base.priv;
965 int ret;
966
967 /* context not allocated, skip invalidation */
968 if (!ctx->base.ctxr)
969 return;
970
971 if (priv->flags & EIP197_TRC_CACHE) {
972 ret = safexcel_ahash_exit_inv(tfm);
973 if (ret)
974 dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
975 } else {
976 dma_pool_free(priv->context_pool, ctx->base.ctxr,
977 ctx->base.ctxr_dma);
978 }
979}
980
981struct safexcel_alg_template safexcel_alg_sha1 = {
982 .type = SAFEXCEL_ALG_TYPE_AHASH,
983 .algo_mask = SAFEXCEL_ALG_SHA1,
984 .alg.ahash = {
985 .init = safexcel_sha1_init,
986 .update = safexcel_ahash_update,
987 .final = safexcel_ahash_final,
988 .finup = safexcel_ahash_finup,
989 .digest = safexcel_sha1_digest,
990 .export = safexcel_ahash_export,
991 .import = safexcel_ahash_import,
992 .halg = {
993 .digestsize = SHA1_DIGEST_SIZE,
994 .statesize = sizeof(struct safexcel_ahash_export_state),
995 .base = {
996 .cra_name = "sha1",
997 .cra_driver_name = "safexcel-sha1",
998 .cra_priority = SAFEXCEL_CRA_PRIORITY,
999 .cra_flags = CRYPTO_ALG_ASYNC |
1000 CRYPTO_ALG_ALLOCATES_MEMORY |
1001 CRYPTO_ALG_KERN_DRIVER_ONLY,
1002 .cra_blocksize = SHA1_BLOCK_SIZE,
1003 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1004 .cra_init = safexcel_ahash_cra_init,
1005 .cra_exit = safexcel_ahash_cra_exit,
1006 .cra_module = THIS_MODULE,
1007 },
1008 },
1009 },
1010};
1011
1012static int safexcel_hmac_sha1_init(struct ahash_request *areq)
1013{
1014 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1015 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1016
1017 memset(req, 0, sizeof(*req));
1018
1019 /* Start from ipad precompute */
1020 memcpy(req->state, &ctx->base.ipad, SHA1_DIGEST_SIZE);
1021 /* Already processed the key^ipad part now! */
1022 req->len = SHA1_BLOCK_SIZE;
1023 req->processed = SHA1_BLOCK_SIZE;
1024
1025 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1026 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1027 req->state_sz = SHA1_DIGEST_SIZE;
1028 req->digest_sz = SHA1_DIGEST_SIZE;
1029 req->block_sz = SHA1_BLOCK_SIZE;
1030 req->hmac = true;
1031
1032 return 0;
1033}
1034
1035static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
1036{
1037 int ret = safexcel_hmac_sha1_init(areq);
1038
1039 if (ret)
1040 return ret;
1041
1042 return safexcel_ahash_finup(areq);
1043}
1044
1045static int safexcel_hmac_init_pad(struct ahash_request *areq,
1046 unsigned int blocksize, const u8 *key,
1047 unsigned int keylen, u8 *ipad, u8 *opad)
1048{
1049 DECLARE_CRYPTO_WAIT(result);
1050 struct scatterlist sg;
1051 int ret, i;
1052 u8 *keydup;
1053
1054 if (keylen <= blocksize) {
1055 memcpy(ipad, key, keylen);
1056 } else {
1057 keydup = kmemdup(key, keylen, GFP_KERNEL);
1058 if (!keydup)
1059 return -ENOMEM;
1060
1061 ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1062 crypto_req_done, &result);
1063 sg_init_one(&sg, keydup, keylen);
1064 ahash_request_set_crypt(areq, &sg, ipad, keylen);
1065
1066 ret = crypto_ahash_digest(areq);
1067 ret = crypto_wait_req(ret, &result);
1068
1069 /* Avoid leaking */
1070 kfree_sensitive(keydup);
1071
1072 if (ret)
1073 return ret;
1074
1075 keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1076 }
1077
1078 memset(ipad + keylen, 0, blocksize - keylen);
1079 memcpy(opad, ipad, blocksize);
1080
1081 for (i = 0; i < blocksize; i++) {
1082 ipad[i] ^= HMAC_IPAD_VALUE;
1083 opad[i] ^= HMAC_OPAD_VALUE;
1084 }
1085
1086 return 0;
1087}
1088
1089static int safexcel_hmac_init_iv(struct ahash_request *areq,
1090 unsigned int blocksize, u8 *pad, void *state)
1091{
1092 struct safexcel_ahash_req *req;
1093 DECLARE_CRYPTO_WAIT(result);
1094 struct scatterlist sg;
1095 int ret;
1096
1097 ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1098 crypto_req_done, &result);
1099 sg_init_one(&sg, pad, blocksize);
1100 ahash_request_set_crypt(areq, &sg, pad, blocksize);
1101
1102 ret = crypto_ahash_init(areq);
1103 if (ret)
1104 return ret;
1105
1106 req = ahash_request_ctx_dma(areq);
1107 req->hmac = true;
1108 req->last_req = true;
1109
1110 ret = crypto_ahash_update(areq);
1111 ret = crypto_wait_req(ret, &result);
1112
1113 return ret ?: crypto_ahash_export(areq, state);
1114}
1115
1116static int __safexcel_hmac_setkey(const char *alg, const u8 *key,
1117 unsigned int keylen,
1118 void *istate, void *ostate)
1119{
1120 struct ahash_request *areq;
1121 struct crypto_ahash *tfm;
1122 unsigned int blocksize;
1123 u8 *ipad, *opad;
1124 int ret;
1125
1126 tfm = crypto_alloc_ahash(alg, 0, 0);
1127 if (IS_ERR(tfm))
1128 return PTR_ERR(tfm);
1129
1130 areq = ahash_request_alloc(tfm, GFP_KERNEL);
1131 if (!areq) {
1132 ret = -ENOMEM;
1133 goto free_ahash;
1134 }
1135
1136 crypto_ahash_clear_flags(tfm, ~0);
1137 blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1138
1139 ipad = kcalloc(2, blocksize, GFP_KERNEL);
1140 if (!ipad) {
1141 ret = -ENOMEM;
1142 goto free_request;
1143 }
1144
1145 opad = ipad + blocksize;
1146
1147 ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
1148 if (ret)
1149 goto free_ipad;
1150
1151 ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
1152 if (ret)
1153 goto free_ipad;
1154
1155 ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
1156
1157free_ipad:
1158 kfree(ipad);
1159free_request:
1160 ahash_request_free(areq);
1161free_ahash:
1162 crypto_free_ahash(tfm);
1163
1164 return ret;
1165}
1166
1167int safexcel_hmac_setkey(struct safexcel_context *base, const u8 *key,
1168 unsigned int keylen, const char *alg,
1169 unsigned int state_sz)
1170{
1171 struct safexcel_crypto_priv *priv = base->priv;
1172 struct safexcel_ahash_export_state istate, ostate;
1173 int ret;
1174
1175 ret = __safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1176 if (ret)
1177 return ret;
1178
1179 if (priv->flags & EIP197_TRC_CACHE && base->ctxr &&
1180 (memcmp(&base->ipad, istate.state, state_sz) ||
1181 memcmp(&base->opad, ostate.state, state_sz)))
1182 base->needs_inv = true;
1183
1184 memcpy(&base->ipad, &istate.state, state_sz);
1185 memcpy(&base->opad, &ostate.state, state_sz);
1186
1187 return 0;
1188}
1189
1190static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1191 unsigned int keylen, const char *alg,
1192 unsigned int state_sz)
1193{
1194 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
1195
1196 return safexcel_hmac_setkey(&ctx->base, key, keylen, alg, state_sz);
1197}
1198
1199static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1200 unsigned int keylen)
1201{
1202 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1203 SHA1_DIGEST_SIZE);
1204}
1205
1206struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1207 .type = SAFEXCEL_ALG_TYPE_AHASH,
1208 .algo_mask = SAFEXCEL_ALG_SHA1,
1209 .alg.ahash = {
1210 .init = safexcel_hmac_sha1_init,
1211 .update = safexcel_ahash_update,
1212 .final = safexcel_ahash_final,
1213 .finup = safexcel_ahash_finup,
1214 .digest = safexcel_hmac_sha1_digest,
1215 .setkey = safexcel_hmac_sha1_setkey,
1216 .export = safexcel_ahash_export,
1217 .import = safexcel_ahash_import,
1218 .halg = {
1219 .digestsize = SHA1_DIGEST_SIZE,
1220 .statesize = sizeof(struct safexcel_ahash_export_state),
1221 .base = {
1222 .cra_name = "hmac(sha1)",
1223 .cra_driver_name = "safexcel-hmac-sha1",
1224 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1225 .cra_flags = CRYPTO_ALG_ASYNC |
1226 CRYPTO_ALG_ALLOCATES_MEMORY |
1227 CRYPTO_ALG_KERN_DRIVER_ONLY,
1228 .cra_blocksize = SHA1_BLOCK_SIZE,
1229 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1230 .cra_init = safexcel_ahash_cra_init,
1231 .cra_exit = safexcel_ahash_cra_exit,
1232 .cra_module = THIS_MODULE,
1233 },
1234 },
1235 },
1236};
1237
1238static int safexcel_sha256_init(struct ahash_request *areq)
1239{
1240 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1241 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1242
1243 memset(req, 0, sizeof(*req));
1244
1245 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1246 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1247 req->state_sz = SHA256_DIGEST_SIZE;
1248 req->digest_sz = SHA256_DIGEST_SIZE;
1249 req->block_sz = SHA256_BLOCK_SIZE;
1250
1251 return 0;
1252}
1253
1254static int safexcel_sha256_digest(struct ahash_request *areq)
1255{
1256 int ret = safexcel_sha256_init(areq);
1257
1258 if (ret)
1259 return ret;
1260
1261 return safexcel_ahash_finup(areq);
1262}
1263
1264struct safexcel_alg_template safexcel_alg_sha256 = {
1265 .type = SAFEXCEL_ALG_TYPE_AHASH,
1266 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1267 .alg.ahash = {
1268 .init = safexcel_sha256_init,
1269 .update = safexcel_ahash_update,
1270 .final = safexcel_ahash_final,
1271 .finup = safexcel_ahash_finup,
1272 .digest = safexcel_sha256_digest,
1273 .export = safexcel_ahash_export,
1274 .import = safexcel_ahash_import,
1275 .halg = {
1276 .digestsize = SHA256_DIGEST_SIZE,
1277 .statesize = sizeof(struct safexcel_ahash_export_state),
1278 .base = {
1279 .cra_name = "sha256",
1280 .cra_driver_name = "safexcel-sha256",
1281 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1282 .cra_flags = CRYPTO_ALG_ASYNC |
1283 CRYPTO_ALG_ALLOCATES_MEMORY |
1284 CRYPTO_ALG_KERN_DRIVER_ONLY,
1285 .cra_blocksize = SHA256_BLOCK_SIZE,
1286 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1287 .cra_init = safexcel_ahash_cra_init,
1288 .cra_exit = safexcel_ahash_cra_exit,
1289 .cra_module = THIS_MODULE,
1290 },
1291 },
1292 },
1293};
1294
1295static int safexcel_sha224_init(struct ahash_request *areq)
1296{
1297 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1298 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1299
1300 memset(req, 0, sizeof(*req));
1301
1302 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1303 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1304 req->state_sz = SHA256_DIGEST_SIZE;
1305 req->digest_sz = SHA256_DIGEST_SIZE;
1306 req->block_sz = SHA256_BLOCK_SIZE;
1307
1308 return 0;
1309}
1310
1311static int safexcel_sha224_digest(struct ahash_request *areq)
1312{
1313 int ret = safexcel_sha224_init(areq);
1314
1315 if (ret)
1316 return ret;
1317
1318 return safexcel_ahash_finup(areq);
1319}
1320
1321struct safexcel_alg_template safexcel_alg_sha224 = {
1322 .type = SAFEXCEL_ALG_TYPE_AHASH,
1323 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1324 .alg.ahash = {
1325 .init = safexcel_sha224_init,
1326 .update = safexcel_ahash_update,
1327 .final = safexcel_ahash_final,
1328 .finup = safexcel_ahash_finup,
1329 .digest = safexcel_sha224_digest,
1330 .export = safexcel_ahash_export,
1331 .import = safexcel_ahash_import,
1332 .halg = {
1333 .digestsize = SHA224_DIGEST_SIZE,
1334 .statesize = sizeof(struct safexcel_ahash_export_state),
1335 .base = {
1336 .cra_name = "sha224",
1337 .cra_driver_name = "safexcel-sha224",
1338 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1339 .cra_flags = CRYPTO_ALG_ASYNC |
1340 CRYPTO_ALG_ALLOCATES_MEMORY |
1341 CRYPTO_ALG_KERN_DRIVER_ONLY,
1342 .cra_blocksize = SHA224_BLOCK_SIZE,
1343 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1344 .cra_init = safexcel_ahash_cra_init,
1345 .cra_exit = safexcel_ahash_cra_exit,
1346 .cra_module = THIS_MODULE,
1347 },
1348 },
1349 },
1350};
1351
1352static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1353 unsigned int keylen)
1354{
1355 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1356 SHA256_DIGEST_SIZE);
1357}
1358
1359static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1360{
1361 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1362 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1363
1364 memset(req, 0, sizeof(*req));
1365
1366 /* Start from ipad precompute */
1367 memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1368 /* Already processed the key^ipad part now! */
1369 req->len = SHA256_BLOCK_SIZE;
1370 req->processed = SHA256_BLOCK_SIZE;
1371
1372 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1373 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1374 req->state_sz = SHA256_DIGEST_SIZE;
1375 req->digest_sz = SHA256_DIGEST_SIZE;
1376 req->block_sz = SHA256_BLOCK_SIZE;
1377 req->hmac = true;
1378
1379 return 0;
1380}
1381
1382static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1383{
1384 int ret = safexcel_hmac_sha224_init(areq);
1385
1386 if (ret)
1387 return ret;
1388
1389 return safexcel_ahash_finup(areq);
1390}
1391
1392struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1393 .type = SAFEXCEL_ALG_TYPE_AHASH,
1394 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1395 .alg.ahash = {
1396 .init = safexcel_hmac_sha224_init,
1397 .update = safexcel_ahash_update,
1398 .final = safexcel_ahash_final,
1399 .finup = safexcel_ahash_finup,
1400 .digest = safexcel_hmac_sha224_digest,
1401 .setkey = safexcel_hmac_sha224_setkey,
1402 .export = safexcel_ahash_export,
1403 .import = safexcel_ahash_import,
1404 .halg = {
1405 .digestsize = SHA224_DIGEST_SIZE,
1406 .statesize = sizeof(struct safexcel_ahash_export_state),
1407 .base = {
1408 .cra_name = "hmac(sha224)",
1409 .cra_driver_name = "safexcel-hmac-sha224",
1410 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1411 .cra_flags = CRYPTO_ALG_ASYNC |
1412 CRYPTO_ALG_ALLOCATES_MEMORY |
1413 CRYPTO_ALG_KERN_DRIVER_ONLY,
1414 .cra_blocksize = SHA224_BLOCK_SIZE,
1415 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1416 .cra_init = safexcel_ahash_cra_init,
1417 .cra_exit = safexcel_ahash_cra_exit,
1418 .cra_module = THIS_MODULE,
1419 },
1420 },
1421 },
1422};
1423
1424static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1425 unsigned int keylen)
1426{
1427 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1428 SHA256_DIGEST_SIZE);
1429}
1430
1431static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1432{
1433 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1434 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1435
1436 memset(req, 0, sizeof(*req));
1437
1438 /* Start from ipad precompute */
1439 memcpy(req->state, &ctx->base.ipad, SHA256_DIGEST_SIZE);
1440 /* Already processed the key^ipad part now! */
1441 req->len = SHA256_BLOCK_SIZE;
1442 req->processed = SHA256_BLOCK_SIZE;
1443
1444 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1445 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1446 req->state_sz = SHA256_DIGEST_SIZE;
1447 req->digest_sz = SHA256_DIGEST_SIZE;
1448 req->block_sz = SHA256_BLOCK_SIZE;
1449 req->hmac = true;
1450
1451 return 0;
1452}
1453
1454static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1455{
1456 int ret = safexcel_hmac_sha256_init(areq);
1457
1458 if (ret)
1459 return ret;
1460
1461 return safexcel_ahash_finup(areq);
1462}
1463
1464struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1465 .type = SAFEXCEL_ALG_TYPE_AHASH,
1466 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1467 .alg.ahash = {
1468 .init = safexcel_hmac_sha256_init,
1469 .update = safexcel_ahash_update,
1470 .final = safexcel_ahash_final,
1471 .finup = safexcel_ahash_finup,
1472 .digest = safexcel_hmac_sha256_digest,
1473 .setkey = safexcel_hmac_sha256_setkey,
1474 .export = safexcel_ahash_export,
1475 .import = safexcel_ahash_import,
1476 .halg = {
1477 .digestsize = SHA256_DIGEST_SIZE,
1478 .statesize = sizeof(struct safexcel_ahash_export_state),
1479 .base = {
1480 .cra_name = "hmac(sha256)",
1481 .cra_driver_name = "safexcel-hmac-sha256",
1482 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1483 .cra_flags = CRYPTO_ALG_ASYNC |
1484 CRYPTO_ALG_ALLOCATES_MEMORY |
1485 CRYPTO_ALG_KERN_DRIVER_ONLY,
1486 .cra_blocksize = SHA256_BLOCK_SIZE,
1487 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1488 .cra_init = safexcel_ahash_cra_init,
1489 .cra_exit = safexcel_ahash_cra_exit,
1490 .cra_module = THIS_MODULE,
1491 },
1492 },
1493 },
1494};
1495
1496static int safexcel_sha512_init(struct ahash_request *areq)
1497{
1498 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1499 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1500
1501 memset(req, 0, sizeof(*req));
1502
1503 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1504 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1505 req->state_sz = SHA512_DIGEST_SIZE;
1506 req->digest_sz = SHA512_DIGEST_SIZE;
1507 req->block_sz = SHA512_BLOCK_SIZE;
1508
1509 return 0;
1510}
1511
1512static int safexcel_sha512_digest(struct ahash_request *areq)
1513{
1514 int ret = safexcel_sha512_init(areq);
1515
1516 if (ret)
1517 return ret;
1518
1519 return safexcel_ahash_finup(areq);
1520}
1521
1522struct safexcel_alg_template safexcel_alg_sha512 = {
1523 .type = SAFEXCEL_ALG_TYPE_AHASH,
1524 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1525 .alg.ahash = {
1526 .init = safexcel_sha512_init,
1527 .update = safexcel_ahash_update,
1528 .final = safexcel_ahash_final,
1529 .finup = safexcel_ahash_finup,
1530 .digest = safexcel_sha512_digest,
1531 .export = safexcel_ahash_export,
1532 .import = safexcel_ahash_import,
1533 .halg = {
1534 .digestsize = SHA512_DIGEST_SIZE,
1535 .statesize = sizeof(struct safexcel_ahash_export_state),
1536 .base = {
1537 .cra_name = "sha512",
1538 .cra_driver_name = "safexcel-sha512",
1539 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1540 .cra_flags = CRYPTO_ALG_ASYNC |
1541 CRYPTO_ALG_ALLOCATES_MEMORY |
1542 CRYPTO_ALG_KERN_DRIVER_ONLY,
1543 .cra_blocksize = SHA512_BLOCK_SIZE,
1544 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1545 .cra_init = safexcel_ahash_cra_init,
1546 .cra_exit = safexcel_ahash_cra_exit,
1547 .cra_module = THIS_MODULE,
1548 },
1549 },
1550 },
1551};
1552
1553static int safexcel_sha384_init(struct ahash_request *areq)
1554{
1555 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1556 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1557
1558 memset(req, 0, sizeof(*req));
1559
1560 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1561 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1562 req->state_sz = SHA512_DIGEST_SIZE;
1563 req->digest_sz = SHA512_DIGEST_SIZE;
1564 req->block_sz = SHA512_BLOCK_SIZE;
1565
1566 return 0;
1567}
1568
1569static int safexcel_sha384_digest(struct ahash_request *areq)
1570{
1571 int ret = safexcel_sha384_init(areq);
1572
1573 if (ret)
1574 return ret;
1575
1576 return safexcel_ahash_finup(areq);
1577}
1578
1579struct safexcel_alg_template safexcel_alg_sha384 = {
1580 .type = SAFEXCEL_ALG_TYPE_AHASH,
1581 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1582 .alg.ahash = {
1583 .init = safexcel_sha384_init,
1584 .update = safexcel_ahash_update,
1585 .final = safexcel_ahash_final,
1586 .finup = safexcel_ahash_finup,
1587 .digest = safexcel_sha384_digest,
1588 .export = safexcel_ahash_export,
1589 .import = safexcel_ahash_import,
1590 .halg = {
1591 .digestsize = SHA384_DIGEST_SIZE,
1592 .statesize = sizeof(struct safexcel_ahash_export_state),
1593 .base = {
1594 .cra_name = "sha384",
1595 .cra_driver_name = "safexcel-sha384",
1596 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1597 .cra_flags = CRYPTO_ALG_ASYNC |
1598 CRYPTO_ALG_ALLOCATES_MEMORY |
1599 CRYPTO_ALG_KERN_DRIVER_ONLY,
1600 .cra_blocksize = SHA384_BLOCK_SIZE,
1601 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1602 .cra_init = safexcel_ahash_cra_init,
1603 .cra_exit = safexcel_ahash_cra_exit,
1604 .cra_module = THIS_MODULE,
1605 },
1606 },
1607 },
1608};
1609
1610static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1611 unsigned int keylen)
1612{
1613 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1614 SHA512_DIGEST_SIZE);
1615}
1616
1617static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1618{
1619 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1620 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1621
1622 memset(req, 0, sizeof(*req));
1623
1624 /* Start from ipad precompute */
1625 memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1626 /* Already processed the key^ipad part now! */
1627 req->len = SHA512_BLOCK_SIZE;
1628 req->processed = SHA512_BLOCK_SIZE;
1629
1630 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1631 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1632 req->state_sz = SHA512_DIGEST_SIZE;
1633 req->digest_sz = SHA512_DIGEST_SIZE;
1634 req->block_sz = SHA512_BLOCK_SIZE;
1635 req->hmac = true;
1636
1637 return 0;
1638}
1639
1640static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1641{
1642 int ret = safexcel_hmac_sha512_init(areq);
1643
1644 if (ret)
1645 return ret;
1646
1647 return safexcel_ahash_finup(areq);
1648}
1649
1650struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1651 .type = SAFEXCEL_ALG_TYPE_AHASH,
1652 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1653 .alg.ahash = {
1654 .init = safexcel_hmac_sha512_init,
1655 .update = safexcel_ahash_update,
1656 .final = safexcel_ahash_final,
1657 .finup = safexcel_ahash_finup,
1658 .digest = safexcel_hmac_sha512_digest,
1659 .setkey = safexcel_hmac_sha512_setkey,
1660 .export = safexcel_ahash_export,
1661 .import = safexcel_ahash_import,
1662 .halg = {
1663 .digestsize = SHA512_DIGEST_SIZE,
1664 .statesize = sizeof(struct safexcel_ahash_export_state),
1665 .base = {
1666 .cra_name = "hmac(sha512)",
1667 .cra_driver_name = "safexcel-hmac-sha512",
1668 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1669 .cra_flags = CRYPTO_ALG_ASYNC |
1670 CRYPTO_ALG_ALLOCATES_MEMORY |
1671 CRYPTO_ALG_KERN_DRIVER_ONLY,
1672 .cra_blocksize = SHA512_BLOCK_SIZE,
1673 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1674 .cra_init = safexcel_ahash_cra_init,
1675 .cra_exit = safexcel_ahash_cra_exit,
1676 .cra_module = THIS_MODULE,
1677 },
1678 },
1679 },
1680};
1681
1682static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1683 unsigned int keylen)
1684{
1685 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1686 SHA512_DIGEST_SIZE);
1687}
1688
1689static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1690{
1691 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1692 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1693
1694 memset(req, 0, sizeof(*req));
1695
1696 /* Start from ipad precompute */
1697 memcpy(req->state, &ctx->base.ipad, SHA512_DIGEST_SIZE);
1698 /* Already processed the key^ipad part now! */
1699 req->len = SHA512_BLOCK_SIZE;
1700 req->processed = SHA512_BLOCK_SIZE;
1701
1702 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1703 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1704 req->state_sz = SHA512_DIGEST_SIZE;
1705 req->digest_sz = SHA512_DIGEST_SIZE;
1706 req->block_sz = SHA512_BLOCK_SIZE;
1707 req->hmac = true;
1708
1709 return 0;
1710}
1711
1712static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1713{
1714 int ret = safexcel_hmac_sha384_init(areq);
1715
1716 if (ret)
1717 return ret;
1718
1719 return safexcel_ahash_finup(areq);
1720}
1721
1722struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1723 .type = SAFEXCEL_ALG_TYPE_AHASH,
1724 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1725 .alg.ahash = {
1726 .init = safexcel_hmac_sha384_init,
1727 .update = safexcel_ahash_update,
1728 .final = safexcel_ahash_final,
1729 .finup = safexcel_ahash_finup,
1730 .digest = safexcel_hmac_sha384_digest,
1731 .setkey = safexcel_hmac_sha384_setkey,
1732 .export = safexcel_ahash_export,
1733 .import = safexcel_ahash_import,
1734 .halg = {
1735 .digestsize = SHA384_DIGEST_SIZE,
1736 .statesize = sizeof(struct safexcel_ahash_export_state),
1737 .base = {
1738 .cra_name = "hmac(sha384)",
1739 .cra_driver_name = "safexcel-hmac-sha384",
1740 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1741 .cra_flags = CRYPTO_ALG_ASYNC |
1742 CRYPTO_ALG_ALLOCATES_MEMORY |
1743 CRYPTO_ALG_KERN_DRIVER_ONLY,
1744 .cra_blocksize = SHA384_BLOCK_SIZE,
1745 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1746 .cra_init = safexcel_ahash_cra_init,
1747 .cra_exit = safexcel_ahash_cra_exit,
1748 .cra_module = THIS_MODULE,
1749 },
1750 },
1751 },
1752};
1753
1754static int safexcel_md5_init(struct ahash_request *areq)
1755{
1756 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1757 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1758
1759 memset(req, 0, sizeof(*req));
1760
1761 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1762 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1763 req->state_sz = MD5_DIGEST_SIZE;
1764 req->digest_sz = MD5_DIGEST_SIZE;
1765 req->block_sz = MD5_HMAC_BLOCK_SIZE;
1766
1767 return 0;
1768}
1769
1770static int safexcel_md5_digest(struct ahash_request *areq)
1771{
1772 int ret = safexcel_md5_init(areq);
1773
1774 if (ret)
1775 return ret;
1776
1777 return safexcel_ahash_finup(areq);
1778}
1779
1780struct safexcel_alg_template safexcel_alg_md5 = {
1781 .type = SAFEXCEL_ALG_TYPE_AHASH,
1782 .algo_mask = SAFEXCEL_ALG_MD5,
1783 .alg.ahash = {
1784 .init = safexcel_md5_init,
1785 .update = safexcel_ahash_update,
1786 .final = safexcel_ahash_final,
1787 .finup = safexcel_ahash_finup,
1788 .digest = safexcel_md5_digest,
1789 .export = safexcel_ahash_export,
1790 .import = safexcel_ahash_import,
1791 .halg = {
1792 .digestsize = MD5_DIGEST_SIZE,
1793 .statesize = sizeof(struct safexcel_ahash_export_state),
1794 .base = {
1795 .cra_name = "md5",
1796 .cra_driver_name = "safexcel-md5",
1797 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1798 .cra_flags = CRYPTO_ALG_ASYNC |
1799 CRYPTO_ALG_ALLOCATES_MEMORY |
1800 CRYPTO_ALG_KERN_DRIVER_ONLY,
1801 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1802 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1803 .cra_init = safexcel_ahash_cra_init,
1804 .cra_exit = safexcel_ahash_cra_exit,
1805 .cra_module = THIS_MODULE,
1806 },
1807 },
1808 },
1809};
1810
1811static int safexcel_hmac_md5_init(struct ahash_request *areq)
1812{
1813 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1814 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1815
1816 memset(req, 0, sizeof(*req));
1817
1818 /* Start from ipad precompute */
1819 memcpy(req->state, &ctx->base.ipad, MD5_DIGEST_SIZE);
1820 /* Already processed the key^ipad part now! */
1821 req->len = MD5_HMAC_BLOCK_SIZE;
1822 req->processed = MD5_HMAC_BLOCK_SIZE;
1823
1824 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1825 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1826 req->state_sz = MD5_DIGEST_SIZE;
1827 req->digest_sz = MD5_DIGEST_SIZE;
1828 req->block_sz = MD5_HMAC_BLOCK_SIZE;
1829 req->len_is_le = true; /* MD5 is little endian! ... */
1830 req->hmac = true;
1831
1832 return 0;
1833}
1834
1835static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1836 unsigned int keylen)
1837{
1838 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1839 MD5_DIGEST_SIZE);
1840}
1841
1842static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1843{
1844 int ret = safexcel_hmac_md5_init(areq);
1845
1846 if (ret)
1847 return ret;
1848
1849 return safexcel_ahash_finup(areq);
1850}
1851
1852struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1853 .type = SAFEXCEL_ALG_TYPE_AHASH,
1854 .algo_mask = SAFEXCEL_ALG_MD5,
1855 .alg.ahash = {
1856 .init = safexcel_hmac_md5_init,
1857 .update = safexcel_ahash_update,
1858 .final = safexcel_ahash_final,
1859 .finup = safexcel_ahash_finup,
1860 .digest = safexcel_hmac_md5_digest,
1861 .setkey = safexcel_hmac_md5_setkey,
1862 .export = safexcel_ahash_export,
1863 .import = safexcel_ahash_import,
1864 .halg = {
1865 .digestsize = MD5_DIGEST_SIZE,
1866 .statesize = sizeof(struct safexcel_ahash_export_state),
1867 .base = {
1868 .cra_name = "hmac(md5)",
1869 .cra_driver_name = "safexcel-hmac-md5",
1870 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1871 .cra_flags = CRYPTO_ALG_ASYNC |
1872 CRYPTO_ALG_ALLOCATES_MEMORY |
1873 CRYPTO_ALG_KERN_DRIVER_ONLY,
1874 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1875 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1876 .cra_init = safexcel_ahash_cra_init,
1877 .cra_exit = safexcel_ahash_cra_exit,
1878 .cra_module = THIS_MODULE,
1879 },
1880 },
1881 },
1882};
1883
1884static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1885{
1886 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1887 int ret = safexcel_ahash_cra_init(tfm);
1888
1889 /* Default 'key' is all zeroes */
1890 memset(&ctx->base.ipad, 0, sizeof(u32));
1891 return ret;
1892}
1893
1894static int safexcel_crc32_init(struct ahash_request *areq)
1895{
1896 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1897 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1898
1899 memset(req, 0, sizeof(*req));
1900
1901 /* Start from loaded key */
1902 req->state[0] = cpu_to_le32(~ctx->base.ipad.word[0]);
1903 /* Set processed to non-zero to enable invalidation detection */
1904 req->len = sizeof(u32);
1905 req->processed = sizeof(u32);
1906
1907 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1908 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1909 req->state_sz = sizeof(u32);
1910 req->digest_sz = sizeof(u32);
1911 req->block_sz = sizeof(u32);
1912
1913 return 0;
1914}
1915
1916static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1917 unsigned int keylen)
1918{
1919 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1920
1921 if (keylen != sizeof(u32))
1922 return -EINVAL;
1923
1924 memcpy(&ctx->base.ipad, key, sizeof(u32));
1925 return 0;
1926}
1927
1928static int safexcel_crc32_digest(struct ahash_request *areq)
1929{
1930 return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1931}
1932
1933struct safexcel_alg_template safexcel_alg_crc32 = {
1934 .type = SAFEXCEL_ALG_TYPE_AHASH,
1935 .algo_mask = 0,
1936 .alg.ahash = {
1937 .init = safexcel_crc32_init,
1938 .update = safexcel_ahash_update,
1939 .final = safexcel_ahash_final,
1940 .finup = safexcel_ahash_finup,
1941 .digest = safexcel_crc32_digest,
1942 .setkey = safexcel_crc32_setkey,
1943 .export = safexcel_ahash_export,
1944 .import = safexcel_ahash_import,
1945 .halg = {
1946 .digestsize = sizeof(u32),
1947 .statesize = sizeof(struct safexcel_ahash_export_state),
1948 .base = {
1949 .cra_name = "crc32",
1950 .cra_driver_name = "safexcel-crc32",
1951 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1952 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1953 CRYPTO_ALG_ASYNC |
1954 CRYPTO_ALG_ALLOCATES_MEMORY |
1955 CRYPTO_ALG_KERN_DRIVER_ONLY,
1956 .cra_blocksize = 1,
1957 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1958 .cra_init = safexcel_crc32_cra_init,
1959 .cra_exit = safexcel_ahash_cra_exit,
1960 .cra_module = THIS_MODULE,
1961 },
1962 },
1963 },
1964};
1965
1966static int safexcel_cbcmac_init(struct ahash_request *areq)
1967{
1968 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1969 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
1970
1971 memset(req, 0, sizeof(*req));
1972
1973 /* Start from loaded keys */
1974 memcpy(req->state, &ctx->base.ipad, ctx->key_sz);
1975 /* Set processed to non-zero to enable invalidation detection */
1976 req->len = AES_BLOCK_SIZE;
1977 req->processed = AES_BLOCK_SIZE;
1978
1979 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1980 req->state_sz = ctx->key_sz;
1981 req->digest_sz = AES_BLOCK_SIZE;
1982 req->block_sz = AES_BLOCK_SIZE;
1983 req->xcbcmac = true;
1984
1985 return 0;
1986}
1987
1988static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
1989 unsigned int len)
1990{
1991 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1992 struct crypto_aes_ctx aes;
1993 int ret, i;
1994
1995 ret = aes_expandkey(&aes, key, len);
1996 if (ret)
1997 return ret;
1998
1999 memset(&ctx->base.ipad, 0, 2 * AES_BLOCK_SIZE);
2000 for (i = 0; i < len / sizeof(u32); i++)
2001 ctx->base.ipad.be[i + 8] = cpu_to_be32(aes.key_enc[i]);
2002
2003 if (len == AES_KEYSIZE_192) {
2004 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2005 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2006 } else if (len == AES_KEYSIZE_256) {
2007 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2008 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2009 } else {
2010 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2011 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2012 }
2013 ctx->cbcmac = true;
2014
2015 memzero_explicit(&aes, sizeof(aes));
2016 return 0;
2017}
2018
2019static int safexcel_cbcmac_digest(struct ahash_request *areq)
2020{
2021 return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2022}
2023
2024struct safexcel_alg_template safexcel_alg_cbcmac = {
2025 .type = SAFEXCEL_ALG_TYPE_AHASH,
2026 .algo_mask = 0,
2027 .alg.ahash = {
2028 .init = safexcel_cbcmac_init,
2029 .update = safexcel_ahash_update,
2030 .final = safexcel_ahash_final,
2031 .finup = safexcel_ahash_finup,
2032 .digest = safexcel_cbcmac_digest,
2033 .setkey = safexcel_cbcmac_setkey,
2034 .export = safexcel_ahash_export,
2035 .import = safexcel_ahash_import,
2036 .halg = {
2037 .digestsize = AES_BLOCK_SIZE,
2038 .statesize = sizeof(struct safexcel_ahash_export_state),
2039 .base = {
2040 .cra_name = "cbcmac(aes)",
2041 .cra_driver_name = "safexcel-cbcmac-aes",
2042 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2043 .cra_flags = CRYPTO_ALG_ASYNC |
2044 CRYPTO_ALG_ALLOCATES_MEMORY |
2045 CRYPTO_ALG_KERN_DRIVER_ONLY,
2046 .cra_blocksize = 1,
2047 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2048 .cra_init = safexcel_ahash_cra_init,
2049 .cra_exit = safexcel_ahash_cra_exit,
2050 .cra_module = THIS_MODULE,
2051 },
2052 },
2053 },
2054};
2055
2056static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2057 unsigned int len)
2058{
2059 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2060 u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2061 int ret, i;
2062
2063 ret = aes_expandkey(ctx->aes, key, len);
2064 if (ret)
2065 return ret;
2066
2067 /* precompute the XCBC key material */
2068 aes_encrypt(ctx->aes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2069 "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2070 aes_encrypt(ctx->aes, (u8 *)key_tmp,
2071 "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2072 aes_encrypt(ctx->aes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2073 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2074 for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2075 ctx->base.ipad.word[i] = swab32(key_tmp[i]);
2076
2077 ret = aes_expandkey(ctx->aes,
2078 (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2079 AES_MIN_KEY_SIZE);
2080 if (ret)
2081 return ret;
2082
2083 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2084 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2085 ctx->cbcmac = false;
2086
2087 return 0;
2088}
2089
2090static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2091{
2092 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2093
2094 safexcel_ahash_cra_init(tfm);
2095 ctx->aes = kmalloc(sizeof(*ctx->aes), GFP_KERNEL);
2096 return ctx->aes == NULL ? -ENOMEM : 0;
2097}
2098
2099static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2100{
2101 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2102
2103 kfree(ctx->aes);
2104 safexcel_ahash_cra_exit(tfm);
2105}
2106
2107struct safexcel_alg_template safexcel_alg_xcbcmac = {
2108 .type = SAFEXCEL_ALG_TYPE_AHASH,
2109 .algo_mask = 0,
2110 .alg.ahash = {
2111 .init = safexcel_cbcmac_init,
2112 .update = safexcel_ahash_update,
2113 .final = safexcel_ahash_final,
2114 .finup = safexcel_ahash_finup,
2115 .digest = safexcel_cbcmac_digest,
2116 .setkey = safexcel_xcbcmac_setkey,
2117 .export = safexcel_ahash_export,
2118 .import = safexcel_ahash_import,
2119 .halg = {
2120 .digestsize = AES_BLOCK_SIZE,
2121 .statesize = sizeof(struct safexcel_ahash_export_state),
2122 .base = {
2123 .cra_name = "xcbc(aes)",
2124 .cra_driver_name = "safexcel-xcbc-aes",
2125 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2126 .cra_flags = CRYPTO_ALG_ASYNC |
2127 CRYPTO_ALG_ALLOCATES_MEMORY |
2128 CRYPTO_ALG_KERN_DRIVER_ONLY,
2129 .cra_blocksize = AES_BLOCK_SIZE,
2130 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2131 .cra_init = safexcel_xcbcmac_cra_init,
2132 .cra_exit = safexcel_xcbcmac_cra_exit,
2133 .cra_module = THIS_MODULE,
2134 },
2135 },
2136 },
2137};
2138
2139static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2140 unsigned int len)
2141{
2142 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2143 __be64 consts[4];
2144 u64 _const[2];
2145 u8 msb_mask, gfmask;
2146 int ret, i;
2147
2148 /* precompute the CMAC key material */
2149 ret = aes_expandkey(ctx->aes, key, len);
2150 if (ret)
2151 return ret;
2152
2153 for (i = 0; i < len / sizeof(u32); i++)
2154 ctx->base.ipad.word[i + 8] = swab32(ctx->aes->key_enc[i]);
2155
2156 /* code below borrowed from crypto/cmac.c */
2157 /* encrypt the zero block */
2158 memset(consts, 0, AES_BLOCK_SIZE);
2159 aes_encrypt(ctx->aes, (u8 *)consts, (u8 *)consts);
2160
2161 gfmask = 0x87;
2162 _const[0] = be64_to_cpu(consts[1]);
2163 _const[1] = be64_to_cpu(consts[0]);
2164
2165 /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2166 for (i = 0; i < 4; i += 2) {
2167 msb_mask = ((s64)_const[1] >> 63) & gfmask;
2168 _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2169 _const[0] = (_const[0] << 1) ^ msb_mask;
2170
2171 consts[i + 0] = cpu_to_be64(_const[1]);
2172 consts[i + 1] = cpu_to_be64(_const[0]);
2173 }
2174 /* end of code borrowed from crypto/cmac.c */
2175
2176 for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2177 ctx->base.ipad.be[i] = cpu_to_be32(((u32 *)consts)[i]);
2178
2179 if (len == AES_KEYSIZE_192) {
2180 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2181 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2182 } else if (len == AES_KEYSIZE_256) {
2183 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2184 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2185 } else {
2186 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2187 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2188 }
2189 ctx->cbcmac = false;
2190
2191 return 0;
2192}
2193
2194struct safexcel_alg_template safexcel_alg_cmac = {
2195 .type = SAFEXCEL_ALG_TYPE_AHASH,
2196 .algo_mask = 0,
2197 .alg.ahash = {
2198 .init = safexcel_cbcmac_init,
2199 .update = safexcel_ahash_update,
2200 .final = safexcel_ahash_final,
2201 .finup = safexcel_ahash_finup,
2202 .digest = safexcel_cbcmac_digest,
2203 .setkey = safexcel_cmac_setkey,
2204 .export = safexcel_ahash_export,
2205 .import = safexcel_ahash_import,
2206 .halg = {
2207 .digestsize = AES_BLOCK_SIZE,
2208 .statesize = sizeof(struct safexcel_ahash_export_state),
2209 .base = {
2210 .cra_name = "cmac(aes)",
2211 .cra_driver_name = "safexcel-cmac-aes",
2212 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2213 .cra_flags = CRYPTO_ALG_ASYNC |
2214 CRYPTO_ALG_ALLOCATES_MEMORY |
2215 CRYPTO_ALG_KERN_DRIVER_ONLY,
2216 .cra_blocksize = AES_BLOCK_SIZE,
2217 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2218 .cra_init = safexcel_xcbcmac_cra_init,
2219 .cra_exit = safexcel_xcbcmac_cra_exit,
2220 .cra_module = THIS_MODULE,
2221 },
2222 },
2223 },
2224};
2225
2226static int safexcel_sm3_init(struct ahash_request *areq)
2227{
2228 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2229 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2230
2231 memset(req, 0, sizeof(*req));
2232
2233 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2234 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2235 req->state_sz = SM3_DIGEST_SIZE;
2236 req->digest_sz = SM3_DIGEST_SIZE;
2237 req->block_sz = SM3_BLOCK_SIZE;
2238
2239 return 0;
2240}
2241
2242static int safexcel_sm3_digest(struct ahash_request *areq)
2243{
2244 int ret = safexcel_sm3_init(areq);
2245
2246 if (ret)
2247 return ret;
2248
2249 return safexcel_ahash_finup(areq);
2250}
2251
2252struct safexcel_alg_template safexcel_alg_sm3 = {
2253 .type = SAFEXCEL_ALG_TYPE_AHASH,
2254 .algo_mask = SAFEXCEL_ALG_SM3,
2255 .alg.ahash = {
2256 .init = safexcel_sm3_init,
2257 .update = safexcel_ahash_update,
2258 .final = safexcel_ahash_final,
2259 .finup = safexcel_ahash_finup,
2260 .digest = safexcel_sm3_digest,
2261 .export = safexcel_ahash_export,
2262 .import = safexcel_ahash_import,
2263 .halg = {
2264 .digestsize = SM3_DIGEST_SIZE,
2265 .statesize = sizeof(struct safexcel_ahash_export_state),
2266 .base = {
2267 .cra_name = "sm3",
2268 .cra_driver_name = "safexcel-sm3",
2269 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2270 .cra_flags = CRYPTO_ALG_ASYNC |
2271 CRYPTO_ALG_ALLOCATES_MEMORY |
2272 CRYPTO_ALG_KERN_DRIVER_ONLY,
2273 .cra_blocksize = SM3_BLOCK_SIZE,
2274 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2275 .cra_init = safexcel_ahash_cra_init,
2276 .cra_exit = safexcel_ahash_cra_exit,
2277 .cra_module = THIS_MODULE,
2278 },
2279 },
2280 },
2281};
2282
2283static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2284 unsigned int keylen)
2285{
2286 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2287 SM3_DIGEST_SIZE);
2288}
2289
2290static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2291{
2292 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2293 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2294
2295 memset(req, 0, sizeof(*req));
2296
2297 /* Start from ipad precompute */
2298 memcpy(req->state, &ctx->base.ipad, SM3_DIGEST_SIZE);
2299 /* Already processed the key^ipad part now! */
2300 req->len = SM3_BLOCK_SIZE;
2301 req->processed = SM3_BLOCK_SIZE;
2302
2303 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2304 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2305 req->state_sz = SM3_DIGEST_SIZE;
2306 req->digest_sz = SM3_DIGEST_SIZE;
2307 req->block_sz = SM3_BLOCK_SIZE;
2308 req->hmac = true;
2309
2310 return 0;
2311}
2312
2313static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2314{
2315 int ret = safexcel_hmac_sm3_init(areq);
2316
2317 if (ret)
2318 return ret;
2319
2320 return safexcel_ahash_finup(areq);
2321}
2322
2323struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2324 .type = SAFEXCEL_ALG_TYPE_AHASH,
2325 .algo_mask = SAFEXCEL_ALG_SM3,
2326 .alg.ahash = {
2327 .init = safexcel_hmac_sm3_init,
2328 .update = safexcel_ahash_update,
2329 .final = safexcel_ahash_final,
2330 .finup = safexcel_ahash_finup,
2331 .digest = safexcel_hmac_sm3_digest,
2332 .setkey = safexcel_hmac_sm3_setkey,
2333 .export = safexcel_ahash_export,
2334 .import = safexcel_ahash_import,
2335 .halg = {
2336 .digestsize = SM3_DIGEST_SIZE,
2337 .statesize = sizeof(struct safexcel_ahash_export_state),
2338 .base = {
2339 .cra_name = "hmac(sm3)",
2340 .cra_driver_name = "safexcel-hmac-sm3",
2341 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2342 .cra_flags = CRYPTO_ALG_ASYNC |
2343 CRYPTO_ALG_ALLOCATES_MEMORY |
2344 CRYPTO_ALG_KERN_DRIVER_ONLY,
2345 .cra_blocksize = SM3_BLOCK_SIZE,
2346 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2347 .cra_init = safexcel_ahash_cra_init,
2348 .cra_exit = safexcel_ahash_cra_exit,
2349 .cra_module = THIS_MODULE,
2350 },
2351 },
2352 },
2353};
2354
2355static int safexcel_sha3_224_init(struct ahash_request *areq)
2356{
2357 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2358 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2359 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2360
2361 memset(req, 0, sizeof(*req));
2362
2363 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2364 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2365 req->state_sz = SHA3_224_DIGEST_SIZE;
2366 req->digest_sz = SHA3_224_DIGEST_SIZE;
2367 req->block_sz = SHA3_224_BLOCK_SIZE;
2368 ctx->do_fallback = false;
2369 ctx->fb_init_done = false;
2370 return 0;
2371}
2372
2373static int safexcel_sha3_fbcheck(struct ahash_request *req)
2374{
2375 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2376 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2377 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2378 int ret = 0;
2379
2380 if (ctx->do_fallback) {
2381 ahash_request_set_tfm(subreq, ctx->fback);
2382 ahash_request_set_callback(subreq, req->base.flags,
2383 req->base.complete, req->base.data);
2384 ahash_request_set_crypt(subreq, req->src, req->result,
2385 req->nbytes);
2386 if (!ctx->fb_init_done) {
2387 if (ctx->fb_do_setkey) {
2388 /* Set fallback cipher HMAC key */
2389 u8 key[SHA3_224_BLOCK_SIZE];
2390
2391 memcpy(key, &ctx->base.ipad,
2392 crypto_ahash_blocksize(ctx->fback) / 2);
2393 memcpy(key +
2394 crypto_ahash_blocksize(ctx->fback) / 2,
2395 &ctx->base.opad,
2396 crypto_ahash_blocksize(ctx->fback) / 2);
2397 ret = crypto_ahash_setkey(ctx->fback, key,
2398 crypto_ahash_blocksize(ctx->fback));
2399 memzero_explicit(key,
2400 crypto_ahash_blocksize(ctx->fback));
2401 ctx->fb_do_setkey = false;
2402 }
2403 ret = ret ?: crypto_ahash_init(subreq);
2404 ctx->fb_init_done = true;
2405 }
2406 }
2407 return ret;
2408}
2409
2410static int safexcel_sha3_update(struct ahash_request *req)
2411{
2412 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2413 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2414 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2415
2416 ctx->do_fallback = true;
2417 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2418}
2419
2420static int safexcel_sha3_final(struct ahash_request *req)
2421{
2422 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2423 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2424 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2425
2426 ctx->do_fallback = true;
2427 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2428}
2429
2430static int safexcel_sha3_finup(struct ahash_request *req)
2431{
2432 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2433 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2434 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2435
2436 ctx->do_fallback |= !req->nbytes;
2437 if (ctx->do_fallback)
2438 /* Update or ex/import happened or len 0, cannot use the HW */
2439 return safexcel_sha3_fbcheck(req) ?:
2440 crypto_ahash_finup(subreq);
2441 else
2442 return safexcel_ahash_finup(req);
2443}
2444
2445static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2446{
2447 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2448 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2449 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2450
2451 ctx->do_fallback = true;
2452 ctx->fb_init_done = false;
2453 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2454}
2455
2456static int safexcel_sha3_224_digest(struct ahash_request *req)
2457{
2458 if (req->nbytes)
2459 return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2460
2461 /* HW cannot do zero length hash, use fallback instead */
2462 return safexcel_sha3_digest_fallback(req);
2463}
2464
2465static int safexcel_sha3_export(struct ahash_request *req, void *out)
2466{
2467 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2468 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2469 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2470
2471 ctx->do_fallback = true;
2472 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2473}
2474
2475static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2476{
2477 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2478 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2479 struct ahash_request *subreq = ahash_request_ctx_dma(req);
2480
2481 ctx->do_fallback = true;
2482 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2483 // return safexcel_ahash_import(req, in);
2484}
2485
2486static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2487{
2488 struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2489 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2490
2491 safexcel_ahash_cra_init(tfm);
2492
2493 /* Allocate fallback implementation */
2494 ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2495 CRYPTO_ALG_ASYNC |
2496 CRYPTO_ALG_NEED_FALLBACK);
2497 if (IS_ERR(ctx->fback))
2498 return PTR_ERR(ctx->fback);
2499
2500 /* Update statesize from fallback algorithm! */
2501 crypto_hash_alg_common(ahash)->statesize =
2502 crypto_ahash_statesize(ctx->fback);
2503 crypto_ahash_set_reqsize_dma(
2504 ahash, max(sizeof(struct safexcel_ahash_req),
2505 sizeof(struct ahash_request) +
2506 crypto_ahash_reqsize(ctx->fback)));
2507 return 0;
2508}
2509
2510static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2511{
2512 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2513
2514 crypto_free_ahash(ctx->fback);
2515 safexcel_ahash_cra_exit(tfm);
2516}
2517
2518struct safexcel_alg_template safexcel_alg_sha3_224 = {
2519 .type = SAFEXCEL_ALG_TYPE_AHASH,
2520 .algo_mask = SAFEXCEL_ALG_SHA3,
2521 .alg.ahash = {
2522 .init = safexcel_sha3_224_init,
2523 .update = safexcel_sha3_update,
2524 .final = safexcel_sha3_final,
2525 .finup = safexcel_sha3_finup,
2526 .digest = safexcel_sha3_224_digest,
2527 .export = safexcel_sha3_export,
2528 .import = safexcel_sha3_import,
2529 .halg = {
2530 .digestsize = SHA3_224_DIGEST_SIZE,
2531 .statesize = sizeof(struct safexcel_ahash_export_state),
2532 .base = {
2533 .cra_name = "sha3-224",
2534 .cra_driver_name = "safexcel-sha3-224",
2535 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2536 .cra_flags = CRYPTO_ALG_ASYNC |
2537 CRYPTO_ALG_KERN_DRIVER_ONLY |
2538 CRYPTO_ALG_NEED_FALLBACK,
2539 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2540 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2541 .cra_init = safexcel_sha3_cra_init,
2542 .cra_exit = safexcel_sha3_cra_exit,
2543 .cra_module = THIS_MODULE,
2544 },
2545 },
2546 },
2547};
2548
2549static int safexcel_sha3_256_init(struct ahash_request *areq)
2550{
2551 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2552 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2553 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2554
2555 memset(req, 0, sizeof(*req));
2556
2557 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2558 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2559 req->state_sz = SHA3_256_DIGEST_SIZE;
2560 req->digest_sz = SHA3_256_DIGEST_SIZE;
2561 req->block_sz = SHA3_256_BLOCK_SIZE;
2562 ctx->do_fallback = false;
2563 ctx->fb_init_done = false;
2564 return 0;
2565}
2566
2567static int safexcel_sha3_256_digest(struct ahash_request *req)
2568{
2569 if (req->nbytes)
2570 return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2571
2572 /* HW cannot do zero length hash, use fallback instead */
2573 return safexcel_sha3_digest_fallback(req);
2574}
2575
2576struct safexcel_alg_template safexcel_alg_sha3_256 = {
2577 .type = SAFEXCEL_ALG_TYPE_AHASH,
2578 .algo_mask = SAFEXCEL_ALG_SHA3,
2579 .alg.ahash = {
2580 .init = safexcel_sha3_256_init,
2581 .update = safexcel_sha3_update,
2582 .final = safexcel_sha3_final,
2583 .finup = safexcel_sha3_finup,
2584 .digest = safexcel_sha3_256_digest,
2585 .export = safexcel_sha3_export,
2586 .import = safexcel_sha3_import,
2587 .halg = {
2588 .digestsize = SHA3_256_DIGEST_SIZE,
2589 .statesize = sizeof(struct safexcel_ahash_export_state),
2590 .base = {
2591 .cra_name = "sha3-256",
2592 .cra_driver_name = "safexcel-sha3-256",
2593 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2594 .cra_flags = CRYPTO_ALG_ASYNC |
2595 CRYPTO_ALG_KERN_DRIVER_ONLY |
2596 CRYPTO_ALG_NEED_FALLBACK,
2597 .cra_blocksize = SHA3_256_BLOCK_SIZE,
2598 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2599 .cra_init = safexcel_sha3_cra_init,
2600 .cra_exit = safexcel_sha3_cra_exit,
2601 .cra_module = THIS_MODULE,
2602 },
2603 },
2604 },
2605};
2606
2607static int safexcel_sha3_384_init(struct ahash_request *areq)
2608{
2609 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2610 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2611 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2612
2613 memset(req, 0, sizeof(*req));
2614
2615 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2616 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2617 req->state_sz = SHA3_384_DIGEST_SIZE;
2618 req->digest_sz = SHA3_384_DIGEST_SIZE;
2619 req->block_sz = SHA3_384_BLOCK_SIZE;
2620 ctx->do_fallback = false;
2621 ctx->fb_init_done = false;
2622 return 0;
2623}
2624
2625static int safexcel_sha3_384_digest(struct ahash_request *req)
2626{
2627 if (req->nbytes)
2628 return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2629
2630 /* HW cannot do zero length hash, use fallback instead */
2631 return safexcel_sha3_digest_fallback(req);
2632}
2633
2634struct safexcel_alg_template safexcel_alg_sha3_384 = {
2635 .type = SAFEXCEL_ALG_TYPE_AHASH,
2636 .algo_mask = SAFEXCEL_ALG_SHA3,
2637 .alg.ahash = {
2638 .init = safexcel_sha3_384_init,
2639 .update = safexcel_sha3_update,
2640 .final = safexcel_sha3_final,
2641 .finup = safexcel_sha3_finup,
2642 .digest = safexcel_sha3_384_digest,
2643 .export = safexcel_sha3_export,
2644 .import = safexcel_sha3_import,
2645 .halg = {
2646 .digestsize = SHA3_384_DIGEST_SIZE,
2647 .statesize = sizeof(struct safexcel_ahash_export_state),
2648 .base = {
2649 .cra_name = "sha3-384",
2650 .cra_driver_name = "safexcel-sha3-384",
2651 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2652 .cra_flags = CRYPTO_ALG_ASYNC |
2653 CRYPTO_ALG_KERN_DRIVER_ONLY |
2654 CRYPTO_ALG_NEED_FALLBACK,
2655 .cra_blocksize = SHA3_384_BLOCK_SIZE,
2656 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2657 .cra_init = safexcel_sha3_cra_init,
2658 .cra_exit = safexcel_sha3_cra_exit,
2659 .cra_module = THIS_MODULE,
2660 },
2661 },
2662 },
2663};
2664
2665static int safexcel_sha3_512_init(struct ahash_request *areq)
2666{
2667 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2668 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2669 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2670
2671 memset(req, 0, sizeof(*req));
2672
2673 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2674 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2675 req->state_sz = SHA3_512_DIGEST_SIZE;
2676 req->digest_sz = SHA3_512_DIGEST_SIZE;
2677 req->block_sz = SHA3_512_BLOCK_SIZE;
2678 ctx->do_fallback = false;
2679 ctx->fb_init_done = false;
2680 return 0;
2681}
2682
2683static int safexcel_sha3_512_digest(struct ahash_request *req)
2684{
2685 if (req->nbytes)
2686 return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2687
2688 /* HW cannot do zero length hash, use fallback instead */
2689 return safexcel_sha3_digest_fallback(req);
2690}
2691
2692struct safexcel_alg_template safexcel_alg_sha3_512 = {
2693 .type = SAFEXCEL_ALG_TYPE_AHASH,
2694 .algo_mask = SAFEXCEL_ALG_SHA3,
2695 .alg.ahash = {
2696 .init = safexcel_sha3_512_init,
2697 .update = safexcel_sha3_update,
2698 .final = safexcel_sha3_final,
2699 .finup = safexcel_sha3_finup,
2700 .digest = safexcel_sha3_512_digest,
2701 .export = safexcel_sha3_export,
2702 .import = safexcel_sha3_import,
2703 .halg = {
2704 .digestsize = SHA3_512_DIGEST_SIZE,
2705 .statesize = sizeof(struct safexcel_ahash_export_state),
2706 .base = {
2707 .cra_name = "sha3-512",
2708 .cra_driver_name = "safexcel-sha3-512",
2709 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2710 .cra_flags = CRYPTO_ALG_ASYNC |
2711 CRYPTO_ALG_KERN_DRIVER_ONLY |
2712 CRYPTO_ALG_NEED_FALLBACK,
2713 .cra_blocksize = SHA3_512_BLOCK_SIZE,
2714 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2715 .cra_init = safexcel_sha3_cra_init,
2716 .cra_exit = safexcel_sha3_cra_exit,
2717 .cra_module = THIS_MODULE,
2718 },
2719 },
2720 },
2721};
2722
2723static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2724{
2725 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2726 int ret;
2727
2728 ret = safexcel_sha3_cra_init(tfm);
2729 if (ret)
2730 return ret;
2731
2732 /* Allocate precalc basic digest implementation */
2733 ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2734 if (IS_ERR(ctx->shpre))
2735 return PTR_ERR(ctx->shpre);
2736
2737 ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2738 crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2739 if (!ctx->shdesc) {
2740 crypto_free_shash(ctx->shpre);
2741 return -ENOMEM;
2742 }
2743 ctx->shdesc->tfm = ctx->shpre;
2744 return 0;
2745}
2746
2747static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2748{
2749 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2750
2751 crypto_free_ahash(ctx->fback);
2752 crypto_free_shash(ctx->shpre);
2753 kfree(ctx->shdesc);
2754 safexcel_ahash_cra_exit(tfm);
2755}
2756
2757static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2758 unsigned int keylen)
2759{
2760 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2761 int ret = 0;
2762
2763 if (keylen > crypto_ahash_blocksize(tfm)) {
2764 /*
2765 * If the key is larger than the blocksize, then hash it
2766 * first using our fallback cipher
2767 */
2768 ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2769 ctx->base.ipad.byte);
2770 keylen = crypto_shash_digestsize(ctx->shpre);
2771
2772 /*
2773 * If the digest is larger than half the blocksize, we need to
2774 * move the rest to opad due to the way our HMAC infra works.
2775 */
2776 if (keylen > crypto_ahash_blocksize(tfm) / 2)
2777 /* Buffers overlap, need to use memmove iso memcpy! */
2778 memmove(&ctx->base.opad,
2779 ctx->base.ipad.byte +
2780 crypto_ahash_blocksize(tfm) / 2,
2781 keylen - crypto_ahash_blocksize(tfm) / 2);
2782 } else {
2783 /*
2784 * Copy the key to our ipad & opad buffers
2785 * Note that ipad and opad each contain one half of the key,
2786 * to match the existing HMAC driver infrastructure.
2787 */
2788 if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2789 memcpy(&ctx->base.ipad, key, keylen);
2790 } else {
2791 memcpy(&ctx->base.ipad, key,
2792 crypto_ahash_blocksize(tfm) / 2);
2793 memcpy(&ctx->base.opad,
2794 key + crypto_ahash_blocksize(tfm) / 2,
2795 keylen - crypto_ahash_blocksize(tfm) / 2);
2796 }
2797 }
2798
2799 /* Pad key with zeroes */
2800 if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2801 memset(ctx->base.ipad.byte + keylen, 0,
2802 crypto_ahash_blocksize(tfm) / 2 - keylen);
2803 memset(&ctx->base.opad, 0, crypto_ahash_blocksize(tfm) / 2);
2804 } else {
2805 memset(ctx->base.opad.byte + keylen -
2806 crypto_ahash_blocksize(tfm) / 2, 0,
2807 crypto_ahash_blocksize(tfm) - keylen);
2808 }
2809
2810 /* If doing fallback, still need to set the new key! */
2811 ctx->fb_do_setkey = true;
2812 return ret;
2813}
2814
2815static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2816{
2817 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2818 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2819 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2820
2821 memset(req, 0, sizeof(*req));
2822
2823 /* Copy (half of) the key */
2824 memcpy(req->state, &ctx->base.ipad, SHA3_224_BLOCK_SIZE / 2);
2825 /* Start of HMAC should have len == processed == blocksize */
2826 req->len = SHA3_224_BLOCK_SIZE;
2827 req->processed = SHA3_224_BLOCK_SIZE;
2828 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2829 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2830 req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2831 req->digest_sz = SHA3_224_DIGEST_SIZE;
2832 req->block_sz = SHA3_224_BLOCK_SIZE;
2833 req->hmac = true;
2834 ctx->do_fallback = false;
2835 ctx->fb_init_done = false;
2836 return 0;
2837}
2838
2839static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2840{
2841 if (req->nbytes)
2842 return safexcel_hmac_sha3_224_init(req) ?:
2843 safexcel_ahash_finup(req);
2844
2845 /* HW cannot do zero length HMAC, use fallback instead */
2846 return safexcel_sha3_digest_fallback(req);
2847}
2848
2849static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2850{
2851 return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2852}
2853
2854struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2855 .type = SAFEXCEL_ALG_TYPE_AHASH,
2856 .algo_mask = SAFEXCEL_ALG_SHA3,
2857 .alg.ahash = {
2858 .init = safexcel_hmac_sha3_224_init,
2859 .update = safexcel_sha3_update,
2860 .final = safexcel_sha3_final,
2861 .finup = safexcel_sha3_finup,
2862 .digest = safexcel_hmac_sha3_224_digest,
2863 .setkey = safexcel_hmac_sha3_setkey,
2864 .export = safexcel_sha3_export,
2865 .import = safexcel_sha3_import,
2866 .halg = {
2867 .digestsize = SHA3_224_DIGEST_SIZE,
2868 .statesize = sizeof(struct safexcel_ahash_export_state),
2869 .base = {
2870 .cra_name = "hmac(sha3-224)",
2871 .cra_driver_name = "safexcel-hmac-sha3-224",
2872 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2873 .cra_flags = CRYPTO_ALG_ASYNC |
2874 CRYPTO_ALG_KERN_DRIVER_ONLY |
2875 CRYPTO_ALG_NEED_FALLBACK,
2876 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2877 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2878 .cra_init = safexcel_hmac_sha3_224_cra_init,
2879 .cra_exit = safexcel_hmac_sha3_cra_exit,
2880 .cra_module = THIS_MODULE,
2881 },
2882 },
2883 },
2884};
2885
2886static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2887{
2888 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2889 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2890 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2891
2892 memset(req, 0, sizeof(*req));
2893
2894 /* Copy (half of) the key */
2895 memcpy(req->state, &ctx->base.ipad, SHA3_256_BLOCK_SIZE / 2);
2896 /* Start of HMAC should have len == processed == blocksize */
2897 req->len = SHA3_256_BLOCK_SIZE;
2898 req->processed = SHA3_256_BLOCK_SIZE;
2899 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2900 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2901 req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2902 req->digest_sz = SHA3_256_DIGEST_SIZE;
2903 req->block_sz = SHA3_256_BLOCK_SIZE;
2904 req->hmac = true;
2905 ctx->do_fallback = false;
2906 ctx->fb_init_done = false;
2907 return 0;
2908}
2909
2910static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2911{
2912 if (req->nbytes)
2913 return safexcel_hmac_sha3_256_init(req) ?:
2914 safexcel_ahash_finup(req);
2915
2916 /* HW cannot do zero length HMAC, use fallback instead */
2917 return safexcel_sha3_digest_fallback(req);
2918}
2919
2920static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2921{
2922 return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2923}
2924
2925struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2926 .type = SAFEXCEL_ALG_TYPE_AHASH,
2927 .algo_mask = SAFEXCEL_ALG_SHA3,
2928 .alg.ahash = {
2929 .init = safexcel_hmac_sha3_256_init,
2930 .update = safexcel_sha3_update,
2931 .final = safexcel_sha3_final,
2932 .finup = safexcel_sha3_finup,
2933 .digest = safexcel_hmac_sha3_256_digest,
2934 .setkey = safexcel_hmac_sha3_setkey,
2935 .export = safexcel_sha3_export,
2936 .import = safexcel_sha3_import,
2937 .halg = {
2938 .digestsize = SHA3_256_DIGEST_SIZE,
2939 .statesize = sizeof(struct safexcel_ahash_export_state),
2940 .base = {
2941 .cra_name = "hmac(sha3-256)",
2942 .cra_driver_name = "safexcel-hmac-sha3-256",
2943 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2944 .cra_flags = CRYPTO_ALG_ASYNC |
2945 CRYPTO_ALG_KERN_DRIVER_ONLY |
2946 CRYPTO_ALG_NEED_FALLBACK,
2947 .cra_blocksize = SHA3_256_BLOCK_SIZE,
2948 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2949 .cra_init = safexcel_hmac_sha3_256_cra_init,
2950 .cra_exit = safexcel_hmac_sha3_cra_exit,
2951 .cra_module = THIS_MODULE,
2952 },
2953 },
2954 },
2955};
2956
2957static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
2958{
2959 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2960 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2961 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
2962
2963 memset(req, 0, sizeof(*req));
2964
2965 /* Copy (half of) the key */
2966 memcpy(req->state, &ctx->base.ipad, SHA3_384_BLOCK_SIZE / 2);
2967 /* Start of HMAC should have len == processed == blocksize */
2968 req->len = SHA3_384_BLOCK_SIZE;
2969 req->processed = SHA3_384_BLOCK_SIZE;
2970 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2971 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2972 req->state_sz = SHA3_384_BLOCK_SIZE / 2;
2973 req->digest_sz = SHA3_384_DIGEST_SIZE;
2974 req->block_sz = SHA3_384_BLOCK_SIZE;
2975 req->hmac = true;
2976 ctx->do_fallback = false;
2977 ctx->fb_init_done = false;
2978 return 0;
2979}
2980
2981static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
2982{
2983 if (req->nbytes)
2984 return safexcel_hmac_sha3_384_init(req) ?:
2985 safexcel_ahash_finup(req);
2986
2987 /* HW cannot do zero length HMAC, use fallback instead */
2988 return safexcel_sha3_digest_fallback(req);
2989}
2990
2991static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
2992{
2993 return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
2994}
2995
2996struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
2997 .type = SAFEXCEL_ALG_TYPE_AHASH,
2998 .algo_mask = SAFEXCEL_ALG_SHA3,
2999 .alg.ahash = {
3000 .init = safexcel_hmac_sha3_384_init,
3001 .update = safexcel_sha3_update,
3002 .final = safexcel_sha3_final,
3003 .finup = safexcel_sha3_finup,
3004 .digest = safexcel_hmac_sha3_384_digest,
3005 .setkey = safexcel_hmac_sha3_setkey,
3006 .export = safexcel_sha3_export,
3007 .import = safexcel_sha3_import,
3008 .halg = {
3009 .digestsize = SHA3_384_DIGEST_SIZE,
3010 .statesize = sizeof(struct safexcel_ahash_export_state),
3011 .base = {
3012 .cra_name = "hmac(sha3-384)",
3013 .cra_driver_name = "safexcel-hmac-sha3-384",
3014 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3015 .cra_flags = CRYPTO_ALG_ASYNC |
3016 CRYPTO_ALG_KERN_DRIVER_ONLY |
3017 CRYPTO_ALG_NEED_FALLBACK,
3018 .cra_blocksize = SHA3_384_BLOCK_SIZE,
3019 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3020 .cra_init = safexcel_hmac_sha3_384_cra_init,
3021 .cra_exit = safexcel_hmac_sha3_cra_exit,
3022 .cra_module = THIS_MODULE,
3023 },
3024 },
3025 },
3026};
3027
3028static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3029{
3030 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3031 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3032 struct safexcel_ahash_req *req = ahash_request_ctx_dma(areq);
3033
3034 memset(req, 0, sizeof(*req));
3035
3036 /* Copy (half of) the key */
3037 memcpy(req->state, &ctx->base.ipad, SHA3_512_BLOCK_SIZE / 2);
3038 /* Start of HMAC should have len == processed == blocksize */
3039 req->len = SHA3_512_BLOCK_SIZE;
3040 req->processed = SHA3_512_BLOCK_SIZE;
3041 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3042 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3043 req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3044 req->digest_sz = SHA3_512_DIGEST_SIZE;
3045 req->block_sz = SHA3_512_BLOCK_SIZE;
3046 req->hmac = true;
3047 ctx->do_fallback = false;
3048 ctx->fb_init_done = false;
3049 return 0;
3050}
3051
3052static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3053{
3054 if (req->nbytes)
3055 return safexcel_hmac_sha3_512_init(req) ?:
3056 safexcel_ahash_finup(req);
3057
3058 /* HW cannot do zero length HMAC, use fallback instead */
3059 return safexcel_sha3_digest_fallback(req);
3060}
3061
3062static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3063{
3064 return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3065}
3066struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3067 .type = SAFEXCEL_ALG_TYPE_AHASH,
3068 .algo_mask = SAFEXCEL_ALG_SHA3,
3069 .alg.ahash = {
3070 .init = safexcel_hmac_sha3_512_init,
3071 .update = safexcel_sha3_update,
3072 .final = safexcel_sha3_final,
3073 .finup = safexcel_sha3_finup,
3074 .digest = safexcel_hmac_sha3_512_digest,
3075 .setkey = safexcel_hmac_sha3_setkey,
3076 .export = safexcel_sha3_export,
3077 .import = safexcel_sha3_import,
3078 .halg = {
3079 .digestsize = SHA3_512_DIGEST_SIZE,
3080 .statesize = sizeof(struct safexcel_ahash_export_state),
3081 .base = {
3082 .cra_name = "hmac(sha3-512)",
3083 .cra_driver_name = "safexcel-hmac-sha3-512",
3084 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3085 .cra_flags = CRYPTO_ALG_ASYNC |
3086 CRYPTO_ALG_KERN_DRIVER_ONLY |
3087 CRYPTO_ALG_NEED_FALLBACK,
3088 .cra_blocksize = SHA3_512_BLOCK_SIZE,
3089 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3090 .cra_init = safexcel_hmac_sha3_512_cra_init,
3091 .cra_exit = safexcel_hmac_sha3_cra_exit,
3092 .cra_module = THIS_MODULE,
3093 },
3094 },
3095 },
3096};
1// SPDX-License-Identifier: GPL-2.0
2/*
3 * Copyright (C) 2017 Marvell
4 *
5 * Antoine Tenart <antoine.tenart@free-electrons.com>
6 */
7
8#include <crypto/aes.h>
9#include <crypto/hmac.h>
10#include <crypto/md5.h>
11#include <crypto/sha.h>
12#include <crypto/sha3.h>
13#include <crypto/skcipher.h>
14#include <crypto/sm3.h>
15#include <linux/device.h>
16#include <linux/dma-mapping.h>
17#include <linux/dmapool.h>
18
19#include "safexcel.h"
20
21struct safexcel_ahash_ctx {
22 struct safexcel_context base;
23 struct safexcel_crypto_priv *priv;
24
25 u32 alg;
26 u8 key_sz;
27 bool cbcmac;
28 bool do_fallback;
29 bool fb_init_done;
30 bool fb_do_setkey;
31
32 __le32 ipad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
33 __le32 opad[SHA3_512_BLOCK_SIZE / sizeof(__le32)];
34
35 struct crypto_cipher *kaes;
36 struct crypto_ahash *fback;
37 struct crypto_shash *shpre;
38 struct shash_desc *shdesc;
39};
40
41struct safexcel_ahash_req {
42 bool last_req;
43 bool finish;
44 bool hmac;
45 bool needs_inv;
46 bool hmac_zlen;
47 bool len_is_le;
48 bool not_first;
49 bool xcbcmac;
50
51 int nents;
52 dma_addr_t result_dma;
53
54 u32 digest;
55
56 u8 state_sz; /* expected state size, only set once */
57 u8 block_sz; /* block size, only set once */
58 u8 digest_sz; /* output digest size, only set once */
59 __le32 state[SHA3_512_BLOCK_SIZE /
60 sizeof(__le32)] __aligned(sizeof(__le32));
61
62 u64 len;
63 u64 processed;
64
65 u8 cache[HASH_CACHE_SIZE] __aligned(sizeof(u32));
66 dma_addr_t cache_dma;
67 unsigned int cache_sz;
68
69 u8 cache_next[HASH_CACHE_SIZE] __aligned(sizeof(u32));
70};
71
72static inline u64 safexcel_queued_len(struct safexcel_ahash_req *req)
73{
74 return req->len - req->processed;
75}
76
77static void safexcel_hash_token(struct safexcel_command_desc *cdesc,
78 u32 input_length, u32 result_length,
79 bool cbcmac)
80{
81 struct safexcel_token *token =
82 (struct safexcel_token *)cdesc->control_data.token;
83
84 token[0].opcode = EIP197_TOKEN_OPCODE_DIRECTION;
85 token[0].packet_length = input_length;
86 token[0].instructions = EIP197_TOKEN_INS_TYPE_HASH;
87
88 input_length &= 15;
89 if (unlikely(cbcmac && input_length)) {
90 token[0].stat = 0;
91 token[1].opcode = EIP197_TOKEN_OPCODE_INSERT;
92 token[1].packet_length = 16 - input_length;
93 token[1].stat = EIP197_TOKEN_STAT_LAST_HASH;
94 token[1].instructions = EIP197_TOKEN_INS_TYPE_HASH;
95 } else {
96 token[0].stat = EIP197_TOKEN_STAT_LAST_HASH;
97 eip197_noop_token(&token[1]);
98 }
99
100 token[2].opcode = EIP197_TOKEN_OPCODE_INSERT;
101 token[2].stat = EIP197_TOKEN_STAT_LAST_HASH |
102 EIP197_TOKEN_STAT_LAST_PACKET;
103 token[2].packet_length = result_length;
104 token[2].instructions = EIP197_TOKEN_INS_TYPE_OUTPUT |
105 EIP197_TOKEN_INS_INSERT_HASH_DIGEST;
106
107 eip197_noop_token(&token[3]);
108}
109
110static void safexcel_context_control(struct safexcel_ahash_ctx *ctx,
111 struct safexcel_ahash_req *req,
112 struct safexcel_command_desc *cdesc)
113{
114 struct safexcel_crypto_priv *priv = ctx->priv;
115 u64 count = 0;
116
117 cdesc->control_data.control0 = ctx->alg;
118 cdesc->control_data.control1 = 0;
119
120 /*
121 * Copy the input digest if needed, and setup the context
122 * fields. Do this now as we need it to setup the first command
123 * descriptor.
124 */
125 if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM)) {
126 if (req->xcbcmac)
127 memcpy(ctx->base.ctxr->data, ctx->ipad, ctx->key_sz);
128 else
129 memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
130
131 if (!req->finish && req->xcbcmac)
132 cdesc->control_data.control0 |=
133 CONTEXT_CONTROL_DIGEST_XCM |
134 CONTEXT_CONTROL_TYPE_HASH_OUT |
135 CONTEXT_CONTROL_NO_FINISH_HASH |
136 CONTEXT_CONTROL_SIZE(req->state_sz /
137 sizeof(u32));
138 else
139 cdesc->control_data.control0 |=
140 CONTEXT_CONTROL_DIGEST_XCM |
141 CONTEXT_CONTROL_TYPE_HASH_OUT |
142 CONTEXT_CONTROL_SIZE(req->state_sz /
143 sizeof(u32));
144 return;
145 } else if (!req->processed) {
146 /* First - and possibly only - block of basic hash only */
147 if (req->finish)
148 cdesc->control_data.control0 |= req->digest |
149 CONTEXT_CONTROL_TYPE_HASH_OUT |
150 CONTEXT_CONTROL_RESTART_HASH |
151 /* ensure its not 0! */
152 CONTEXT_CONTROL_SIZE(1);
153 else
154 cdesc->control_data.control0 |= req->digest |
155 CONTEXT_CONTROL_TYPE_HASH_OUT |
156 CONTEXT_CONTROL_RESTART_HASH |
157 CONTEXT_CONTROL_NO_FINISH_HASH |
158 /* ensure its not 0! */
159 CONTEXT_CONTROL_SIZE(1);
160 return;
161 }
162
163 /* Hash continuation or HMAC, setup (inner) digest from state */
164 memcpy(ctx->base.ctxr->data, req->state, req->state_sz);
165
166 if (req->finish) {
167 /* Compute digest count for hash/HMAC finish operations */
168 if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
169 req->hmac_zlen || (req->processed != req->block_sz)) {
170 count = req->processed / EIP197_COUNTER_BLOCK_SIZE;
171
172 /* This is a hardware limitation, as the
173 * counter must fit into an u32. This represents
174 * a fairly big amount of input data, so we
175 * shouldn't see this.
176 */
177 if (unlikely(count & 0xffffffff00000000ULL)) {
178 dev_warn(priv->dev,
179 "Input data is too big\n");
180 return;
181 }
182 }
183
184 if ((req->digest == CONTEXT_CONTROL_DIGEST_PRECOMPUTED) ||
185 /* Special case: zero length HMAC */
186 req->hmac_zlen ||
187 /* PE HW < 4.4 cannot do HMAC continue, fake using hash */
188 (req->processed != req->block_sz)) {
189 /* Basic hash continue operation, need digest + cnt */
190 cdesc->control_data.control0 |=
191 CONTEXT_CONTROL_SIZE((req->state_sz >> 2) + 1) |
192 CONTEXT_CONTROL_TYPE_HASH_OUT |
193 CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
194 /* For zero-len HMAC, don't finalize, already padded! */
195 if (req->hmac_zlen)
196 cdesc->control_data.control0 |=
197 CONTEXT_CONTROL_NO_FINISH_HASH;
198 cdesc->control_data.control1 |=
199 CONTEXT_CONTROL_DIGEST_CNT;
200 ctx->base.ctxr->data[req->state_sz >> 2] =
201 cpu_to_le32(count);
202 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
203
204 /* Clear zero-length HMAC flag for next operation! */
205 req->hmac_zlen = false;
206 } else { /* HMAC */
207 /* Need outer digest for HMAC finalization */
208 memcpy(ctx->base.ctxr->data + (req->state_sz >> 2),
209 ctx->opad, req->state_sz);
210
211 /* Single pass HMAC - no digest count */
212 cdesc->control_data.control0 |=
213 CONTEXT_CONTROL_SIZE(req->state_sz >> 1) |
214 CONTEXT_CONTROL_TYPE_HASH_OUT |
215 CONTEXT_CONTROL_DIGEST_HMAC;
216 }
217 } else { /* Hash continuation, do not finish yet */
218 cdesc->control_data.control0 |=
219 CONTEXT_CONTROL_SIZE(req->state_sz >> 2) |
220 CONTEXT_CONTROL_DIGEST_PRECOMPUTED |
221 CONTEXT_CONTROL_TYPE_HASH_OUT |
222 CONTEXT_CONTROL_NO_FINISH_HASH;
223 }
224}
225
226static int safexcel_ahash_enqueue(struct ahash_request *areq);
227
228static int safexcel_handle_req_result(struct safexcel_crypto_priv *priv,
229 int ring,
230 struct crypto_async_request *async,
231 bool *should_complete, int *ret)
232{
233 struct safexcel_result_desc *rdesc;
234 struct ahash_request *areq = ahash_request_cast(async);
235 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
236 struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
237 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
238 u64 cache_len;
239
240 *ret = 0;
241
242 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
243 if (IS_ERR(rdesc)) {
244 dev_err(priv->dev,
245 "hash: result: could not retrieve the result descriptor\n");
246 *ret = PTR_ERR(rdesc);
247 } else {
248 *ret = safexcel_rdesc_check_errors(priv, rdesc);
249 }
250
251 safexcel_complete(priv, ring);
252
253 if (sreq->nents) {
254 dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
255 sreq->nents = 0;
256 }
257
258 if (sreq->result_dma) {
259 dma_unmap_single(priv->dev, sreq->result_dma, sreq->digest_sz,
260 DMA_FROM_DEVICE);
261 sreq->result_dma = 0;
262 }
263
264 if (sreq->cache_dma) {
265 dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
266 DMA_TO_DEVICE);
267 sreq->cache_dma = 0;
268 sreq->cache_sz = 0;
269 }
270
271 if (sreq->finish) {
272 if (sreq->hmac &&
273 (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
274 /* Faking HMAC using hash - need to do outer hash */
275 memcpy(sreq->cache, sreq->state,
276 crypto_ahash_digestsize(ahash));
277
278 memcpy(sreq->state, ctx->opad, sreq->digest_sz);
279
280 sreq->len = sreq->block_sz +
281 crypto_ahash_digestsize(ahash);
282 sreq->processed = sreq->block_sz;
283 sreq->hmac = 0;
284
285 if (priv->flags & EIP197_TRC_CACHE)
286 ctx->base.needs_inv = true;
287 areq->nbytes = 0;
288 safexcel_ahash_enqueue(areq);
289
290 *should_complete = false; /* Not done yet */
291 return 1;
292 }
293
294 if (unlikely(sreq->digest == CONTEXT_CONTROL_DIGEST_XCM &&
295 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_CRC32)) {
296 /* Undo final XOR with 0xffffffff ...*/
297 *(__le32 *)areq->result = ~sreq->state[0];
298 } else {
299 memcpy(areq->result, sreq->state,
300 crypto_ahash_digestsize(ahash));
301 }
302 }
303
304 cache_len = safexcel_queued_len(sreq);
305 if (cache_len)
306 memcpy(sreq->cache, sreq->cache_next, cache_len);
307
308 *should_complete = true;
309
310 return 1;
311}
312
313static int safexcel_ahash_send_req(struct crypto_async_request *async, int ring,
314 int *commands, int *results)
315{
316 struct ahash_request *areq = ahash_request_cast(async);
317 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
318 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
319 struct safexcel_crypto_priv *priv = ctx->priv;
320 struct safexcel_command_desc *cdesc, *first_cdesc = NULL;
321 struct safexcel_result_desc *rdesc;
322 struct scatterlist *sg;
323 struct safexcel_token *dmmy;
324 int i, extra = 0, n_cdesc = 0, ret = 0, cache_len, skip = 0;
325 u64 queued, len;
326
327 queued = safexcel_queued_len(req);
328 if (queued <= HASH_CACHE_SIZE)
329 cache_len = queued;
330 else
331 cache_len = queued - areq->nbytes;
332
333 if (!req->finish && !req->last_req) {
334 /* If this is not the last request and the queued data does not
335 * fit into full cache blocks, cache it for the next send call.
336 */
337 extra = queued & (HASH_CACHE_SIZE - 1);
338
339 /* If this is not the last request and the queued data
340 * is a multiple of a block, cache the last one for now.
341 */
342 if (!extra)
343 extra = HASH_CACHE_SIZE;
344
345 sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
346 req->cache_next, extra,
347 areq->nbytes - extra);
348
349 queued -= extra;
350
351 if (!queued) {
352 *commands = 0;
353 *results = 0;
354 return 0;
355 }
356
357 extra = 0;
358 }
359
360 if (unlikely(req->xcbcmac && req->processed > AES_BLOCK_SIZE)) {
361 if (unlikely(cache_len < AES_BLOCK_SIZE)) {
362 /*
363 * Cache contains less than 1 full block, complete.
364 */
365 extra = AES_BLOCK_SIZE - cache_len;
366 if (queued > cache_len) {
367 /* More data follows: borrow bytes */
368 u64 tmp = queued - cache_len;
369
370 skip = min_t(u64, tmp, extra);
371 sg_pcopy_to_buffer(areq->src,
372 sg_nents(areq->src),
373 req->cache + cache_len,
374 skip, 0);
375 }
376 extra -= skip;
377 memset(req->cache + cache_len + skip, 0, extra);
378 if (!ctx->cbcmac && extra) {
379 // 10- padding for XCBCMAC & CMAC
380 req->cache[cache_len + skip] = 0x80;
381 // HW will use K2 iso K3 - compensate!
382 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
383 ((__be32 *)req->cache)[i] ^=
384 cpu_to_be32(le32_to_cpu(
385 ctx->ipad[i] ^ ctx->ipad[i + 4]));
386 }
387 cache_len = AES_BLOCK_SIZE;
388 queued = queued + extra;
389 }
390
391 /* XCBC continue: XOR previous result into 1st word */
392 crypto_xor(req->cache, (const u8 *)req->state, AES_BLOCK_SIZE);
393 }
394
395 len = queued;
396 /* Add a command descriptor for the cached data, if any */
397 if (cache_len) {
398 req->cache_dma = dma_map_single(priv->dev, req->cache,
399 cache_len, DMA_TO_DEVICE);
400 if (dma_mapping_error(priv->dev, req->cache_dma))
401 return -EINVAL;
402
403 req->cache_sz = cache_len;
404 first_cdesc = safexcel_add_cdesc(priv, ring, 1,
405 (cache_len == len),
406 req->cache_dma, cache_len,
407 len, ctx->base.ctxr_dma,
408 &dmmy);
409 if (IS_ERR(first_cdesc)) {
410 ret = PTR_ERR(first_cdesc);
411 goto unmap_cache;
412 }
413 n_cdesc++;
414
415 queued -= cache_len;
416 if (!queued)
417 goto send_command;
418 }
419
420 /* Now handle the current ahash request buffer(s) */
421 req->nents = dma_map_sg(priv->dev, areq->src,
422 sg_nents_for_len(areq->src,
423 areq->nbytes),
424 DMA_TO_DEVICE);
425 if (!req->nents) {
426 ret = -ENOMEM;
427 goto cdesc_rollback;
428 }
429
430 for_each_sg(areq->src, sg, req->nents, i) {
431 int sglen = sg_dma_len(sg);
432
433 if (unlikely(sglen <= skip)) {
434 skip -= sglen;
435 continue;
436 }
437
438 /* Do not overflow the request */
439 if ((queued + skip) <= sglen)
440 sglen = queued;
441 else
442 sglen -= skip;
443
444 cdesc = safexcel_add_cdesc(priv, ring, !n_cdesc,
445 !(queued - sglen),
446 sg_dma_address(sg) + skip, sglen,
447 len, ctx->base.ctxr_dma, &dmmy);
448 if (IS_ERR(cdesc)) {
449 ret = PTR_ERR(cdesc);
450 goto unmap_sg;
451 }
452
453 if (!n_cdesc)
454 first_cdesc = cdesc;
455 n_cdesc++;
456
457 queued -= sglen;
458 if (!queued)
459 break;
460 skip = 0;
461 }
462
463send_command:
464 /* Setup the context options */
465 safexcel_context_control(ctx, req, first_cdesc);
466
467 /* Add the token */
468 safexcel_hash_token(first_cdesc, len, req->digest_sz, ctx->cbcmac);
469
470 req->result_dma = dma_map_single(priv->dev, req->state, req->digest_sz,
471 DMA_FROM_DEVICE);
472 if (dma_mapping_error(priv->dev, req->result_dma)) {
473 ret = -EINVAL;
474 goto unmap_sg;
475 }
476
477 /* Add a result descriptor */
478 rdesc = safexcel_add_rdesc(priv, ring, 1, 1, req->result_dma,
479 req->digest_sz);
480 if (IS_ERR(rdesc)) {
481 ret = PTR_ERR(rdesc);
482 goto unmap_result;
483 }
484
485 safexcel_rdr_req_set(priv, ring, rdesc, &areq->base);
486
487 req->processed += len - extra;
488
489 *commands = n_cdesc;
490 *results = 1;
491 return 0;
492
493unmap_result:
494 dma_unmap_single(priv->dev, req->result_dma, req->digest_sz,
495 DMA_FROM_DEVICE);
496unmap_sg:
497 if (req->nents) {
498 dma_unmap_sg(priv->dev, areq->src, req->nents, DMA_TO_DEVICE);
499 req->nents = 0;
500 }
501cdesc_rollback:
502 for (i = 0; i < n_cdesc; i++)
503 safexcel_ring_rollback_wptr(priv, &priv->ring[ring].cdr);
504unmap_cache:
505 if (req->cache_dma) {
506 dma_unmap_single(priv->dev, req->cache_dma, req->cache_sz,
507 DMA_TO_DEVICE);
508 req->cache_dma = 0;
509 req->cache_sz = 0;
510 }
511
512 return ret;
513}
514
515static int safexcel_handle_inv_result(struct safexcel_crypto_priv *priv,
516 int ring,
517 struct crypto_async_request *async,
518 bool *should_complete, int *ret)
519{
520 struct safexcel_result_desc *rdesc;
521 struct ahash_request *areq = ahash_request_cast(async);
522 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
523 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(ahash);
524 int enq_ret;
525
526 *ret = 0;
527
528 rdesc = safexcel_ring_next_rptr(priv, &priv->ring[ring].rdr);
529 if (IS_ERR(rdesc)) {
530 dev_err(priv->dev,
531 "hash: invalidate: could not retrieve the result descriptor\n");
532 *ret = PTR_ERR(rdesc);
533 } else {
534 *ret = safexcel_rdesc_check_errors(priv, rdesc);
535 }
536
537 safexcel_complete(priv, ring);
538
539 if (ctx->base.exit_inv) {
540 dma_pool_free(priv->context_pool, ctx->base.ctxr,
541 ctx->base.ctxr_dma);
542
543 *should_complete = true;
544 return 1;
545 }
546
547 ring = safexcel_select_ring(priv);
548 ctx->base.ring = ring;
549
550 spin_lock_bh(&priv->ring[ring].queue_lock);
551 enq_ret = crypto_enqueue_request(&priv->ring[ring].queue, async);
552 spin_unlock_bh(&priv->ring[ring].queue_lock);
553
554 if (enq_ret != -EINPROGRESS)
555 *ret = enq_ret;
556
557 queue_work(priv->ring[ring].workqueue,
558 &priv->ring[ring].work_data.work);
559
560 *should_complete = false;
561
562 return 1;
563}
564
565static int safexcel_handle_result(struct safexcel_crypto_priv *priv, int ring,
566 struct crypto_async_request *async,
567 bool *should_complete, int *ret)
568{
569 struct ahash_request *areq = ahash_request_cast(async);
570 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
571 int err;
572
573 BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && req->needs_inv);
574
575 if (req->needs_inv) {
576 req->needs_inv = false;
577 err = safexcel_handle_inv_result(priv, ring, async,
578 should_complete, ret);
579 } else {
580 err = safexcel_handle_req_result(priv, ring, async,
581 should_complete, ret);
582 }
583
584 return err;
585}
586
587static int safexcel_ahash_send_inv(struct crypto_async_request *async,
588 int ring, int *commands, int *results)
589{
590 struct ahash_request *areq = ahash_request_cast(async);
591 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
592 int ret;
593
594 ret = safexcel_invalidate_cache(async, ctx->priv,
595 ctx->base.ctxr_dma, ring);
596 if (unlikely(ret))
597 return ret;
598
599 *commands = 1;
600 *results = 1;
601
602 return 0;
603}
604
605static int safexcel_ahash_send(struct crypto_async_request *async,
606 int ring, int *commands, int *results)
607{
608 struct ahash_request *areq = ahash_request_cast(async);
609 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
610 int ret;
611
612 if (req->needs_inv)
613 ret = safexcel_ahash_send_inv(async, ring, commands, results);
614 else
615 ret = safexcel_ahash_send_req(async, ring, commands, results);
616
617 return ret;
618}
619
620static int safexcel_ahash_exit_inv(struct crypto_tfm *tfm)
621{
622 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
623 struct safexcel_crypto_priv *priv = ctx->priv;
624 EIP197_REQUEST_ON_STACK(req, ahash, EIP197_AHASH_REQ_SIZE);
625 struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
626 struct safexcel_inv_result result = {};
627 int ring = ctx->base.ring;
628
629 memset(req, 0, EIP197_AHASH_REQ_SIZE);
630
631 /* create invalidation request */
632 init_completion(&result.completion);
633 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
634 safexcel_inv_complete, &result);
635
636 ahash_request_set_tfm(req, __crypto_ahash_cast(tfm));
637 ctx = crypto_tfm_ctx(req->base.tfm);
638 ctx->base.exit_inv = true;
639 rctx->needs_inv = true;
640
641 spin_lock_bh(&priv->ring[ring].queue_lock);
642 crypto_enqueue_request(&priv->ring[ring].queue, &req->base);
643 spin_unlock_bh(&priv->ring[ring].queue_lock);
644
645 queue_work(priv->ring[ring].workqueue,
646 &priv->ring[ring].work_data.work);
647
648 wait_for_completion(&result.completion);
649
650 if (result.error) {
651 dev_warn(priv->dev, "hash: completion error (%d)\n",
652 result.error);
653 return result.error;
654 }
655
656 return 0;
657}
658
659/* safexcel_ahash_cache: cache data until at least one request can be sent to
660 * the engine, aka. when there is at least 1 block size in the pipe.
661 */
662static int safexcel_ahash_cache(struct ahash_request *areq)
663{
664 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
665 u64 cache_len;
666
667 /* cache_len: everything accepted by the driver but not sent yet,
668 * tot sz handled by update() - last req sz - tot sz handled by send()
669 */
670 cache_len = safexcel_queued_len(req);
671
672 /*
673 * In case there isn't enough bytes to proceed (less than a
674 * block size), cache the data until we have enough.
675 */
676 if (cache_len + areq->nbytes <= HASH_CACHE_SIZE) {
677 sg_pcopy_to_buffer(areq->src, sg_nents(areq->src),
678 req->cache + cache_len,
679 areq->nbytes, 0);
680 return 0;
681 }
682
683 /* We couldn't cache all the data */
684 return -E2BIG;
685}
686
687static int safexcel_ahash_enqueue(struct ahash_request *areq)
688{
689 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
690 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
691 struct safexcel_crypto_priv *priv = ctx->priv;
692 int ret, ring;
693
694 req->needs_inv = false;
695
696 if (ctx->base.ctxr) {
697 if (priv->flags & EIP197_TRC_CACHE && !ctx->base.needs_inv &&
698 /* invalidate for *any* non-XCBC continuation */
699 ((req->not_first && !req->xcbcmac) ||
700 /* invalidate if (i)digest changed */
701 memcmp(ctx->base.ctxr->data, req->state, req->state_sz) ||
702 /* invalidate for HMAC finish with odigest changed */
703 (req->finish && req->hmac &&
704 memcmp(ctx->base.ctxr->data + (req->state_sz>>2),
705 ctx->opad, req->state_sz))))
706 /*
707 * We're still setting needs_inv here, even though it is
708 * cleared right away, because the needs_inv flag can be
709 * set in other functions and we want to keep the same
710 * logic.
711 */
712 ctx->base.needs_inv = true;
713
714 if (ctx->base.needs_inv) {
715 ctx->base.needs_inv = false;
716 req->needs_inv = true;
717 }
718 } else {
719 ctx->base.ring = safexcel_select_ring(priv);
720 ctx->base.ctxr = dma_pool_zalloc(priv->context_pool,
721 EIP197_GFP_FLAGS(areq->base),
722 &ctx->base.ctxr_dma);
723 if (!ctx->base.ctxr)
724 return -ENOMEM;
725 }
726 req->not_first = true;
727
728 ring = ctx->base.ring;
729
730 spin_lock_bh(&priv->ring[ring].queue_lock);
731 ret = crypto_enqueue_request(&priv->ring[ring].queue, &areq->base);
732 spin_unlock_bh(&priv->ring[ring].queue_lock);
733
734 queue_work(priv->ring[ring].workqueue,
735 &priv->ring[ring].work_data.work);
736
737 return ret;
738}
739
740static int safexcel_ahash_update(struct ahash_request *areq)
741{
742 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
743 int ret;
744
745 /* If the request is 0 length, do nothing */
746 if (!areq->nbytes)
747 return 0;
748
749 /* Add request to the cache if it fits */
750 ret = safexcel_ahash_cache(areq);
751
752 /* Update total request length */
753 req->len += areq->nbytes;
754
755 /* If not all data could fit into the cache, go process the excess.
756 * Also go process immediately for an HMAC IV precompute, which
757 * will never be finished at all, but needs to be processed anyway.
758 */
759 if ((ret && !req->finish) || req->last_req)
760 return safexcel_ahash_enqueue(areq);
761
762 return 0;
763}
764
765static int safexcel_ahash_final(struct ahash_request *areq)
766{
767 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
768 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
769
770 req->finish = true;
771
772 if (unlikely(!req->len && !areq->nbytes)) {
773 /*
774 * If we have an overall 0 length *hash* request:
775 * The HW cannot do 0 length hash, so we provide the correct
776 * result directly here.
777 */
778 if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5)
779 memcpy(areq->result, md5_zero_message_hash,
780 MD5_DIGEST_SIZE);
781 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA1)
782 memcpy(areq->result, sha1_zero_message_hash,
783 SHA1_DIGEST_SIZE);
784 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA224)
785 memcpy(areq->result, sha224_zero_message_hash,
786 SHA224_DIGEST_SIZE);
787 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA256)
788 memcpy(areq->result, sha256_zero_message_hash,
789 SHA256_DIGEST_SIZE);
790 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA384)
791 memcpy(areq->result, sha384_zero_message_hash,
792 SHA384_DIGEST_SIZE);
793 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SHA512)
794 memcpy(areq->result, sha512_zero_message_hash,
795 SHA512_DIGEST_SIZE);
796 else if (ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_SM3) {
797 memcpy(areq->result,
798 EIP197_SM3_ZEROM_HASH, SM3_DIGEST_SIZE);
799 }
800
801 return 0;
802 } else if (unlikely(req->digest == CONTEXT_CONTROL_DIGEST_XCM &&
803 ctx->alg == CONTEXT_CONTROL_CRYPTO_ALG_MD5 &&
804 req->len == sizeof(u32) && !areq->nbytes)) {
805 /* Zero length CRC32 */
806 memcpy(areq->result, ctx->ipad, sizeof(u32));
807 return 0;
808 } else if (unlikely(ctx->cbcmac && req->len == AES_BLOCK_SIZE &&
809 !areq->nbytes)) {
810 /* Zero length CBC MAC */
811 memset(areq->result, 0, AES_BLOCK_SIZE);
812 return 0;
813 } else if (unlikely(req->xcbcmac && req->len == AES_BLOCK_SIZE &&
814 !areq->nbytes)) {
815 /* Zero length (X)CBC/CMAC */
816 int i;
817
818 for (i = 0; i < AES_BLOCK_SIZE / sizeof(u32); i++)
819 ((__be32 *)areq->result)[i] =
820 cpu_to_be32(le32_to_cpu(ctx->ipad[i + 4]));//K3
821 areq->result[0] ^= 0x80; // 10- padding
822 crypto_cipher_encrypt_one(ctx->kaes, areq->result, areq->result);
823 return 0;
824 } else if (unlikely(req->hmac &&
825 (req->len == req->block_sz) &&
826 !areq->nbytes)) {
827 /*
828 * If we have an overall 0 length *HMAC* request:
829 * For HMAC, we need to finalize the inner digest
830 * and then perform the outer hash.
831 */
832
833 /* generate pad block in the cache */
834 /* start with a hash block of all zeroes */
835 memset(req->cache, 0, req->block_sz);
836 /* set the first byte to 0x80 to 'append a 1 bit' */
837 req->cache[0] = 0x80;
838 /* add the length in bits in the last 2 bytes */
839 if (req->len_is_le) {
840 /* Little endian length word (e.g. MD5) */
841 req->cache[req->block_sz-8] = (req->block_sz << 3) &
842 255;
843 req->cache[req->block_sz-7] = (req->block_sz >> 5);
844 } else {
845 /* Big endian length word (e.g. any SHA) */
846 req->cache[req->block_sz-2] = (req->block_sz >> 5);
847 req->cache[req->block_sz-1] = (req->block_sz << 3) &
848 255;
849 }
850
851 req->len += req->block_sz; /* plus 1 hash block */
852
853 /* Set special zero-length HMAC flag */
854 req->hmac_zlen = true;
855
856 /* Finalize HMAC */
857 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
858 } else if (req->hmac) {
859 /* Finalize HMAC */
860 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
861 }
862
863 return safexcel_ahash_enqueue(areq);
864}
865
866static int safexcel_ahash_finup(struct ahash_request *areq)
867{
868 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
869
870 req->finish = true;
871
872 safexcel_ahash_update(areq);
873 return safexcel_ahash_final(areq);
874}
875
876static int safexcel_ahash_export(struct ahash_request *areq, void *out)
877{
878 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
879 struct safexcel_ahash_export_state *export = out;
880
881 export->len = req->len;
882 export->processed = req->processed;
883
884 export->digest = req->digest;
885
886 memcpy(export->state, req->state, req->state_sz);
887 memcpy(export->cache, req->cache, HASH_CACHE_SIZE);
888
889 return 0;
890}
891
892static int safexcel_ahash_import(struct ahash_request *areq, const void *in)
893{
894 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
895 const struct safexcel_ahash_export_state *export = in;
896 int ret;
897
898 ret = crypto_ahash_init(areq);
899 if (ret)
900 return ret;
901
902 req->len = export->len;
903 req->processed = export->processed;
904
905 req->digest = export->digest;
906
907 memcpy(req->cache, export->cache, HASH_CACHE_SIZE);
908 memcpy(req->state, export->state, req->state_sz);
909
910 return 0;
911}
912
913static int safexcel_ahash_cra_init(struct crypto_tfm *tfm)
914{
915 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
916 struct safexcel_alg_template *tmpl =
917 container_of(__crypto_ahash_alg(tfm->__crt_alg),
918 struct safexcel_alg_template, alg.ahash);
919
920 ctx->priv = tmpl->priv;
921 ctx->base.send = safexcel_ahash_send;
922 ctx->base.handle_result = safexcel_handle_result;
923 ctx->fb_do_setkey = false;
924
925 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
926 sizeof(struct safexcel_ahash_req));
927 return 0;
928}
929
930static int safexcel_sha1_init(struct ahash_request *areq)
931{
932 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
933 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
934
935 memset(req, 0, sizeof(*req));
936
937 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
938 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
939 req->state_sz = SHA1_DIGEST_SIZE;
940 req->digest_sz = SHA1_DIGEST_SIZE;
941 req->block_sz = SHA1_BLOCK_SIZE;
942
943 return 0;
944}
945
946static int safexcel_sha1_digest(struct ahash_request *areq)
947{
948 int ret = safexcel_sha1_init(areq);
949
950 if (ret)
951 return ret;
952
953 return safexcel_ahash_finup(areq);
954}
955
956static void safexcel_ahash_cra_exit(struct crypto_tfm *tfm)
957{
958 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
959 struct safexcel_crypto_priv *priv = ctx->priv;
960 int ret;
961
962 /* context not allocated, skip invalidation */
963 if (!ctx->base.ctxr)
964 return;
965
966 if (priv->flags & EIP197_TRC_CACHE) {
967 ret = safexcel_ahash_exit_inv(tfm);
968 if (ret)
969 dev_warn(priv->dev, "hash: invalidation error %d\n", ret);
970 } else {
971 dma_pool_free(priv->context_pool, ctx->base.ctxr,
972 ctx->base.ctxr_dma);
973 }
974}
975
976struct safexcel_alg_template safexcel_alg_sha1 = {
977 .type = SAFEXCEL_ALG_TYPE_AHASH,
978 .algo_mask = SAFEXCEL_ALG_SHA1,
979 .alg.ahash = {
980 .init = safexcel_sha1_init,
981 .update = safexcel_ahash_update,
982 .final = safexcel_ahash_final,
983 .finup = safexcel_ahash_finup,
984 .digest = safexcel_sha1_digest,
985 .export = safexcel_ahash_export,
986 .import = safexcel_ahash_import,
987 .halg = {
988 .digestsize = SHA1_DIGEST_SIZE,
989 .statesize = sizeof(struct safexcel_ahash_export_state),
990 .base = {
991 .cra_name = "sha1",
992 .cra_driver_name = "safexcel-sha1",
993 .cra_priority = SAFEXCEL_CRA_PRIORITY,
994 .cra_flags = CRYPTO_ALG_ASYNC |
995 CRYPTO_ALG_ALLOCATES_MEMORY |
996 CRYPTO_ALG_KERN_DRIVER_ONLY,
997 .cra_blocksize = SHA1_BLOCK_SIZE,
998 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
999 .cra_init = safexcel_ahash_cra_init,
1000 .cra_exit = safexcel_ahash_cra_exit,
1001 .cra_module = THIS_MODULE,
1002 },
1003 },
1004 },
1005};
1006
1007static int safexcel_hmac_sha1_init(struct ahash_request *areq)
1008{
1009 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1010 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1011
1012 memset(req, 0, sizeof(*req));
1013
1014 /* Start from ipad precompute */
1015 memcpy(req->state, ctx->ipad, SHA1_DIGEST_SIZE);
1016 /* Already processed the key^ipad part now! */
1017 req->len = SHA1_BLOCK_SIZE;
1018 req->processed = SHA1_BLOCK_SIZE;
1019
1020 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA1;
1021 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1022 req->state_sz = SHA1_DIGEST_SIZE;
1023 req->digest_sz = SHA1_DIGEST_SIZE;
1024 req->block_sz = SHA1_BLOCK_SIZE;
1025 req->hmac = true;
1026
1027 return 0;
1028}
1029
1030static int safexcel_hmac_sha1_digest(struct ahash_request *areq)
1031{
1032 int ret = safexcel_hmac_sha1_init(areq);
1033
1034 if (ret)
1035 return ret;
1036
1037 return safexcel_ahash_finup(areq);
1038}
1039
1040struct safexcel_ahash_result {
1041 struct completion completion;
1042 int error;
1043};
1044
1045static void safexcel_ahash_complete(struct crypto_async_request *req, int error)
1046{
1047 struct safexcel_ahash_result *result = req->data;
1048
1049 if (error == -EINPROGRESS)
1050 return;
1051
1052 result->error = error;
1053 complete(&result->completion);
1054}
1055
1056static int safexcel_hmac_init_pad(struct ahash_request *areq,
1057 unsigned int blocksize, const u8 *key,
1058 unsigned int keylen, u8 *ipad, u8 *opad)
1059{
1060 struct safexcel_ahash_result result;
1061 struct scatterlist sg;
1062 int ret, i;
1063 u8 *keydup;
1064
1065 if (keylen <= blocksize) {
1066 memcpy(ipad, key, keylen);
1067 } else {
1068 keydup = kmemdup(key, keylen, GFP_KERNEL);
1069 if (!keydup)
1070 return -ENOMEM;
1071
1072 ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1073 safexcel_ahash_complete, &result);
1074 sg_init_one(&sg, keydup, keylen);
1075 ahash_request_set_crypt(areq, &sg, ipad, keylen);
1076 init_completion(&result.completion);
1077
1078 ret = crypto_ahash_digest(areq);
1079 if (ret == -EINPROGRESS || ret == -EBUSY) {
1080 wait_for_completion_interruptible(&result.completion);
1081 ret = result.error;
1082 }
1083
1084 /* Avoid leaking */
1085 memzero_explicit(keydup, keylen);
1086 kfree(keydup);
1087
1088 if (ret)
1089 return ret;
1090
1091 keylen = crypto_ahash_digestsize(crypto_ahash_reqtfm(areq));
1092 }
1093
1094 memset(ipad + keylen, 0, blocksize - keylen);
1095 memcpy(opad, ipad, blocksize);
1096
1097 for (i = 0; i < blocksize; i++) {
1098 ipad[i] ^= HMAC_IPAD_VALUE;
1099 opad[i] ^= HMAC_OPAD_VALUE;
1100 }
1101
1102 return 0;
1103}
1104
1105static int safexcel_hmac_init_iv(struct ahash_request *areq,
1106 unsigned int blocksize, u8 *pad, void *state)
1107{
1108 struct safexcel_ahash_result result;
1109 struct safexcel_ahash_req *req;
1110 struct scatterlist sg;
1111 int ret;
1112
1113 ahash_request_set_callback(areq, CRYPTO_TFM_REQ_MAY_BACKLOG,
1114 safexcel_ahash_complete, &result);
1115 sg_init_one(&sg, pad, blocksize);
1116 ahash_request_set_crypt(areq, &sg, pad, blocksize);
1117 init_completion(&result.completion);
1118
1119 ret = crypto_ahash_init(areq);
1120 if (ret)
1121 return ret;
1122
1123 req = ahash_request_ctx(areq);
1124 req->hmac = true;
1125 req->last_req = true;
1126
1127 ret = crypto_ahash_update(areq);
1128 if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1129 return ret;
1130
1131 wait_for_completion_interruptible(&result.completion);
1132 if (result.error)
1133 return result.error;
1134
1135 return crypto_ahash_export(areq, state);
1136}
1137
1138int safexcel_hmac_setkey(const char *alg, const u8 *key, unsigned int keylen,
1139 void *istate, void *ostate)
1140{
1141 struct ahash_request *areq;
1142 struct crypto_ahash *tfm;
1143 unsigned int blocksize;
1144 u8 *ipad, *opad;
1145 int ret;
1146
1147 tfm = crypto_alloc_ahash(alg, 0, 0);
1148 if (IS_ERR(tfm))
1149 return PTR_ERR(tfm);
1150
1151 areq = ahash_request_alloc(tfm, GFP_KERNEL);
1152 if (!areq) {
1153 ret = -ENOMEM;
1154 goto free_ahash;
1155 }
1156
1157 crypto_ahash_clear_flags(tfm, ~0);
1158 blocksize = crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
1159
1160 ipad = kcalloc(2, blocksize, GFP_KERNEL);
1161 if (!ipad) {
1162 ret = -ENOMEM;
1163 goto free_request;
1164 }
1165
1166 opad = ipad + blocksize;
1167
1168 ret = safexcel_hmac_init_pad(areq, blocksize, key, keylen, ipad, opad);
1169 if (ret)
1170 goto free_ipad;
1171
1172 ret = safexcel_hmac_init_iv(areq, blocksize, ipad, istate);
1173 if (ret)
1174 goto free_ipad;
1175
1176 ret = safexcel_hmac_init_iv(areq, blocksize, opad, ostate);
1177
1178free_ipad:
1179 kfree(ipad);
1180free_request:
1181 ahash_request_free(areq);
1182free_ahash:
1183 crypto_free_ahash(tfm);
1184
1185 return ret;
1186}
1187
1188static int safexcel_hmac_alg_setkey(struct crypto_ahash *tfm, const u8 *key,
1189 unsigned int keylen, const char *alg,
1190 unsigned int state_sz)
1191{
1192 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1193 struct safexcel_crypto_priv *priv = ctx->priv;
1194 struct safexcel_ahash_export_state istate, ostate;
1195 int ret;
1196
1197 ret = safexcel_hmac_setkey(alg, key, keylen, &istate, &ostate);
1198 if (ret)
1199 return ret;
1200
1201 if (priv->flags & EIP197_TRC_CACHE && ctx->base.ctxr &&
1202 (memcmp(ctx->ipad, istate.state, state_sz) ||
1203 memcmp(ctx->opad, ostate.state, state_sz)))
1204 ctx->base.needs_inv = true;
1205
1206 memcpy(ctx->ipad, &istate.state, state_sz);
1207 memcpy(ctx->opad, &ostate.state, state_sz);
1208
1209 return 0;
1210}
1211
1212static int safexcel_hmac_sha1_setkey(struct crypto_ahash *tfm, const u8 *key,
1213 unsigned int keylen)
1214{
1215 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha1",
1216 SHA1_DIGEST_SIZE);
1217}
1218
1219struct safexcel_alg_template safexcel_alg_hmac_sha1 = {
1220 .type = SAFEXCEL_ALG_TYPE_AHASH,
1221 .algo_mask = SAFEXCEL_ALG_SHA1,
1222 .alg.ahash = {
1223 .init = safexcel_hmac_sha1_init,
1224 .update = safexcel_ahash_update,
1225 .final = safexcel_ahash_final,
1226 .finup = safexcel_ahash_finup,
1227 .digest = safexcel_hmac_sha1_digest,
1228 .setkey = safexcel_hmac_sha1_setkey,
1229 .export = safexcel_ahash_export,
1230 .import = safexcel_ahash_import,
1231 .halg = {
1232 .digestsize = SHA1_DIGEST_SIZE,
1233 .statesize = sizeof(struct safexcel_ahash_export_state),
1234 .base = {
1235 .cra_name = "hmac(sha1)",
1236 .cra_driver_name = "safexcel-hmac-sha1",
1237 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1238 .cra_flags = CRYPTO_ALG_ASYNC |
1239 CRYPTO_ALG_ALLOCATES_MEMORY |
1240 CRYPTO_ALG_KERN_DRIVER_ONLY,
1241 .cra_blocksize = SHA1_BLOCK_SIZE,
1242 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1243 .cra_init = safexcel_ahash_cra_init,
1244 .cra_exit = safexcel_ahash_cra_exit,
1245 .cra_module = THIS_MODULE,
1246 },
1247 },
1248 },
1249};
1250
1251static int safexcel_sha256_init(struct ahash_request *areq)
1252{
1253 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1254 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1255
1256 memset(req, 0, sizeof(*req));
1257
1258 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1259 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1260 req->state_sz = SHA256_DIGEST_SIZE;
1261 req->digest_sz = SHA256_DIGEST_SIZE;
1262 req->block_sz = SHA256_BLOCK_SIZE;
1263
1264 return 0;
1265}
1266
1267static int safexcel_sha256_digest(struct ahash_request *areq)
1268{
1269 int ret = safexcel_sha256_init(areq);
1270
1271 if (ret)
1272 return ret;
1273
1274 return safexcel_ahash_finup(areq);
1275}
1276
1277struct safexcel_alg_template safexcel_alg_sha256 = {
1278 .type = SAFEXCEL_ALG_TYPE_AHASH,
1279 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1280 .alg.ahash = {
1281 .init = safexcel_sha256_init,
1282 .update = safexcel_ahash_update,
1283 .final = safexcel_ahash_final,
1284 .finup = safexcel_ahash_finup,
1285 .digest = safexcel_sha256_digest,
1286 .export = safexcel_ahash_export,
1287 .import = safexcel_ahash_import,
1288 .halg = {
1289 .digestsize = SHA256_DIGEST_SIZE,
1290 .statesize = sizeof(struct safexcel_ahash_export_state),
1291 .base = {
1292 .cra_name = "sha256",
1293 .cra_driver_name = "safexcel-sha256",
1294 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1295 .cra_flags = CRYPTO_ALG_ASYNC |
1296 CRYPTO_ALG_ALLOCATES_MEMORY |
1297 CRYPTO_ALG_KERN_DRIVER_ONLY,
1298 .cra_blocksize = SHA256_BLOCK_SIZE,
1299 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1300 .cra_init = safexcel_ahash_cra_init,
1301 .cra_exit = safexcel_ahash_cra_exit,
1302 .cra_module = THIS_MODULE,
1303 },
1304 },
1305 },
1306};
1307
1308static int safexcel_sha224_init(struct ahash_request *areq)
1309{
1310 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1311 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1312
1313 memset(req, 0, sizeof(*req));
1314
1315 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1316 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1317 req->state_sz = SHA256_DIGEST_SIZE;
1318 req->digest_sz = SHA256_DIGEST_SIZE;
1319 req->block_sz = SHA256_BLOCK_SIZE;
1320
1321 return 0;
1322}
1323
1324static int safexcel_sha224_digest(struct ahash_request *areq)
1325{
1326 int ret = safexcel_sha224_init(areq);
1327
1328 if (ret)
1329 return ret;
1330
1331 return safexcel_ahash_finup(areq);
1332}
1333
1334struct safexcel_alg_template safexcel_alg_sha224 = {
1335 .type = SAFEXCEL_ALG_TYPE_AHASH,
1336 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1337 .alg.ahash = {
1338 .init = safexcel_sha224_init,
1339 .update = safexcel_ahash_update,
1340 .final = safexcel_ahash_final,
1341 .finup = safexcel_ahash_finup,
1342 .digest = safexcel_sha224_digest,
1343 .export = safexcel_ahash_export,
1344 .import = safexcel_ahash_import,
1345 .halg = {
1346 .digestsize = SHA224_DIGEST_SIZE,
1347 .statesize = sizeof(struct safexcel_ahash_export_state),
1348 .base = {
1349 .cra_name = "sha224",
1350 .cra_driver_name = "safexcel-sha224",
1351 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1352 .cra_flags = CRYPTO_ALG_ASYNC |
1353 CRYPTO_ALG_ALLOCATES_MEMORY |
1354 CRYPTO_ALG_KERN_DRIVER_ONLY,
1355 .cra_blocksize = SHA224_BLOCK_SIZE,
1356 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1357 .cra_init = safexcel_ahash_cra_init,
1358 .cra_exit = safexcel_ahash_cra_exit,
1359 .cra_module = THIS_MODULE,
1360 },
1361 },
1362 },
1363};
1364
1365static int safexcel_hmac_sha224_setkey(struct crypto_ahash *tfm, const u8 *key,
1366 unsigned int keylen)
1367{
1368 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha224",
1369 SHA256_DIGEST_SIZE);
1370}
1371
1372static int safexcel_hmac_sha224_init(struct ahash_request *areq)
1373{
1374 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1375 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1376
1377 memset(req, 0, sizeof(*req));
1378
1379 /* Start from ipad precompute */
1380 memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1381 /* Already processed the key^ipad part now! */
1382 req->len = SHA256_BLOCK_SIZE;
1383 req->processed = SHA256_BLOCK_SIZE;
1384
1385 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA224;
1386 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1387 req->state_sz = SHA256_DIGEST_SIZE;
1388 req->digest_sz = SHA256_DIGEST_SIZE;
1389 req->block_sz = SHA256_BLOCK_SIZE;
1390 req->hmac = true;
1391
1392 return 0;
1393}
1394
1395static int safexcel_hmac_sha224_digest(struct ahash_request *areq)
1396{
1397 int ret = safexcel_hmac_sha224_init(areq);
1398
1399 if (ret)
1400 return ret;
1401
1402 return safexcel_ahash_finup(areq);
1403}
1404
1405struct safexcel_alg_template safexcel_alg_hmac_sha224 = {
1406 .type = SAFEXCEL_ALG_TYPE_AHASH,
1407 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1408 .alg.ahash = {
1409 .init = safexcel_hmac_sha224_init,
1410 .update = safexcel_ahash_update,
1411 .final = safexcel_ahash_final,
1412 .finup = safexcel_ahash_finup,
1413 .digest = safexcel_hmac_sha224_digest,
1414 .setkey = safexcel_hmac_sha224_setkey,
1415 .export = safexcel_ahash_export,
1416 .import = safexcel_ahash_import,
1417 .halg = {
1418 .digestsize = SHA224_DIGEST_SIZE,
1419 .statesize = sizeof(struct safexcel_ahash_export_state),
1420 .base = {
1421 .cra_name = "hmac(sha224)",
1422 .cra_driver_name = "safexcel-hmac-sha224",
1423 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1424 .cra_flags = CRYPTO_ALG_ASYNC |
1425 CRYPTO_ALG_ALLOCATES_MEMORY |
1426 CRYPTO_ALG_KERN_DRIVER_ONLY,
1427 .cra_blocksize = SHA224_BLOCK_SIZE,
1428 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1429 .cra_init = safexcel_ahash_cra_init,
1430 .cra_exit = safexcel_ahash_cra_exit,
1431 .cra_module = THIS_MODULE,
1432 },
1433 },
1434 },
1435};
1436
1437static int safexcel_hmac_sha256_setkey(struct crypto_ahash *tfm, const u8 *key,
1438 unsigned int keylen)
1439{
1440 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha256",
1441 SHA256_DIGEST_SIZE);
1442}
1443
1444static int safexcel_hmac_sha256_init(struct ahash_request *areq)
1445{
1446 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1447 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1448
1449 memset(req, 0, sizeof(*req));
1450
1451 /* Start from ipad precompute */
1452 memcpy(req->state, ctx->ipad, SHA256_DIGEST_SIZE);
1453 /* Already processed the key^ipad part now! */
1454 req->len = SHA256_BLOCK_SIZE;
1455 req->processed = SHA256_BLOCK_SIZE;
1456
1457 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA256;
1458 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1459 req->state_sz = SHA256_DIGEST_SIZE;
1460 req->digest_sz = SHA256_DIGEST_SIZE;
1461 req->block_sz = SHA256_BLOCK_SIZE;
1462 req->hmac = true;
1463
1464 return 0;
1465}
1466
1467static int safexcel_hmac_sha256_digest(struct ahash_request *areq)
1468{
1469 int ret = safexcel_hmac_sha256_init(areq);
1470
1471 if (ret)
1472 return ret;
1473
1474 return safexcel_ahash_finup(areq);
1475}
1476
1477struct safexcel_alg_template safexcel_alg_hmac_sha256 = {
1478 .type = SAFEXCEL_ALG_TYPE_AHASH,
1479 .algo_mask = SAFEXCEL_ALG_SHA2_256,
1480 .alg.ahash = {
1481 .init = safexcel_hmac_sha256_init,
1482 .update = safexcel_ahash_update,
1483 .final = safexcel_ahash_final,
1484 .finup = safexcel_ahash_finup,
1485 .digest = safexcel_hmac_sha256_digest,
1486 .setkey = safexcel_hmac_sha256_setkey,
1487 .export = safexcel_ahash_export,
1488 .import = safexcel_ahash_import,
1489 .halg = {
1490 .digestsize = SHA256_DIGEST_SIZE,
1491 .statesize = sizeof(struct safexcel_ahash_export_state),
1492 .base = {
1493 .cra_name = "hmac(sha256)",
1494 .cra_driver_name = "safexcel-hmac-sha256",
1495 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1496 .cra_flags = CRYPTO_ALG_ASYNC |
1497 CRYPTO_ALG_ALLOCATES_MEMORY |
1498 CRYPTO_ALG_KERN_DRIVER_ONLY,
1499 .cra_blocksize = SHA256_BLOCK_SIZE,
1500 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1501 .cra_init = safexcel_ahash_cra_init,
1502 .cra_exit = safexcel_ahash_cra_exit,
1503 .cra_module = THIS_MODULE,
1504 },
1505 },
1506 },
1507};
1508
1509static int safexcel_sha512_init(struct ahash_request *areq)
1510{
1511 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1512 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1513
1514 memset(req, 0, sizeof(*req));
1515
1516 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1517 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1518 req->state_sz = SHA512_DIGEST_SIZE;
1519 req->digest_sz = SHA512_DIGEST_SIZE;
1520 req->block_sz = SHA512_BLOCK_SIZE;
1521
1522 return 0;
1523}
1524
1525static int safexcel_sha512_digest(struct ahash_request *areq)
1526{
1527 int ret = safexcel_sha512_init(areq);
1528
1529 if (ret)
1530 return ret;
1531
1532 return safexcel_ahash_finup(areq);
1533}
1534
1535struct safexcel_alg_template safexcel_alg_sha512 = {
1536 .type = SAFEXCEL_ALG_TYPE_AHASH,
1537 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1538 .alg.ahash = {
1539 .init = safexcel_sha512_init,
1540 .update = safexcel_ahash_update,
1541 .final = safexcel_ahash_final,
1542 .finup = safexcel_ahash_finup,
1543 .digest = safexcel_sha512_digest,
1544 .export = safexcel_ahash_export,
1545 .import = safexcel_ahash_import,
1546 .halg = {
1547 .digestsize = SHA512_DIGEST_SIZE,
1548 .statesize = sizeof(struct safexcel_ahash_export_state),
1549 .base = {
1550 .cra_name = "sha512",
1551 .cra_driver_name = "safexcel-sha512",
1552 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1553 .cra_flags = CRYPTO_ALG_ASYNC |
1554 CRYPTO_ALG_ALLOCATES_MEMORY |
1555 CRYPTO_ALG_KERN_DRIVER_ONLY,
1556 .cra_blocksize = SHA512_BLOCK_SIZE,
1557 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1558 .cra_init = safexcel_ahash_cra_init,
1559 .cra_exit = safexcel_ahash_cra_exit,
1560 .cra_module = THIS_MODULE,
1561 },
1562 },
1563 },
1564};
1565
1566static int safexcel_sha384_init(struct ahash_request *areq)
1567{
1568 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1569 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1570
1571 memset(req, 0, sizeof(*req));
1572
1573 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1574 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1575 req->state_sz = SHA512_DIGEST_SIZE;
1576 req->digest_sz = SHA512_DIGEST_SIZE;
1577 req->block_sz = SHA512_BLOCK_SIZE;
1578
1579 return 0;
1580}
1581
1582static int safexcel_sha384_digest(struct ahash_request *areq)
1583{
1584 int ret = safexcel_sha384_init(areq);
1585
1586 if (ret)
1587 return ret;
1588
1589 return safexcel_ahash_finup(areq);
1590}
1591
1592struct safexcel_alg_template safexcel_alg_sha384 = {
1593 .type = SAFEXCEL_ALG_TYPE_AHASH,
1594 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1595 .alg.ahash = {
1596 .init = safexcel_sha384_init,
1597 .update = safexcel_ahash_update,
1598 .final = safexcel_ahash_final,
1599 .finup = safexcel_ahash_finup,
1600 .digest = safexcel_sha384_digest,
1601 .export = safexcel_ahash_export,
1602 .import = safexcel_ahash_import,
1603 .halg = {
1604 .digestsize = SHA384_DIGEST_SIZE,
1605 .statesize = sizeof(struct safexcel_ahash_export_state),
1606 .base = {
1607 .cra_name = "sha384",
1608 .cra_driver_name = "safexcel-sha384",
1609 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1610 .cra_flags = CRYPTO_ALG_ASYNC |
1611 CRYPTO_ALG_ALLOCATES_MEMORY |
1612 CRYPTO_ALG_KERN_DRIVER_ONLY,
1613 .cra_blocksize = SHA384_BLOCK_SIZE,
1614 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1615 .cra_init = safexcel_ahash_cra_init,
1616 .cra_exit = safexcel_ahash_cra_exit,
1617 .cra_module = THIS_MODULE,
1618 },
1619 },
1620 },
1621};
1622
1623static int safexcel_hmac_sha512_setkey(struct crypto_ahash *tfm, const u8 *key,
1624 unsigned int keylen)
1625{
1626 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha512",
1627 SHA512_DIGEST_SIZE);
1628}
1629
1630static int safexcel_hmac_sha512_init(struct ahash_request *areq)
1631{
1632 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1633 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1634
1635 memset(req, 0, sizeof(*req));
1636
1637 /* Start from ipad precompute */
1638 memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1639 /* Already processed the key^ipad part now! */
1640 req->len = SHA512_BLOCK_SIZE;
1641 req->processed = SHA512_BLOCK_SIZE;
1642
1643 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA512;
1644 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1645 req->state_sz = SHA512_DIGEST_SIZE;
1646 req->digest_sz = SHA512_DIGEST_SIZE;
1647 req->block_sz = SHA512_BLOCK_SIZE;
1648 req->hmac = true;
1649
1650 return 0;
1651}
1652
1653static int safexcel_hmac_sha512_digest(struct ahash_request *areq)
1654{
1655 int ret = safexcel_hmac_sha512_init(areq);
1656
1657 if (ret)
1658 return ret;
1659
1660 return safexcel_ahash_finup(areq);
1661}
1662
1663struct safexcel_alg_template safexcel_alg_hmac_sha512 = {
1664 .type = SAFEXCEL_ALG_TYPE_AHASH,
1665 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1666 .alg.ahash = {
1667 .init = safexcel_hmac_sha512_init,
1668 .update = safexcel_ahash_update,
1669 .final = safexcel_ahash_final,
1670 .finup = safexcel_ahash_finup,
1671 .digest = safexcel_hmac_sha512_digest,
1672 .setkey = safexcel_hmac_sha512_setkey,
1673 .export = safexcel_ahash_export,
1674 .import = safexcel_ahash_import,
1675 .halg = {
1676 .digestsize = SHA512_DIGEST_SIZE,
1677 .statesize = sizeof(struct safexcel_ahash_export_state),
1678 .base = {
1679 .cra_name = "hmac(sha512)",
1680 .cra_driver_name = "safexcel-hmac-sha512",
1681 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1682 .cra_flags = CRYPTO_ALG_ASYNC |
1683 CRYPTO_ALG_ALLOCATES_MEMORY |
1684 CRYPTO_ALG_KERN_DRIVER_ONLY,
1685 .cra_blocksize = SHA512_BLOCK_SIZE,
1686 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1687 .cra_init = safexcel_ahash_cra_init,
1688 .cra_exit = safexcel_ahash_cra_exit,
1689 .cra_module = THIS_MODULE,
1690 },
1691 },
1692 },
1693};
1694
1695static int safexcel_hmac_sha384_setkey(struct crypto_ahash *tfm, const u8 *key,
1696 unsigned int keylen)
1697{
1698 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sha384",
1699 SHA512_DIGEST_SIZE);
1700}
1701
1702static int safexcel_hmac_sha384_init(struct ahash_request *areq)
1703{
1704 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1705 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1706
1707 memset(req, 0, sizeof(*req));
1708
1709 /* Start from ipad precompute */
1710 memcpy(req->state, ctx->ipad, SHA512_DIGEST_SIZE);
1711 /* Already processed the key^ipad part now! */
1712 req->len = SHA512_BLOCK_SIZE;
1713 req->processed = SHA512_BLOCK_SIZE;
1714
1715 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA384;
1716 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1717 req->state_sz = SHA512_DIGEST_SIZE;
1718 req->digest_sz = SHA512_DIGEST_SIZE;
1719 req->block_sz = SHA512_BLOCK_SIZE;
1720 req->hmac = true;
1721
1722 return 0;
1723}
1724
1725static int safexcel_hmac_sha384_digest(struct ahash_request *areq)
1726{
1727 int ret = safexcel_hmac_sha384_init(areq);
1728
1729 if (ret)
1730 return ret;
1731
1732 return safexcel_ahash_finup(areq);
1733}
1734
1735struct safexcel_alg_template safexcel_alg_hmac_sha384 = {
1736 .type = SAFEXCEL_ALG_TYPE_AHASH,
1737 .algo_mask = SAFEXCEL_ALG_SHA2_512,
1738 .alg.ahash = {
1739 .init = safexcel_hmac_sha384_init,
1740 .update = safexcel_ahash_update,
1741 .final = safexcel_ahash_final,
1742 .finup = safexcel_ahash_finup,
1743 .digest = safexcel_hmac_sha384_digest,
1744 .setkey = safexcel_hmac_sha384_setkey,
1745 .export = safexcel_ahash_export,
1746 .import = safexcel_ahash_import,
1747 .halg = {
1748 .digestsize = SHA384_DIGEST_SIZE,
1749 .statesize = sizeof(struct safexcel_ahash_export_state),
1750 .base = {
1751 .cra_name = "hmac(sha384)",
1752 .cra_driver_name = "safexcel-hmac-sha384",
1753 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1754 .cra_flags = CRYPTO_ALG_ASYNC |
1755 CRYPTO_ALG_ALLOCATES_MEMORY |
1756 CRYPTO_ALG_KERN_DRIVER_ONLY,
1757 .cra_blocksize = SHA384_BLOCK_SIZE,
1758 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1759 .cra_init = safexcel_ahash_cra_init,
1760 .cra_exit = safexcel_ahash_cra_exit,
1761 .cra_module = THIS_MODULE,
1762 },
1763 },
1764 },
1765};
1766
1767static int safexcel_md5_init(struct ahash_request *areq)
1768{
1769 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1770 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1771
1772 memset(req, 0, sizeof(*req));
1773
1774 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1775 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1776 req->state_sz = MD5_DIGEST_SIZE;
1777 req->digest_sz = MD5_DIGEST_SIZE;
1778 req->block_sz = MD5_HMAC_BLOCK_SIZE;
1779
1780 return 0;
1781}
1782
1783static int safexcel_md5_digest(struct ahash_request *areq)
1784{
1785 int ret = safexcel_md5_init(areq);
1786
1787 if (ret)
1788 return ret;
1789
1790 return safexcel_ahash_finup(areq);
1791}
1792
1793struct safexcel_alg_template safexcel_alg_md5 = {
1794 .type = SAFEXCEL_ALG_TYPE_AHASH,
1795 .algo_mask = SAFEXCEL_ALG_MD5,
1796 .alg.ahash = {
1797 .init = safexcel_md5_init,
1798 .update = safexcel_ahash_update,
1799 .final = safexcel_ahash_final,
1800 .finup = safexcel_ahash_finup,
1801 .digest = safexcel_md5_digest,
1802 .export = safexcel_ahash_export,
1803 .import = safexcel_ahash_import,
1804 .halg = {
1805 .digestsize = MD5_DIGEST_SIZE,
1806 .statesize = sizeof(struct safexcel_ahash_export_state),
1807 .base = {
1808 .cra_name = "md5",
1809 .cra_driver_name = "safexcel-md5",
1810 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1811 .cra_flags = CRYPTO_ALG_ASYNC |
1812 CRYPTO_ALG_ALLOCATES_MEMORY |
1813 CRYPTO_ALG_KERN_DRIVER_ONLY,
1814 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1815 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1816 .cra_init = safexcel_ahash_cra_init,
1817 .cra_exit = safexcel_ahash_cra_exit,
1818 .cra_module = THIS_MODULE,
1819 },
1820 },
1821 },
1822};
1823
1824static int safexcel_hmac_md5_init(struct ahash_request *areq)
1825{
1826 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1827 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1828
1829 memset(req, 0, sizeof(*req));
1830
1831 /* Start from ipad precompute */
1832 memcpy(req->state, ctx->ipad, MD5_DIGEST_SIZE);
1833 /* Already processed the key^ipad part now! */
1834 req->len = MD5_HMAC_BLOCK_SIZE;
1835 req->processed = MD5_HMAC_BLOCK_SIZE;
1836
1837 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_MD5;
1838 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
1839 req->state_sz = MD5_DIGEST_SIZE;
1840 req->digest_sz = MD5_DIGEST_SIZE;
1841 req->block_sz = MD5_HMAC_BLOCK_SIZE;
1842 req->len_is_le = true; /* MD5 is little endian! ... */
1843 req->hmac = true;
1844
1845 return 0;
1846}
1847
1848static int safexcel_hmac_md5_setkey(struct crypto_ahash *tfm, const u8 *key,
1849 unsigned int keylen)
1850{
1851 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-md5",
1852 MD5_DIGEST_SIZE);
1853}
1854
1855static int safexcel_hmac_md5_digest(struct ahash_request *areq)
1856{
1857 int ret = safexcel_hmac_md5_init(areq);
1858
1859 if (ret)
1860 return ret;
1861
1862 return safexcel_ahash_finup(areq);
1863}
1864
1865struct safexcel_alg_template safexcel_alg_hmac_md5 = {
1866 .type = SAFEXCEL_ALG_TYPE_AHASH,
1867 .algo_mask = SAFEXCEL_ALG_MD5,
1868 .alg.ahash = {
1869 .init = safexcel_hmac_md5_init,
1870 .update = safexcel_ahash_update,
1871 .final = safexcel_ahash_final,
1872 .finup = safexcel_ahash_finup,
1873 .digest = safexcel_hmac_md5_digest,
1874 .setkey = safexcel_hmac_md5_setkey,
1875 .export = safexcel_ahash_export,
1876 .import = safexcel_ahash_import,
1877 .halg = {
1878 .digestsize = MD5_DIGEST_SIZE,
1879 .statesize = sizeof(struct safexcel_ahash_export_state),
1880 .base = {
1881 .cra_name = "hmac(md5)",
1882 .cra_driver_name = "safexcel-hmac-md5",
1883 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1884 .cra_flags = CRYPTO_ALG_ASYNC |
1885 CRYPTO_ALG_ALLOCATES_MEMORY |
1886 CRYPTO_ALG_KERN_DRIVER_ONLY,
1887 .cra_blocksize = MD5_HMAC_BLOCK_SIZE,
1888 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1889 .cra_init = safexcel_ahash_cra_init,
1890 .cra_exit = safexcel_ahash_cra_exit,
1891 .cra_module = THIS_MODULE,
1892 },
1893 },
1894 },
1895};
1896
1897static int safexcel_crc32_cra_init(struct crypto_tfm *tfm)
1898{
1899 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
1900 int ret = safexcel_ahash_cra_init(tfm);
1901
1902 /* Default 'key' is all zeroes */
1903 memset(ctx->ipad, 0, sizeof(u32));
1904 return ret;
1905}
1906
1907static int safexcel_crc32_init(struct ahash_request *areq)
1908{
1909 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1910 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1911
1912 memset(req, 0, sizeof(*req));
1913
1914 /* Start from loaded key */
1915 req->state[0] = (__force __le32)le32_to_cpu(~ctx->ipad[0]);
1916 /* Set processed to non-zero to enable invalidation detection */
1917 req->len = sizeof(u32);
1918 req->processed = sizeof(u32);
1919
1920 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_CRC32;
1921 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1922 req->state_sz = sizeof(u32);
1923 req->digest_sz = sizeof(u32);
1924 req->block_sz = sizeof(u32);
1925
1926 return 0;
1927}
1928
1929static int safexcel_crc32_setkey(struct crypto_ahash *tfm, const u8 *key,
1930 unsigned int keylen)
1931{
1932 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
1933
1934 if (keylen != sizeof(u32))
1935 return -EINVAL;
1936
1937 memcpy(ctx->ipad, key, sizeof(u32));
1938 return 0;
1939}
1940
1941static int safexcel_crc32_digest(struct ahash_request *areq)
1942{
1943 return safexcel_crc32_init(areq) ?: safexcel_ahash_finup(areq);
1944}
1945
1946struct safexcel_alg_template safexcel_alg_crc32 = {
1947 .type = SAFEXCEL_ALG_TYPE_AHASH,
1948 .algo_mask = 0,
1949 .alg.ahash = {
1950 .init = safexcel_crc32_init,
1951 .update = safexcel_ahash_update,
1952 .final = safexcel_ahash_final,
1953 .finup = safexcel_ahash_finup,
1954 .digest = safexcel_crc32_digest,
1955 .setkey = safexcel_crc32_setkey,
1956 .export = safexcel_ahash_export,
1957 .import = safexcel_ahash_import,
1958 .halg = {
1959 .digestsize = sizeof(u32),
1960 .statesize = sizeof(struct safexcel_ahash_export_state),
1961 .base = {
1962 .cra_name = "crc32",
1963 .cra_driver_name = "safexcel-crc32",
1964 .cra_priority = SAFEXCEL_CRA_PRIORITY,
1965 .cra_flags = CRYPTO_ALG_OPTIONAL_KEY |
1966 CRYPTO_ALG_ASYNC |
1967 CRYPTO_ALG_ALLOCATES_MEMORY |
1968 CRYPTO_ALG_KERN_DRIVER_ONLY,
1969 .cra_blocksize = 1,
1970 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
1971 .cra_init = safexcel_crc32_cra_init,
1972 .cra_exit = safexcel_ahash_cra_exit,
1973 .cra_module = THIS_MODULE,
1974 },
1975 },
1976 },
1977};
1978
1979static int safexcel_cbcmac_init(struct ahash_request *areq)
1980{
1981 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
1982 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
1983
1984 memset(req, 0, sizeof(*req));
1985
1986 /* Start from loaded keys */
1987 memcpy(req->state, ctx->ipad, ctx->key_sz);
1988 /* Set processed to non-zero to enable invalidation detection */
1989 req->len = AES_BLOCK_SIZE;
1990 req->processed = AES_BLOCK_SIZE;
1991
1992 req->digest = CONTEXT_CONTROL_DIGEST_XCM;
1993 req->state_sz = ctx->key_sz;
1994 req->digest_sz = AES_BLOCK_SIZE;
1995 req->block_sz = AES_BLOCK_SIZE;
1996 req->xcbcmac = true;
1997
1998 return 0;
1999}
2000
2001static int safexcel_cbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2002 unsigned int len)
2003{
2004 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2005 struct crypto_aes_ctx aes;
2006 int ret, i;
2007
2008 ret = aes_expandkey(&aes, key, len);
2009 if (ret)
2010 return ret;
2011
2012 memset(ctx->ipad, 0, 2 * AES_BLOCK_SIZE);
2013 for (i = 0; i < len / sizeof(u32); i++)
2014 ctx->ipad[i + 8] = (__force __le32)cpu_to_be32(aes.key_enc[i]);
2015
2016 if (len == AES_KEYSIZE_192) {
2017 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2018 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2019 } else if (len == AES_KEYSIZE_256) {
2020 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2021 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2022 } else {
2023 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2024 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2025 }
2026 ctx->cbcmac = true;
2027
2028 memzero_explicit(&aes, sizeof(aes));
2029 return 0;
2030}
2031
2032static int safexcel_cbcmac_digest(struct ahash_request *areq)
2033{
2034 return safexcel_cbcmac_init(areq) ?: safexcel_ahash_finup(areq);
2035}
2036
2037struct safexcel_alg_template safexcel_alg_cbcmac = {
2038 .type = SAFEXCEL_ALG_TYPE_AHASH,
2039 .algo_mask = 0,
2040 .alg.ahash = {
2041 .init = safexcel_cbcmac_init,
2042 .update = safexcel_ahash_update,
2043 .final = safexcel_ahash_final,
2044 .finup = safexcel_ahash_finup,
2045 .digest = safexcel_cbcmac_digest,
2046 .setkey = safexcel_cbcmac_setkey,
2047 .export = safexcel_ahash_export,
2048 .import = safexcel_ahash_import,
2049 .halg = {
2050 .digestsize = AES_BLOCK_SIZE,
2051 .statesize = sizeof(struct safexcel_ahash_export_state),
2052 .base = {
2053 .cra_name = "cbcmac(aes)",
2054 .cra_driver_name = "safexcel-cbcmac-aes",
2055 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2056 .cra_flags = CRYPTO_ALG_ASYNC |
2057 CRYPTO_ALG_ALLOCATES_MEMORY |
2058 CRYPTO_ALG_KERN_DRIVER_ONLY,
2059 .cra_blocksize = 1,
2060 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2061 .cra_init = safexcel_ahash_cra_init,
2062 .cra_exit = safexcel_ahash_cra_exit,
2063 .cra_module = THIS_MODULE,
2064 },
2065 },
2066 },
2067};
2068
2069static int safexcel_xcbcmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2070 unsigned int len)
2071{
2072 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2073 struct crypto_aes_ctx aes;
2074 u32 key_tmp[3 * AES_BLOCK_SIZE / sizeof(u32)];
2075 int ret, i;
2076
2077 ret = aes_expandkey(&aes, key, len);
2078 if (ret)
2079 return ret;
2080
2081 /* precompute the XCBC key material */
2082 crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2083 crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2084 CRYPTO_TFM_REQ_MASK);
2085 ret = crypto_cipher_setkey(ctx->kaes, key, len);
2086 if (ret)
2087 return ret;
2088
2089 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2090 "\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1\x1");
2091 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp,
2092 "\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2\x2");
2093 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)key_tmp + AES_BLOCK_SIZE,
2094 "\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3\x3");
2095 for (i = 0; i < 3 * AES_BLOCK_SIZE / sizeof(u32); i++)
2096 ctx->ipad[i] =
2097 cpu_to_le32((__force u32)cpu_to_be32(key_tmp[i]));
2098
2099 crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2100 crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2101 CRYPTO_TFM_REQ_MASK);
2102 ret = crypto_cipher_setkey(ctx->kaes,
2103 (u8 *)key_tmp + 2 * AES_BLOCK_SIZE,
2104 AES_MIN_KEY_SIZE);
2105 if (ret)
2106 return ret;
2107
2108 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2109 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2110 ctx->cbcmac = false;
2111
2112 memzero_explicit(&aes, sizeof(aes));
2113 return 0;
2114}
2115
2116static int safexcel_xcbcmac_cra_init(struct crypto_tfm *tfm)
2117{
2118 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2119
2120 safexcel_ahash_cra_init(tfm);
2121 ctx->kaes = crypto_alloc_cipher("aes", 0, 0);
2122 return PTR_ERR_OR_ZERO(ctx->kaes);
2123}
2124
2125static void safexcel_xcbcmac_cra_exit(struct crypto_tfm *tfm)
2126{
2127 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2128
2129 crypto_free_cipher(ctx->kaes);
2130 safexcel_ahash_cra_exit(tfm);
2131}
2132
2133struct safexcel_alg_template safexcel_alg_xcbcmac = {
2134 .type = SAFEXCEL_ALG_TYPE_AHASH,
2135 .algo_mask = 0,
2136 .alg.ahash = {
2137 .init = safexcel_cbcmac_init,
2138 .update = safexcel_ahash_update,
2139 .final = safexcel_ahash_final,
2140 .finup = safexcel_ahash_finup,
2141 .digest = safexcel_cbcmac_digest,
2142 .setkey = safexcel_xcbcmac_setkey,
2143 .export = safexcel_ahash_export,
2144 .import = safexcel_ahash_import,
2145 .halg = {
2146 .digestsize = AES_BLOCK_SIZE,
2147 .statesize = sizeof(struct safexcel_ahash_export_state),
2148 .base = {
2149 .cra_name = "xcbc(aes)",
2150 .cra_driver_name = "safexcel-xcbc-aes",
2151 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2152 .cra_flags = CRYPTO_ALG_ASYNC |
2153 CRYPTO_ALG_ALLOCATES_MEMORY |
2154 CRYPTO_ALG_KERN_DRIVER_ONLY,
2155 .cra_blocksize = AES_BLOCK_SIZE,
2156 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2157 .cra_init = safexcel_xcbcmac_cra_init,
2158 .cra_exit = safexcel_xcbcmac_cra_exit,
2159 .cra_module = THIS_MODULE,
2160 },
2161 },
2162 },
2163};
2164
2165static int safexcel_cmac_setkey(struct crypto_ahash *tfm, const u8 *key,
2166 unsigned int len)
2167{
2168 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2169 struct crypto_aes_ctx aes;
2170 __be64 consts[4];
2171 u64 _const[2];
2172 u8 msb_mask, gfmask;
2173 int ret, i;
2174
2175 ret = aes_expandkey(&aes, key, len);
2176 if (ret)
2177 return ret;
2178
2179 for (i = 0; i < len / sizeof(u32); i++)
2180 ctx->ipad[i + 8] =
2181 cpu_to_le32((__force u32)cpu_to_be32(aes.key_enc[i]));
2182
2183 /* precompute the CMAC key material */
2184 crypto_cipher_clear_flags(ctx->kaes, CRYPTO_TFM_REQ_MASK);
2185 crypto_cipher_set_flags(ctx->kaes, crypto_ahash_get_flags(tfm) &
2186 CRYPTO_TFM_REQ_MASK);
2187 ret = crypto_cipher_setkey(ctx->kaes, key, len);
2188 if (ret)
2189 return ret;
2190
2191 /* code below borrowed from crypto/cmac.c */
2192 /* encrypt the zero block */
2193 memset(consts, 0, AES_BLOCK_SIZE);
2194 crypto_cipher_encrypt_one(ctx->kaes, (u8 *)consts, (u8 *)consts);
2195
2196 gfmask = 0x87;
2197 _const[0] = be64_to_cpu(consts[1]);
2198 _const[1] = be64_to_cpu(consts[0]);
2199
2200 /* gf(2^128) multiply zero-ciphertext with u and u^2 */
2201 for (i = 0; i < 4; i += 2) {
2202 msb_mask = ((s64)_const[1] >> 63) & gfmask;
2203 _const[1] = (_const[1] << 1) | (_const[0] >> 63);
2204 _const[0] = (_const[0] << 1) ^ msb_mask;
2205
2206 consts[i + 0] = cpu_to_be64(_const[1]);
2207 consts[i + 1] = cpu_to_be64(_const[0]);
2208 }
2209 /* end of code borrowed from crypto/cmac.c */
2210
2211 for (i = 0; i < 2 * AES_BLOCK_SIZE / sizeof(u32); i++)
2212 ctx->ipad[i] = (__force __le32)cpu_to_be32(((u32 *)consts)[i]);
2213
2214 if (len == AES_KEYSIZE_192) {
2215 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC192;
2216 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2217 } else if (len == AES_KEYSIZE_256) {
2218 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC256;
2219 ctx->key_sz = AES_MAX_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2220 } else {
2221 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_XCBC128;
2222 ctx->key_sz = AES_MIN_KEY_SIZE + 2 * AES_BLOCK_SIZE;
2223 }
2224 ctx->cbcmac = false;
2225
2226 memzero_explicit(&aes, sizeof(aes));
2227 return 0;
2228}
2229
2230struct safexcel_alg_template safexcel_alg_cmac = {
2231 .type = SAFEXCEL_ALG_TYPE_AHASH,
2232 .algo_mask = 0,
2233 .alg.ahash = {
2234 .init = safexcel_cbcmac_init,
2235 .update = safexcel_ahash_update,
2236 .final = safexcel_ahash_final,
2237 .finup = safexcel_ahash_finup,
2238 .digest = safexcel_cbcmac_digest,
2239 .setkey = safexcel_cmac_setkey,
2240 .export = safexcel_ahash_export,
2241 .import = safexcel_ahash_import,
2242 .halg = {
2243 .digestsize = AES_BLOCK_SIZE,
2244 .statesize = sizeof(struct safexcel_ahash_export_state),
2245 .base = {
2246 .cra_name = "cmac(aes)",
2247 .cra_driver_name = "safexcel-cmac-aes",
2248 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2249 .cra_flags = CRYPTO_ALG_ASYNC |
2250 CRYPTO_ALG_ALLOCATES_MEMORY |
2251 CRYPTO_ALG_KERN_DRIVER_ONLY,
2252 .cra_blocksize = AES_BLOCK_SIZE,
2253 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2254 .cra_init = safexcel_xcbcmac_cra_init,
2255 .cra_exit = safexcel_xcbcmac_cra_exit,
2256 .cra_module = THIS_MODULE,
2257 },
2258 },
2259 },
2260};
2261
2262static int safexcel_sm3_init(struct ahash_request *areq)
2263{
2264 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2265 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2266
2267 memset(req, 0, sizeof(*req));
2268
2269 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2270 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2271 req->state_sz = SM3_DIGEST_SIZE;
2272 req->digest_sz = SM3_DIGEST_SIZE;
2273 req->block_sz = SM3_BLOCK_SIZE;
2274
2275 return 0;
2276}
2277
2278static int safexcel_sm3_digest(struct ahash_request *areq)
2279{
2280 int ret = safexcel_sm3_init(areq);
2281
2282 if (ret)
2283 return ret;
2284
2285 return safexcel_ahash_finup(areq);
2286}
2287
2288struct safexcel_alg_template safexcel_alg_sm3 = {
2289 .type = SAFEXCEL_ALG_TYPE_AHASH,
2290 .algo_mask = SAFEXCEL_ALG_SM3,
2291 .alg.ahash = {
2292 .init = safexcel_sm3_init,
2293 .update = safexcel_ahash_update,
2294 .final = safexcel_ahash_final,
2295 .finup = safexcel_ahash_finup,
2296 .digest = safexcel_sm3_digest,
2297 .export = safexcel_ahash_export,
2298 .import = safexcel_ahash_import,
2299 .halg = {
2300 .digestsize = SM3_DIGEST_SIZE,
2301 .statesize = sizeof(struct safexcel_ahash_export_state),
2302 .base = {
2303 .cra_name = "sm3",
2304 .cra_driver_name = "safexcel-sm3",
2305 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2306 .cra_flags = CRYPTO_ALG_ASYNC |
2307 CRYPTO_ALG_ALLOCATES_MEMORY |
2308 CRYPTO_ALG_KERN_DRIVER_ONLY,
2309 .cra_blocksize = SM3_BLOCK_SIZE,
2310 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2311 .cra_init = safexcel_ahash_cra_init,
2312 .cra_exit = safexcel_ahash_cra_exit,
2313 .cra_module = THIS_MODULE,
2314 },
2315 },
2316 },
2317};
2318
2319static int safexcel_hmac_sm3_setkey(struct crypto_ahash *tfm, const u8 *key,
2320 unsigned int keylen)
2321{
2322 return safexcel_hmac_alg_setkey(tfm, key, keylen, "safexcel-sm3",
2323 SM3_DIGEST_SIZE);
2324}
2325
2326static int safexcel_hmac_sm3_init(struct ahash_request *areq)
2327{
2328 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(crypto_ahash_reqtfm(areq));
2329 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2330
2331 memset(req, 0, sizeof(*req));
2332
2333 /* Start from ipad precompute */
2334 memcpy(req->state, ctx->ipad, SM3_DIGEST_SIZE);
2335 /* Already processed the key^ipad part now! */
2336 req->len = SM3_BLOCK_SIZE;
2337 req->processed = SM3_BLOCK_SIZE;
2338
2339 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SM3;
2340 req->digest = CONTEXT_CONTROL_DIGEST_PRECOMPUTED;
2341 req->state_sz = SM3_DIGEST_SIZE;
2342 req->digest_sz = SM3_DIGEST_SIZE;
2343 req->block_sz = SM3_BLOCK_SIZE;
2344 req->hmac = true;
2345
2346 return 0;
2347}
2348
2349static int safexcel_hmac_sm3_digest(struct ahash_request *areq)
2350{
2351 int ret = safexcel_hmac_sm3_init(areq);
2352
2353 if (ret)
2354 return ret;
2355
2356 return safexcel_ahash_finup(areq);
2357}
2358
2359struct safexcel_alg_template safexcel_alg_hmac_sm3 = {
2360 .type = SAFEXCEL_ALG_TYPE_AHASH,
2361 .algo_mask = SAFEXCEL_ALG_SM3,
2362 .alg.ahash = {
2363 .init = safexcel_hmac_sm3_init,
2364 .update = safexcel_ahash_update,
2365 .final = safexcel_ahash_final,
2366 .finup = safexcel_ahash_finup,
2367 .digest = safexcel_hmac_sm3_digest,
2368 .setkey = safexcel_hmac_sm3_setkey,
2369 .export = safexcel_ahash_export,
2370 .import = safexcel_ahash_import,
2371 .halg = {
2372 .digestsize = SM3_DIGEST_SIZE,
2373 .statesize = sizeof(struct safexcel_ahash_export_state),
2374 .base = {
2375 .cra_name = "hmac(sm3)",
2376 .cra_driver_name = "safexcel-hmac-sm3",
2377 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2378 .cra_flags = CRYPTO_ALG_ASYNC |
2379 CRYPTO_ALG_ALLOCATES_MEMORY |
2380 CRYPTO_ALG_KERN_DRIVER_ONLY,
2381 .cra_blocksize = SM3_BLOCK_SIZE,
2382 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2383 .cra_init = safexcel_ahash_cra_init,
2384 .cra_exit = safexcel_ahash_cra_exit,
2385 .cra_module = THIS_MODULE,
2386 },
2387 },
2388 },
2389};
2390
2391static int safexcel_sha3_224_init(struct ahash_request *areq)
2392{
2393 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2394 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2395 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2396
2397 memset(req, 0, sizeof(*req));
2398
2399 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2400 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2401 req->state_sz = SHA3_224_DIGEST_SIZE;
2402 req->digest_sz = SHA3_224_DIGEST_SIZE;
2403 req->block_sz = SHA3_224_BLOCK_SIZE;
2404 ctx->do_fallback = false;
2405 ctx->fb_init_done = false;
2406 return 0;
2407}
2408
2409static int safexcel_sha3_fbcheck(struct ahash_request *req)
2410{
2411 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2412 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2413 struct ahash_request *subreq = ahash_request_ctx(req);
2414 int ret = 0;
2415
2416 if (ctx->do_fallback) {
2417 ahash_request_set_tfm(subreq, ctx->fback);
2418 ahash_request_set_callback(subreq, req->base.flags,
2419 req->base.complete, req->base.data);
2420 ahash_request_set_crypt(subreq, req->src, req->result,
2421 req->nbytes);
2422 if (!ctx->fb_init_done) {
2423 if (ctx->fb_do_setkey) {
2424 /* Set fallback cipher HMAC key */
2425 u8 key[SHA3_224_BLOCK_SIZE];
2426
2427 memcpy(key, ctx->ipad,
2428 crypto_ahash_blocksize(ctx->fback) / 2);
2429 memcpy(key +
2430 crypto_ahash_blocksize(ctx->fback) / 2,
2431 ctx->opad,
2432 crypto_ahash_blocksize(ctx->fback) / 2);
2433 ret = crypto_ahash_setkey(ctx->fback, key,
2434 crypto_ahash_blocksize(ctx->fback));
2435 memzero_explicit(key,
2436 crypto_ahash_blocksize(ctx->fback));
2437 ctx->fb_do_setkey = false;
2438 }
2439 ret = ret ?: crypto_ahash_init(subreq);
2440 ctx->fb_init_done = true;
2441 }
2442 }
2443 return ret;
2444}
2445
2446static int safexcel_sha3_update(struct ahash_request *req)
2447{
2448 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2449 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2450 struct ahash_request *subreq = ahash_request_ctx(req);
2451
2452 ctx->do_fallback = true;
2453 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_update(subreq);
2454}
2455
2456static int safexcel_sha3_final(struct ahash_request *req)
2457{
2458 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2459 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2460 struct ahash_request *subreq = ahash_request_ctx(req);
2461
2462 ctx->do_fallback = true;
2463 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_final(subreq);
2464}
2465
2466static int safexcel_sha3_finup(struct ahash_request *req)
2467{
2468 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2469 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2470 struct ahash_request *subreq = ahash_request_ctx(req);
2471
2472 ctx->do_fallback |= !req->nbytes;
2473 if (ctx->do_fallback)
2474 /* Update or ex/import happened or len 0, cannot use the HW */
2475 return safexcel_sha3_fbcheck(req) ?:
2476 crypto_ahash_finup(subreq);
2477 else
2478 return safexcel_ahash_finup(req);
2479}
2480
2481static int safexcel_sha3_digest_fallback(struct ahash_request *req)
2482{
2483 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2484 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2485 struct ahash_request *subreq = ahash_request_ctx(req);
2486
2487 ctx->do_fallback = true;
2488 ctx->fb_init_done = false;
2489 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_finup(subreq);
2490}
2491
2492static int safexcel_sha3_224_digest(struct ahash_request *req)
2493{
2494 if (req->nbytes)
2495 return safexcel_sha3_224_init(req) ?: safexcel_ahash_finup(req);
2496
2497 /* HW cannot do zero length hash, use fallback instead */
2498 return safexcel_sha3_digest_fallback(req);
2499}
2500
2501static int safexcel_sha3_export(struct ahash_request *req, void *out)
2502{
2503 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2504 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2505 struct ahash_request *subreq = ahash_request_ctx(req);
2506
2507 ctx->do_fallback = true;
2508 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_export(subreq, out);
2509}
2510
2511static int safexcel_sha3_import(struct ahash_request *req, const void *in)
2512{
2513 struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
2514 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2515 struct ahash_request *subreq = ahash_request_ctx(req);
2516
2517 ctx->do_fallback = true;
2518 return safexcel_sha3_fbcheck(req) ?: crypto_ahash_import(subreq, in);
2519 // return safexcel_ahash_import(req, in);
2520}
2521
2522static int safexcel_sha3_cra_init(struct crypto_tfm *tfm)
2523{
2524 struct crypto_ahash *ahash = __crypto_ahash_cast(tfm);
2525 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2526
2527 safexcel_ahash_cra_init(tfm);
2528
2529 /* Allocate fallback implementation */
2530 ctx->fback = crypto_alloc_ahash(crypto_tfm_alg_name(tfm), 0,
2531 CRYPTO_ALG_ASYNC |
2532 CRYPTO_ALG_NEED_FALLBACK);
2533 if (IS_ERR(ctx->fback))
2534 return PTR_ERR(ctx->fback);
2535
2536 /* Update statesize from fallback algorithm! */
2537 crypto_hash_alg_common(ahash)->statesize =
2538 crypto_ahash_statesize(ctx->fback);
2539 crypto_ahash_set_reqsize(ahash, max(sizeof(struct safexcel_ahash_req),
2540 sizeof(struct ahash_request) +
2541 crypto_ahash_reqsize(ctx->fback)));
2542 return 0;
2543}
2544
2545static void safexcel_sha3_cra_exit(struct crypto_tfm *tfm)
2546{
2547 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2548
2549 crypto_free_ahash(ctx->fback);
2550 safexcel_ahash_cra_exit(tfm);
2551}
2552
2553struct safexcel_alg_template safexcel_alg_sha3_224 = {
2554 .type = SAFEXCEL_ALG_TYPE_AHASH,
2555 .algo_mask = SAFEXCEL_ALG_SHA3,
2556 .alg.ahash = {
2557 .init = safexcel_sha3_224_init,
2558 .update = safexcel_sha3_update,
2559 .final = safexcel_sha3_final,
2560 .finup = safexcel_sha3_finup,
2561 .digest = safexcel_sha3_224_digest,
2562 .export = safexcel_sha3_export,
2563 .import = safexcel_sha3_import,
2564 .halg = {
2565 .digestsize = SHA3_224_DIGEST_SIZE,
2566 .statesize = sizeof(struct safexcel_ahash_export_state),
2567 .base = {
2568 .cra_name = "sha3-224",
2569 .cra_driver_name = "safexcel-sha3-224",
2570 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2571 .cra_flags = CRYPTO_ALG_ASYNC |
2572 CRYPTO_ALG_KERN_DRIVER_ONLY |
2573 CRYPTO_ALG_NEED_FALLBACK,
2574 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2575 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2576 .cra_init = safexcel_sha3_cra_init,
2577 .cra_exit = safexcel_sha3_cra_exit,
2578 .cra_module = THIS_MODULE,
2579 },
2580 },
2581 },
2582};
2583
2584static int safexcel_sha3_256_init(struct ahash_request *areq)
2585{
2586 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2587 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2588 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2589
2590 memset(req, 0, sizeof(*req));
2591
2592 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2593 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2594 req->state_sz = SHA3_256_DIGEST_SIZE;
2595 req->digest_sz = SHA3_256_DIGEST_SIZE;
2596 req->block_sz = SHA3_256_BLOCK_SIZE;
2597 ctx->do_fallback = false;
2598 ctx->fb_init_done = false;
2599 return 0;
2600}
2601
2602static int safexcel_sha3_256_digest(struct ahash_request *req)
2603{
2604 if (req->nbytes)
2605 return safexcel_sha3_256_init(req) ?: safexcel_ahash_finup(req);
2606
2607 /* HW cannot do zero length hash, use fallback instead */
2608 return safexcel_sha3_digest_fallback(req);
2609}
2610
2611struct safexcel_alg_template safexcel_alg_sha3_256 = {
2612 .type = SAFEXCEL_ALG_TYPE_AHASH,
2613 .algo_mask = SAFEXCEL_ALG_SHA3,
2614 .alg.ahash = {
2615 .init = safexcel_sha3_256_init,
2616 .update = safexcel_sha3_update,
2617 .final = safexcel_sha3_final,
2618 .finup = safexcel_sha3_finup,
2619 .digest = safexcel_sha3_256_digest,
2620 .export = safexcel_sha3_export,
2621 .import = safexcel_sha3_import,
2622 .halg = {
2623 .digestsize = SHA3_256_DIGEST_SIZE,
2624 .statesize = sizeof(struct safexcel_ahash_export_state),
2625 .base = {
2626 .cra_name = "sha3-256",
2627 .cra_driver_name = "safexcel-sha3-256",
2628 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2629 .cra_flags = CRYPTO_ALG_ASYNC |
2630 CRYPTO_ALG_KERN_DRIVER_ONLY |
2631 CRYPTO_ALG_NEED_FALLBACK,
2632 .cra_blocksize = SHA3_256_BLOCK_SIZE,
2633 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2634 .cra_init = safexcel_sha3_cra_init,
2635 .cra_exit = safexcel_sha3_cra_exit,
2636 .cra_module = THIS_MODULE,
2637 },
2638 },
2639 },
2640};
2641
2642static int safexcel_sha3_384_init(struct ahash_request *areq)
2643{
2644 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2645 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2646 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2647
2648 memset(req, 0, sizeof(*req));
2649
2650 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
2651 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2652 req->state_sz = SHA3_384_DIGEST_SIZE;
2653 req->digest_sz = SHA3_384_DIGEST_SIZE;
2654 req->block_sz = SHA3_384_BLOCK_SIZE;
2655 ctx->do_fallback = false;
2656 ctx->fb_init_done = false;
2657 return 0;
2658}
2659
2660static int safexcel_sha3_384_digest(struct ahash_request *req)
2661{
2662 if (req->nbytes)
2663 return safexcel_sha3_384_init(req) ?: safexcel_ahash_finup(req);
2664
2665 /* HW cannot do zero length hash, use fallback instead */
2666 return safexcel_sha3_digest_fallback(req);
2667}
2668
2669struct safexcel_alg_template safexcel_alg_sha3_384 = {
2670 .type = SAFEXCEL_ALG_TYPE_AHASH,
2671 .algo_mask = SAFEXCEL_ALG_SHA3,
2672 .alg.ahash = {
2673 .init = safexcel_sha3_384_init,
2674 .update = safexcel_sha3_update,
2675 .final = safexcel_sha3_final,
2676 .finup = safexcel_sha3_finup,
2677 .digest = safexcel_sha3_384_digest,
2678 .export = safexcel_sha3_export,
2679 .import = safexcel_sha3_import,
2680 .halg = {
2681 .digestsize = SHA3_384_DIGEST_SIZE,
2682 .statesize = sizeof(struct safexcel_ahash_export_state),
2683 .base = {
2684 .cra_name = "sha3-384",
2685 .cra_driver_name = "safexcel-sha3-384",
2686 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2687 .cra_flags = CRYPTO_ALG_ASYNC |
2688 CRYPTO_ALG_KERN_DRIVER_ONLY |
2689 CRYPTO_ALG_NEED_FALLBACK,
2690 .cra_blocksize = SHA3_384_BLOCK_SIZE,
2691 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2692 .cra_init = safexcel_sha3_cra_init,
2693 .cra_exit = safexcel_sha3_cra_exit,
2694 .cra_module = THIS_MODULE,
2695 },
2696 },
2697 },
2698};
2699
2700static int safexcel_sha3_512_init(struct ahash_request *areq)
2701{
2702 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2703 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2704 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2705
2706 memset(req, 0, sizeof(*req));
2707
2708 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
2709 req->digest = CONTEXT_CONTROL_DIGEST_INITIAL;
2710 req->state_sz = SHA3_512_DIGEST_SIZE;
2711 req->digest_sz = SHA3_512_DIGEST_SIZE;
2712 req->block_sz = SHA3_512_BLOCK_SIZE;
2713 ctx->do_fallback = false;
2714 ctx->fb_init_done = false;
2715 return 0;
2716}
2717
2718static int safexcel_sha3_512_digest(struct ahash_request *req)
2719{
2720 if (req->nbytes)
2721 return safexcel_sha3_512_init(req) ?: safexcel_ahash_finup(req);
2722
2723 /* HW cannot do zero length hash, use fallback instead */
2724 return safexcel_sha3_digest_fallback(req);
2725}
2726
2727struct safexcel_alg_template safexcel_alg_sha3_512 = {
2728 .type = SAFEXCEL_ALG_TYPE_AHASH,
2729 .algo_mask = SAFEXCEL_ALG_SHA3,
2730 .alg.ahash = {
2731 .init = safexcel_sha3_512_init,
2732 .update = safexcel_sha3_update,
2733 .final = safexcel_sha3_final,
2734 .finup = safexcel_sha3_finup,
2735 .digest = safexcel_sha3_512_digest,
2736 .export = safexcel_sha3_export,
2737 .import = safexcel_sha3_import,
2738 .halg = {
2739 .digestsize = SHA3_512_DIGEST_SIZE,
2740 .statesize = sizeof(struct safexcel_ahash_export_state),
2741 .base = {
2742 .cra_name = "sha3-512",
2743 .cra_driver_name = "safexcel-sha3-512",
2744 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2745 .cra_flags = CRYPTO_ALG_ASYNC |
2746 CRYPTO_ALG_KERN_DRIVER_ONLY |
2747 CRYPTO_ALG_NEED_FALLBACK,
2748 .cra_blocksize = SHA3_512_BLOCK_SIZE,
2749 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2750 .cra_init = safexcel_sha3_cra_init,
2751 .cra_exit = safexcel_sha3_cra_exit,
2752 .cra_module = THIS_MODULE,
2753 },
2754 },
2755 },
2756};
2757
2758static int safexcel_hmac_sha3_cra_init(struct crypto_tfm *tfm, const char *alg)
2759{
2760 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2761 int ret;
2762
2763 ret = safexcel_sha3_cra_init(tfm);
2764 if (ret)
2765 return ret;
2766
2767 /* Allocate precalc basic digest implementation */
2768 ctx->shpre = crypto_alloc_shash(alg, 0, CRYPTO_ALG_NEED_FALLBACK);
2769 if (IS_ERR(ctx->shpre))
2770 return PTR_ERR(ctx->shpre);
2771
2772 ctx->shdesc = kmalloc(sizeof(*ctx->shdesc) +
2773 crypto_shash_descsize(ctx->shpre), GFP_KERNEL);
2774 if (!ctx->shdesc) {
2775 crypto_free_shash(ctx->shpre);
2776 return -ENOMEM;
2777 }
2778 ctx->shdesc->tfm = ctx->shpre;
2779 return 0;
2780}
2781
2782static void safexcel_hmac_sha3_cra_exit(struct crypto_tfm *tfm)
2783{
2784 struct safexcel_ahash_ctx *ctx = crypto_tfm_ctx(tfm);
2785
2786 crypto_free_ahash(ctx->fback);
2787 crypto_free_shash(ctx->shpre);
2788 kfree(ctx->shdesc);
2789 safexcel_ahash_cra_exit(tfm);
2790}
2791
2792static int safexcel_hmac_sha3_setkey(struct crypto_ahash *tfm, const u8 *key,
2793 unsigned int keylen)
2794{
2795 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2796 int ret = 0;
2797
2798 if (keylen > crypto_ahash_blocksize(tfm)) {
2799 /*
2800 * If the key is larger than the blocksize, then hash it
2801 * first using our fallback cipher
2802 */
2803 ret = crypto_shash_digest(ctx->shdesc, key, keylen,
2804 (u8 *)ctx->ipad);
2805 keylen = crypto_shash_digestsize(ctx->shpre);
2806
2807 /*
2808 * If the digest is larger than half the blocksize, we need to
2809 * move the rest to opad due to the way our HMAC infra works.
2810 */
2811 if (keylen > crypto_ahash_blocksize(tfm) / 2)
2812 /* Buffers overlap, need to use memmove iso memcpy! */
2813 memmove(ctx->opad,
2814 (u8 *)ctx->ipad +
2815 crypto_ahash_blocksize(tfm) / 2,
2816 keylen - crypto_ahash_blocksize(tfm) / 2);
2817 } else {
2818 /*
2819 * Copy the key to our ipad & opad buffers
2820 * Note that ipad and opad each contain one half of the key,
2821 * to match the existing HMAC driver infrastructure.
2822 */
2823 if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2824 memcpy(ctx->ipad, key, keylen);
2825 } else {
2826 memcpy(ctx->ipad, key,
2827 crypto_ahash_blocksize(tfm) / 2);
2828 memcpy(ctx->opad,
2829 key + crypto_ahash_blocksize(tfm) / 2,
2830 keylen - crypto_ahash_blocksize(tfm) / 2);
2831 }
2832 }
2833
2834 /* Pad key with zeroes */
2835 if (keylen <= crypto_ahash_blocksize(tfm) / 2) {
2836 memset((u8 *)ctx->ipad + keylen, 0,
2837 crypto_ahash_blocksize(tfm) / 2 - keylen);
2838 memset(ctx->opad, 0, crypto_ahash_blocksize(tfm) / 2);
2839 } else {
2840 memset((u8 *)ctx->opad + keylen -
2841 crypto_ahash_blocksize(tfm) / 2, 0,
2842 crypto_ahash_blocksize(tfm) - keylen);
2843 }
2844
2845 /* If doing fallback, still need to set the new key! */
2846 ctx->fb_do_setkey = true;
2847 return ret;
2848}
2849
2850static int safexcel_hmac_sha3_224_init(struct ahash_request *areq)
2851{
2852 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2853 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2854 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2855
2856 memset(req, 0, sizeof(*req));
2857
2858 /* Copy (half of) the key */
2859 memcpy(req->state, ctx->ipad, SHA3_224_BLOCK_SIZE / 2);
2860 /* Start of HMAC should have len == processed == blocksize */
2861 req->len = SHA3_224_BLOCK_SIZE;
2862 req->processed = SHA3_224_BLOCK_SIZE;
2863 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_224;
2864 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2865 req->state_sz = SHA3_224_BLOCK_SIZE / 2;
2866 req->digest_sz = SHA3_224_DIGEST_SIZE;
2867 req->block_sz = SHA3_224_BLOCK_SIZE;
2868 req->hmac = true;
2869 ctx->do_fallback = false;
2870 ctx->fb_init_done = false;
2871 return 0;
2872}
2873
2874static int safexcel_hmac_sha3_224_digest(struct ahash_request *req)
2875{
2876 if (req->nbytes)
2877 return safexcel_hmac_sha3_224_init(req) ?:
2878 safexcel_ahash_finup(req);
2879
2880 /* HW cannot do zero length HMAC, use fallback instead */
2881 return safexcel_sha3_digest_fallback(req);
2882}
2883
2884static int safexcel_hmac_sha3_224_cra_init(struct crypto_tfm *tfm)
2885{
2886 return safexcel_hmac_sha3_cra_init(tfm, "sha3-224");
2887}
2888
2889struct safexcel_alg_template safexcel_alg_hmac_sha3_224 = {
2890 .type = SAFEXCEL_ALG_TYPE_AHASH,
2891 .algo_mask = SAFEXCEL_ALG_SHA3,
2892 .alg.ahash = {
2893 .init = safexcel_hmac_sha3_224_init,
2894 .update = safexcel_sha3_update,
2895 .final = safexcel_sha3_final,
2896 .finup = safexcel_sha3_finup,
2897 .digest = safexcel_hmac_sha3_224_digest,
2898 .setkey = safexcel_hmac_sha3_setkey,
2899 .export = safexcel_sha3_export,
2900 .import = safexcel_sha3_import,
2901 .halg = {
2902 .digestsize = SHA3_224_DIGEST_SIZE,
2903 .statesize = sizeof(struct safexcel_ahash_export_state),
2904 .base = {
2905 .cra_name = "hmac(sha3-224)",
2906 .cra_driver_name = "safexcel-hmac-sha3-224",
2907 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2908 .cra_flags = CRYPTO_ALG_ASYNC |
2909 CRYPTO_ALG_KERN_DRIVER_ONLY |
2910 CRYPTO_ALG_NEED_FALLBACK,
2911 .cra_blocksize = SHA3_224_BLOCK_SIZE,
2912 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2913 .cra_init = safexcel_hmac_sha3_224_cra_init,
2914 .cra_exit = safexcel_hmac_sha3_cra_exit,
2915 .cra_module = THIS_MODULE,
2916 },
2917 },
2918 },
2919};
2920
2921static int safexcel_hmac_sha3_256_init(struct ahash_request *areq)
2922{
2923 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2924 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2925 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2926
2927 memset(req, 0, sizeof(*req));
2928
2929 /* Copy (half of) the key */
2930 memcpy(req->state, ctx->ipad, SHA3_256_BLOCK_SIZE / 2);
2931 /* Start of HMAC should have len == processed == blocksize */
2932 req->len = SHA3_256_BLOCK_SIZE;
2933 req->processed = SHA3_256_BLOCK_SIZE;
2934 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_256;
2935 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
2936 req->state_sz = SHA3_256_BLOCK_SIZE / 2;
2937 req->digest_sz = SHA3_256_DIGEST_SIZE;
2938 req->block_sz = SHA3_256_BLOCK_SIZE;
2939 req->hmac = true;
2940 ctx->do_fallback = false;
2941 ctx->fb_init_done = false;
2942 return 0;
2943}
2944
2945static int safexcel_hmac_sha3_256_digest(struct ahash_request *req)
2946{
2947 if (req->nbytes)
2948 return safexcel_hmac_sha3_256_init(req) ?:
2949 safexcel_ahash_finup(req);
2950
2951 /* HW cannot do zero length HMAC, use fallback instead */
2952 return safexcel_sha3_digest_fallback(req);
2953}
2954
2955static int safexcel_hmac_sha3_256_cra_init(struct crypto_tfm *tfm)
2956{
2957 return safexcel_hmac_sha3_cra_init(tfm, "sha3-256");
2958}
2959
2960struct safexcel_alg_template safexcel_alg_hmac_sha3_256 = {
2961 .type = SAFEXCEL_ALG_TYPE_AHASH,
2962 .algo_mask = SAFEXCEL_ALG_SHA3,
2963 .alg.ahash = {
2964 .init = safexcel_hmac_sha3_256_init,
2965 .update = safexcel_sha3_update,
2966 .final = safexcel_sha3_final,
2967 .finup = safexcel_sha3_finup,
2968 .digest = safexcel_hmac_sha3_256_digest,
2969 .setkey = safexcel_hmac_sha3_setkey,
2970 .export = safexcel_sha3_export,
2971 .import = safexcel_sha3_import,
2972 .halg = {
2973 .digestsize = SHA3_256_DIGEST_SIZE,
2974 .statesize = sizeof(struct safexcel_ahash_export_state),
2975 .base = {
2976 .cra_name = "hmac(sha3-256)",
2977 .cra_driver_name = "safexcel-hmac-sha3-256",
2978 .cra_priority = SAFEXCEL_CRA_PRIORITY,
2979 .cra_flags = CRYPTO_ALG_ASYNC |
2980 CRYPTO_ALG_KERN_DRIVER_ONLY |
2981 CRYPTO_ALG_NEED_FALLBACK,
2982 .cra_blocksize = SHA3_256_BLOCK_SIZE,
2983 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
2984 .cra_init = safexcel_hmac_sha3_256_cra_init,
2985 .cra_exit = safexcel_hmac_sha3_cra_exit,
2986 .cra_module = THIS_MODULE,
2987 },
2988 },
2989 },
2990};
2991
2992static int safexcel_hmac_sha3_384_init(struct ahash_request *areq)
2993{
2994 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
2995 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
2996 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
2997
2998 memset(req, 0, sizeof(*req));
2999
3000 /* Copy (half of) the key */
3001 memcpy(req->state, ctx->ipad, SHA3_384_BLOCK_SIZE / 2);
3002 /* Start of HMAC should have len == processed == blocksize */
3003 req->len = SHA3_384_BLOCK_SIZE;
3004 req->processed = SHA3_384_BLOCK_SIZE;
3005 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_384;
3006 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3007 req->state_sz = SHA3_384_BLOCK_SIZE / 2;
3008 req->digest_sz = SHA3_384_DIGEST_SIZE;
3009 req->block_sz = SHA3_384_BLOCK_SIZE;
3010 req->hmac = true;
3011 ctx->do_fallback = false;
3012 ctx->fb_init_done = false;
3013 return 0;
3014}
3015
3016static int safexcel_hmac_sha3_384_digest(struct ahash_request *req)
3017{
3018 if (req->nbytes)
3019 return safexcel_hmac_sha3_384_init(req) ?:
3020 safexcel_ahash_finup(req);
3021
3022 /* HW cannot do zero length HMAC, use fallback instead */
3023 return safexcel_sha3_digest_fallback(req);
3024}
3025
3026static int safexcel_hmac_sha3_384_cra_init(struct crypto_tfm *tfm)
3027{
3028 return safexcel_hmac_sha3_cra_init(tfm, "sha3-384");
3029}
3030
3031struct safexcel_alg_template safexcel_alg_hmac_sha3_384 = {
3032 .type = SAFEXCEL_ALG_TYPE_AHASH,
3033 .algo_mask = SAFEXCEL_ALG_SHA3,
3034 .alg.ahash = {
3035 .init = safexcel_hmac_sha3_384_init,
3036 .update = safexcel_sha3_update,
3037 .final = safexcel_sha3_final,
3038 .finup = safexcel_sha3_finup,
3039 .digest = safexcel_hmac_sha3_384_digest,
3040 .setkey = safexcel_hmac_sha3_setkey,
3041 .export = safexcel_sha3_export,
3042 .import = safexcel_sha3_import,
3043 .halg = {
3044 .digestsize = SHA3_384_DIGEST_SIZE,
3045 .statesize = sizeof(struct safexcel_ahash_export_state),
3046 .base = {
3047 .cra_name = "hmac(sha3-384)",
3048 .cra_driver_name = "safexcel-hmac-sha3-384",
3049 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3050 .cra_flags = CRYPTO_ALG_ASYNC |
3051 CRYPTO_ALG_KERN_DRIVER_ONLY |
3052 CRYPTO_ALG_NEED_FALLBACK,
3053 .cra_blocksize = SHA3_384_BLOCK_SIZE,
3054 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3055 .cra_init = safexcel_hmac_sha3_384_cra_init,
3056 .cra_exit = safexcel_hmac_sha3_cra_exit,
3057 .cra_module = THIS_MODULE,
3058 },
3059 },
3060 },
3061};
3062
3063static int safexcel_hmac_sha3_512_init(struct ahash_request *areq)
3064{
3065 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
3066 struct safexcel_ahash_ctx *ctx = crypto_ahash_ctx(tfm);
3067 struct safexcel_ahash_req *req = ahash_request_ctx(areq);
3068
3069 memset(req, 0, sizeof(*req));
3070
3071 /* Copy (half of) the key */
3072 memcpy(req->state, ctx->ipad, SHA3_512_BLOCK_SIZE / 2);
3073 /* Start of HMAC should have len == processed == blocksize */
3074 req->len = SHA3_512_BLOCK_SIZE;
3075 req->processed = SHA3_512_BLOCK_SIZE;
3076 ctx->alg = CONTEXT_CONTROL_CRYPTO_ALG_SHA3_512;
3077 req->digest = CONTEXT_CONTROL_DIGEST_HMAC;
3078 req->state_sz = SHA3_512_BLOCK_SIZE / 2;
3079 req->digest_sz = SHA3_512_DIGEST_SIZE;
3080 req->block_sz = SHA3_512_BLOCK_SIZE;
3081 req->hmac = true;
3082 ctx->do_fallback = false;
3083 ctx->fb_init_done = false;
3084 return 0;
3085}
3086
3087static int safexcel_hmac_sha3_512_digest(struct ahash_request *req)
3088{
3089 if (req->nbytes)
3090 return safexcel_hmac_sha3_512_init(req) ?:
3091 safexcel_ahash_finup(req);
3092
3093 /* HW cannot do zero length HMAC, use fallback instead */
3094 return safexcel_sha3_digest_fallback(req);
3095}
3096
3097static int safexcel_hmac_sha3_512_cra_init(struct crypto_tfm *tfm)
3098{
3099 return safexcel_hmac_sha3_cra_init(tfm, "sha3-512");
3100}
3101struct safexcel_alg_template safexcel_alg_hmac_sha3_512 = {
3102 .type = SAFEXCEL_ALG_TYPE_AHASH,
3103 .algo_mask = SAFEXCEL_ALG_SHA3,
3104 .alg.ahash = {
3105 .init = safexcel_hmac_sha3_512_init,
3106 .update = safexcel_sha3_update,
3107 .final = safexcel_sha3_final,
3108 .finup = safexcel_sha3_finup,
3109 .digest = safexcel_hmac_sha3_512_digest,
3110 .setkey = safexcel_hmac_sha3_setkey,
3111 .export = safexcel_sha3_export,
3112 .import = safexcel_sha3_import,
3113 .halg = {
3114 .digestsize = SHA3_512_DIGEST_SIZE,
3115 .statesize = sizeof(struct safexcel_ahash_export_state),
3116 .base = {
3117 .cra_name = "hmac(sha3-512)",
3118 .cra_driver_name = "safexcel-hmac-sha3-512",
3119 .cra_priority = SAFEXCEL_CRA_PRIORITY,
3120 .cra_flags = CRYPTO_ALG_ASYNC |
3121 CRYPTO_ALG_KERN_DRIVER_ONLY |
3122 CRYPTO_ALG_NEED_FALLBACK,
3123 .cra_blocksize = SHA3_512_BLOCK_SIZE,
3124 .cra_ctxsize = sizeof(struct safexcel_ahash_ctx),
3125 .cra_init = safexcel_hmac_sha3_512_cra_init,
3126 .cra_exit = safexcel_hmac_sha3_cra_exit,
3127 .cra_module = THIS_MODULE,
3128 },
3129 },
3130 },
3131};