// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (C) STMicroelectronics SA 2017
 * Author: Fabien Dessenne <fabien.dessenne@st.com>
 */

#include <linux/clk.h>
#include <linux/delay.h>
#include <linux/interrupt.h>
#include <linux/iopoll.h>
#include <linux/module.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <linux/reset.h>

#include <crypto/aes.h>
#include <crypto/internal/des.h>
#include <crypto/engine.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

#define DRIVER_NAME		"stm32-cryp"

/* Bit [0] encrypt / decrypt */
#define FLG_ENCRYPT		BIT(0)
/* Bit [8..1] algo & operation mode */
#define FLG_AES			BIT(1)
#define FLG_DES			BIT(2)
#define FLG_TDES		BIT(3)
#define FLG_ECB			BIT(4)
#define FLG_CBC			BIT(5)
#define FLG_CTR			BIT(6)
#define FLG_GCM			BIT(7)
#define FLG_CCM			BIT(8)
/* Mode mask = bits [15..0] */
#define FLG_MODE_MASK		GENMASK(15, 0)
/* Bit [31..16] status */
#define FLG_CCM_PADDED_WA	BIT(16)

/* Registers */
#define CRYP_CR			0x00000000
#define CRYP_SR			0x00000004
#define CRYP_DIN		0x00000008
#define CRYP_DOUT		0x0000000C
#define CRYP_DMACR		0x00000010
#define CRYP_IMSCR		0x00000014
#define CRYP_RISR		0x00000018
#define CRYP_MISR		0x0000001C
#define CRYP_K0LR		0x00000020
#define CRYP_K0RR		0x00000024
#define CRYP_K1LR		0x00000028
#define CRYP_K1RR		0x0000002C
#define CRYP_K2LR		0x00000030
#define CRYP_K2RR		0x00000034
#define CRYP_K3LR		0x00000038
#define CRYP_K3RR		0x0000003C
#define CRYP_IV0LR		0x00000040
#define CRYP_IV0RR		0x00000044
#define CRYP_IV1LR		0x00000048
#define CRYP_IV1RR		0x0000004C
#define CRYP_CSGCMCCM0R		0x00000050
#define CRYP_CSGCM0R		0x00000070

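/*
 * CRYP_CSGCMCCM0R and CRYP_CSGCM0R are the bases of two banks of eight
 * 32-bit context swap registers (per the STM32 reference manual); the CCM
 * padding workaround below indexes into the CSGCMCCM bank with
 * "CRYP_CSGCMCCM0R + i * 4".
 */
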
/* Registers values */
#define CR_DEC_NOT_ENC		0x00000004
#define CR_TDES_ECB		0x00000000
#define CR_TDES_CBC		0x00000008
#define CR_DES_ECB		0x00000010
#define CR_DES_CBC		0x00000018
#define CR_AES_ECB		0x00000020
#define CR_AES_CBC		0x00000028
#define CR_AES_CTR		0x00000030
#define CR_AES_KP		0x00000038
#define CR_AES_GCM		0x00080000
#define CR_AES_CCM		0x00080008
#define CR_AES_UNKNOWN		0xFFFFFFFF
#define CR_ALGO_MASK		0x00080038
#define CR_DATA32		0x00000000
#define CR_DATA16		0x00000040
#define CR_DATA8		0x00000080
#define CR_DATA1		0x000000C0
#define CR_KEY128		0x00000000
#define CR_KEY192		0x00000100
#define CR_KEY256		0x00000200
#define CR_FFLUSH		0x00004000
#define CR_CRYPEN		0x00008000
#define CR_PH_INIT		0x00000000
#define CR_PH_HEADER		0x00010000
#define CR_PH_PAYLOAD		0x00020000
#define CR_PH_FINAL		0x00030000
#define CR_PH_MASK		0x00030000
#define CR_NBPBL_SHIFT		20

#define SR_BUSY			0x00000010
#define SR_OFNE			0x00000004

#define IMSCR_IN		BIT(0)
#define IMSCR_OUT		BIT(1)

#define MISR_IN			BIT(0)
#define MISR_OUT		BIT(1)

/* Misc */
#define AES_BLOCK_32		(AES_BLOCK_SIZE / sizeof(u32))
#define GCM_CTR_INIT		2
/* Number of bytes already consumed in the current in/out scatterlist entry */
#define _walked_in		(cryp->in_walk.offset - cryp->in_sg->offset)
#define _walked_out		(cryp->out_walk.offset - cryp->out_sg->offset)
#define CRYP_AUTOSUSPEND_DELAY	50

struct stm32_cryp_caps {
	bool	swap_final;
	bool	padding_wa;
};

struct stm32_cryp_ctx {
	struct crypto_engine_ctx enginectx;
	struct stm32_cryp	*cryp;
	int			keylen;
	u32			key[AES_KEYSIZE_256 / sizeof(u32)];
	unsigned long		flags;
};

struct stm32_cryp_reqctx {
	unsigned long mode;
};

struct stm32_cryp {
	struct list_head	list;
	struct device		*dev;
	void __iomem		*regs;
	struct clk		*clk;
	unsigned long		flags;
	u32			irq_status;
	const struct stm32_cryp_caps *caps;
	struct stm32_cryp_ctx	*ctx;

	struct crypto_engine	*engine;

	struct skcipher_request *req;
	struct aead_request	*areq;

	size_t			authsize;
	size_t			hw_blocksize;

	size_t			total_in;
	size_t			total_in_save;
	size_t			total_out;
	size_t			total_out_save;

	struct scatterlist	*in_sg;
	struct scatterlist	*out_sg;
	struct scatterlist	*out_sg_save;

	struct scatterlist	in_sgl;
	struct scatterlist	out_sgl;
	bool			sgs_copied;

	int			in_sg_len;
	int			out_sg_len;

	struct scatter_walk	in_walk;
	struct scatter_walk	out_walk;

	u32			last_ctr[4];
	u32			gcm_ctr;
};

struct stm32_cryp_list {
	struct list_head	dev_list;
	spinlock_t		lock; /* protect dev_list */
};

static struct stm32_cryp_list cryp_list = {
	.dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
	.lock     = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
};

static inline bool is_aes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_AES;
}

static inline bool is_des(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_DES;
}

static inline bool is_tdes(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_TDES;
}

static inline bool is_ecb(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ECB;
}

static inline bool is_cbc(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CBC;
}

static inline bool is_ctr(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CTR;
}

static inline bool is_gcm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_GCM;
}

static inline bool is_ccm(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_CCM;
}

static inline bool is_encrypt(struct stm32_cryp *cryp)
{
	return cryp->flags & FLG_ENCRYPT;
}

static inline bool is_decrypt(struct stm32_cryp *cryp)
{
	return !is_encrypt(cryp);
}

static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
{
	return readl_relaxed(cryp->regs + ofst);
}

static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
{
	writel_relaxed(val, cryp->regs + ofst);
}

static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			!(status & SR_BUSY), 10, 100000);
}

static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_CR, status,
			!(status & CR_CRYPEN), 10, 100000);
}

static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
{
	u32 status;

	return readl_relaxed_poll_timeout(cryp->regs + CRYP_SR, status,
			status & SR_OFNE, 10, 100000);
}

static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);

/* Bind the tfm to the first registered CRYP instance, or reuse the cached one */
static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
{
	struct stm32_cryp *tmp, *cryp = NULL;

	spin_lock_bh(&cryp_list.lock);
	if (!ctx->cryp) {
		list_for_each_entry(tmp, &cryp_list.dev_list, list) {
			cryp = tmp;
			break;
		}
		ctx->cryp = cryp;
	} else {
		cryp = ctx->cryp;
	}

	spin_unlock_bh(&cryp_list.lock);

	return cryp;
}

static int stm32_cryp_check_aligned(struct scatterlist *sg, size_t total,
				    size_t align)
{
	int len = 0;

	if (!total)
		return 0;

	if (!IS_ALIGNED(total, align))
		return -EINVAL;

	while (sg) {
		if (!IS_ALIGNED(sg->offset, sizeof(u32)))
			return -EINVAL;

		if (!IS_ALIGNED(sg->length, align))
			return -EINVAL;

		len += sg->length;
		sg = sg_next(sg);
	}

	if (len != total)
		return -EINVAL;

	return 0;
}

static int stm32_cryp_check_io_aligned(struct stm32_cryp *cryp)
{
	int ret;

	ret = stm32_cryp_check_aligned(cryp->in_sg, cryp->total_in,
				       cryp->hw_blocksize);
	if (ret)
		return ret;

	ret = stm32_cryp_check_aligned(cryp->out_sg, cryp->total_out,
				       cryp->hw_blocksize);

	return ret;
}

static void sg_copy_buf(void *buf, struct scatterlist *sg,
			unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	if (!nbytes)
		return;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}

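/*
 * If the request scatterlists do not satisfy the alignment constraints
 * checked above, bounce them through freshly allocated, block-aligned
 * linear buffers so the FIFO can always be fed with full 32-bit words.
 */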
static int stm32_cryp_copy_sgs(struct stm32_cryp *cryp)
{
	void *buf_in, *buf_out;
	int pages, total_in, total_out;

	if (!stm32_cryp_check_io_aligned(cryp)) {
		cryp->sgs_copied = 0;
		return 0;
	}

	total_in = ALIGN(cryp->total_in, cryp->hw_blocksize);
	pages = total_in ? get_order(total_in) : 1;
	buf_in = (void *)__get_free_pages(GFP_ATOMIC, pages);

	total_out = ALIGN(cryp->total_out, cryp->hw_blocksize);
	pages = total_out ? get_order(total_out) : 1;
	buf_out = (void *)__get_free_pages(GFP_ATOMIC, pages);

	if (!buf_in || !buf_out) {
		dev_err(cryp->dev, "Can't allocate pages when unaligned\n");
		cryp->sgs_copied = 0;
		return -EFAULT;
	}

	sg_copy_buf(buf_in, cryp->in_sg, 0, cryp->total_in, 0);

	sg_init_one(&cryp->in_sgl, buf_in, total_in);
	cryp->in_sg = &cryp->in_sgl;
	cryp->in_sg_len = 1;

	sg_init_one(&cryp->out_sgl, buf_out, total_out);
	cryp->out_sg_save = cryp->out_sg;
	cryp->out_sg = &cryp->out_sgl;
	cryp->out_sg_len = 1;

	cryp->sgs_copied = 1;

	return 0;
}

static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, u32 *iv)
{
	if (!iv)
		return;

	stm32_cryp_write(cryp, CRYP_IV0LR, cpu_to_be32(*iv++));
	stm32_cryp_write(cryp, CRYP_IV0RR, cpu_to_be32(*iv++));

	if (is_aes(cryp)) {
		stm32_cryp_write(cryp, CRYP_IV1LR, cpu_to_be32(*iv++));
		stm32_cryp_write(cryp, CRYP_IV1RR, cpu_to_be32(*iv++));
	}
}

static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
{
	struct skcipher_request *req = cryp->req;
	u32 *tmp = (void *)req->iv;

	if (!tmp)
		return;

	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0LR));
	*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV0RR));

	if (is_aes(cryp)) {
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1LR));
		*tmp++ = cpu_to_be32(stm32_cryp_read(cryp, CRYP_IV1RR));
	}
}

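/*
 * The key is written most-significant word first, packed against K3R: the
 * loop walks backwards from CRYP_K3RR, so AES-128 occupies K2L..K3R,
 * AES-256 occupies K0L..K3R and 3DES K1L..K3R; single DES only uses the
 * K1 register pair.
 */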
static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
{
	unsigned int i;
	int r_id;

	if (is_des(c)) {
		stm32_cryp_write(c, CRYP_K1LR, cpu_to_be32(c->ctx->key[0]));
		stm32_cryp_write(c, CRYP_K1RR, cpu_to_be32(c->ctx->key[1]));
	} else {
		r_id = CRYP_K3RR;
		for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
			stm32_cryp_write(c, r_id,
					 cpu_to_be32(c->ctx->key[i - 1]));
	}
}

static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
{
	if (is_aes(cryp) && is_ecb(cryp))
		return CR_AES_ECB;

	if (is_aes(cryp) && is_cbc(cryp))
		return CR_AES_CBC;

	if (is_aes(cryp) && is_ctr(cryp))
		return CR_AES_CTR;

	if (is_aes(cryp) && is_gcm(cryp))
		return CR_AES_GCM;

	if (is_aes(cryp) && is_ccm(cryp))
		return CR_AES_CCM;

	if (is_des(cryp) && is_ecb(cryp))
		return CR_DES_ECB;

	if (is_des(cryp) && is_cbc(cryp))
		return CR_DES_CBC;

	if (is_tdes(cryp) && is_ecb(cryp))
		return CR_TDES_ECB;

	if (is_tdes(cryp) && is_cbc(cryp))
		return CR_TDES_CBC;

	dev_err(cryp->dev, "Unknown mode\n");
	return CR_AES_UNKNOWN;
}

static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
{
	return is_encrypt(cryp) ? cryp->areq->cryptlen :
				  cryp->areq->cryptlen - cryp->authsize;
}

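/*
 * GCM init phase: the 32-bit counter word is programmed to GCM_CTR_INIT
 * (2), the value of the first payload counter block, since counter value 1
 * is reserved for the final tag computation; the hardware signals the end
 * of the init phase by clearing CRYPEN.
 */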
static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u32 iv[4];

	/* Phase 1 : init */
	memcpy(iv, cryp->areq->iv, 12);
	iv[3] = cpu_to_be32(GCM_CTR_INIT);
	cryp->gcm_ctr = GCM_CTR_INIT;
	stm32_cryp_hw_write_iv(cryp, iv);

	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (gcm init)\n");

	return ret;
}

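/*
 * CCM init phase: the first block B0 is built per RFC 3610 / NIST SP
 * 800-38C from the IV, whose first byte holds L' = L - 1. The flags byte
 * encodes the tag length as (t - 2) / 2 and an "associated data present"
 * bit (0x40); the trailing bytes carry the message length.
 */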
static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
{
	int ret;
	u8 iv[AES_BLOCK_SIZE], b0[AES_BLOCK_SIZE];
	u32 *d;
	unsigned int i, textlen;

	/* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
	memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
	memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
	iv[AES_BLOCK_SIZE - 1] = 1;
	stm32_cryp_hw_write_iv(cryp, (u32 *)iv);

	/* Build B0 */
	memcpy(b0, iv, AES_BLOCK_SIZE);

	b0[0] |= (8 * ((cryp->authsize - 2) / 2));

	if (cryp->areq->assoclen)
		b0[0] |= 0x40;

	textlen = stm32_cryp_get_input_text_len(cryp);

	b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
	b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;

	/* Enable HW */
	stm32_cryp_write(cryp, CRYP_CR, cfg | CR_PH_INIT | CR_CRYPEN);

	/* Write B0 */
	d = (u32 *)b0;

	for (i = 0; i < AES_BLOCK_32; i++) {
		if (!cryp->caps->padding_wa)
			*d = cpu_to_be32(*d);
		stm32_cryp_write(cryp, CRYP_DIN, *d++);
	}

	/* Wait for end of processing */
	ret = stm32_cryp_wait_enable(cryp);
	if (ret)
		dev_err(cryp->dev, "Timeout (ccm init)\n");

	return ret;
}

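/*
 * One-time hardware setup for a request: key, data type and key size are
 * programmed first. AES-ECB/CBC decryption requires a key preparation run
 * (CR_AES_KP) to derive the decryption key before the algorithm is
 * selected, and GCM/CCM go through their init phase before the header or
 * payload phase is entered.
 */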
static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
{
	int ret;
	u32 cfg, hw_mode;

	pm_runtime_get_sync(cryp->dev);

	/* Disable interrupt */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	/* Set key */
	stm32_cryp_hw_write_key(cryp);

	/* Set configuration */
	cfg = CR_DATA8 | CR_FFLUSH;

	switch (cryp->ctx->keylen) {
	case AES_KEYSIZE_128:
		cfg |= CR_KEY128;
		break;

	case AES_KEYSIZE_192:
		cfg |= CR_KEY192;
		break;

	default:
	case AES_KEYSIZE_256:
		cfg |= CR_KEY256;
		break;
	}

	hw_mode = stm32_cryp_get_hw_mode(cryp);
	if (hw_mode == CR_AES_UNKNOWN)
		return -EINVAL;

	/* AES ECB/CBC decrypt: run key preparation first */
	if (is_decrypt(cryp) &&
	    ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
		stm32_cryp_write(cryp, CRYP_CR, cfg | CR_AES_KP | CR_CRYPEN);

		/* Wait for end of processing */
		ret = stm32_cryp_wait_busy(cryp);
		if (ret) {
			dev_err(cryp->dev, "Timeout (key preparation)\n");
			return ret;
		}
	}

	cfg |= hw_mode;

	if (is_decrypt(cryp))
		cfg |= CR_DEC_NOT_ENC;

	/* Apply config and flush (valid when CRYPEN = 0) */
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	switch (hw_mode) {
	case CR_AES_GCM:
	case CR_AES_CCM:
		/* Phase 1 : init */
		if (hw_mode == CR_AES_CCM)
			ret = stm32_cryp_ccm_init(cryp, cfg);
		else
			ret = stm32_cryp_gcm_init(cryp, cfg);

		if (ret)
			return ret;

		/* Phase 2 : header (authenticated data) */
		if (cryp->areq->assoclen) {
			cfg |= CR_PH_HEADER;
		} else if (stm32_cryp_get_input_text_len(cryp)) {
			cfg |= CR_PH_PAYLOAD;
			stm32_cryp_write(cryp, CRYP_CR, cfg);
		} else {
			cfg |= CR_PH_INIT;
		}

		break;

	case CR_DES_CBC:
	case CR_TDES_CBC:
	case CR_AES_CBC:
	case CR_AES_CTR:
		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->req->iv);
		break;

	default:
		break;
	}

	/* Enable now */
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	cryp->flags &= ~FLG_CCM_PADDED_WA;

	return 0;
}

static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
{
	if (!err && (is_gcm(cryp) || is_ccm(cryp)))
		/* Phase 4 : output tag */
		err = stm32_cryp_read_auth_tag(cryp);

	if (!err && (!(is_gcm(cryp) || is_ccm(cryp))))
		stm32_cryp_get_iv(cryp);

	if (cryp->sgs_copied) {
		void *buf_in, *buf_out;
		int pages, len;

		buf_in = sg_virt(&cryp->in_sgl);
		buf_out = sg_virt(&cryp->out_sgl);

		sg_copy_buf(buf_out, cryp->out_sg_save, 0,
			    cryp->total_out_save, 1);

		len = ALIGN(cryp->total_in_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_in, pages);

		len = ALIGN(cryp->total_out_save, cryp->hw_blocksize);
		pages = len ? get_order(len) : 1;
		free_pages((unsigned long)buf_out, pages);
	}

	pm_runtime_mark_last_busy(cryp->dev);
	pm_runtime_put_autosuspend(cryp->dev);

	if (is_gcm(cryp) || is_ccm(cryp))
		crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
	else
		crypto_finalize_skcipher_request(cryp->engine, cryp->req,
						 err);

	memset(cryp->ctx->key, 0, cryp->ctx->keylen);
}

static int stm32_cryp_cpu_start(struct stm32_cryp *cryp)
{
	/* Enable interrupt and let the IRQ handler do everything */
	stm32_cryp_write(cryp, CRYP_IMSCR, IMSCR_IN | IMSCR_OUT);

	return 0;
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq);

static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));

	ctx->enginectx.op.do_one_request = stm32_cryp_cipher_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_cipher_req;
	ctx->enginectx.op.unprepare_request = NULL;
	return 0;
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine,
				       void *areq);

static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	tfm->reqsize = sizeof(struct stm32_cryp_reqctx);

	ctx->enginectx.op.do_one_request = stm32_cryp_aead_one_req;
	ctx->enginectx.op.prepare_request = stm32_cryp_prepare_aead_req;
	ctx->enginectx.op.unprepare_request = NULL;

	return 0;
}

static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
	struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);

	if (!cryp)
		return -ENODEV;

	rctx->mode = mode;

	return crypto_transfer_aead_request_to_engine(cryp->engine, req);
}

static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
			     unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;
	else
		return stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int keylen)
{
	return verify_skcipher_des_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
				  unsigned int keylen)
{
	return verify_skcipher_des3_key(tfm, key) ?:
	       stm32_cryp_setkey(tfm, key, keylen);
}

static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
				      unsigned int keylen)
{
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);

	if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
	    keylen != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key, key, keylen);
	ctx->keylen = keylen;

	return 0;
}

static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	return authsize == AES_BLOCK_SIZE ? 0 : -EINVAL;
}

static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
					  unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 6:
	case 8:
	case 10:
	case 12:
	case 14:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
}

static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
}

static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
}

static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
}

static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
}

static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
{
	return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
}

static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
}

static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
}

static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
}

static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
}

static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
{
	return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
}

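/*
 * Common request preparation: exactly one of @req (skcipher) or @areq
 * (AEAD) is non-NULL. Total in/out lengths are derived as pictured in the
 * comment inside, the scatterlists are bounced if misaligned, and the
 * hardware is initialized for the selected mode.
 */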
static int stm32_cryp_prepare_req(struct skcipher_request *req,
				  struct aead_request *areq)
{
	struct stm32_cryp_ctx *ctx;
	struct stm32_cryp *cryp;
	struct stm32_cryp_reqctx *rctx;
	int ret;

	if (!req && !areq)
		return -EINVAL;

	ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
		    crypto_aead_ctx(crypto_aead_reqtfm(areq));

	cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
	rctx->mode &= FLG_MODE_MASK;

	ctx->cryp = cryp;

	cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
	cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
	cryp->ctx = ctx;

	if (req) {
		cryp->req = req;
		cryp->areq = NULL;
		cryp->total_in = req->cryptlen;
		cryp->total_out = cryp->total_in;
	} else {
		/*
		 * Length of input and output data:
		 * Encryption case:
		 *  INPUT  =   AssocData   ||     PlainText
		 *          <- assoclen ->  <-- cryptlen -->
		 *          <------------ total_in -------->
		 *
		 *  OUTPUT =   AssocData   ||    CipherText   ||   AuthTag
		 *          <- assoclen ->  <-- cryptlen -->  <- authsize ->
		 *          <---------------- total_out ----------------->
		 *
		 * Decryption case:
		 *  INPUT  =   AssocData   ||    CipherText   ||  AuthTag
		 *          <- assoclen ->  <--------- cryptlen --------->
		 *                                           <- authsize ->
		 *          <---------------- total_in ------------------>
		 *
		 *  OUTPUT =   AssocData   ||      PlainText
		 *          <- assoclen ->  <- cryptlen - authsize ->
		 *          <---------- total_out ----------------->
		 */
		cryp->areq = areq;
		cryp->req = NULL;
		cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
		cryp->total_in = areq->assoclen + areq->cryptlen;
		if (is_encrypt(cryp))
			/* Append auth tag to output */
			cryp->total_out = cryp->total_in + cryp->authsize;
		else
			/* No auth tag in output */
			cryp->total_out = cryp->total_in - cryp->authsize;
	}

	cryp->total_in_save = cryp->total_in;
	cryp->total_out_save = cryp->total_out;

	cryp->in_sg = req ? req->src : areq->src;
	cryp->out_sg = req ? req->dst : areq->dst;
	cryp->out_sg_save = cryp->out_sg;

	cryp->in_sg_len = sg_nents_for_len(cryp->in_sg, cryp->total_in);
	if (cryp->in_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get in_sg_len\n");
		ret = cryp->in_sg_len;
		return ret;
	}

	cryp->out_sg_len = sg_nents_for_len(cryp->out_sg, cryp->total_out);
	if (cryp->out_sg_len < 0) {
		dev_err(cryp->dev, "Cannot get out_sg_len\n");
		ret = cryp->out_sg_len;
		return ret;
	}

	ret = stm32_cryp_copy_sgs(cryp);
	if (ret)
		return ret;

	scatterwalk_start(&cryp->in_walk, cryp->in_sg);
	scatterwalk_start(&cryp->out_walk, cryp->out_sg);

	if (is_gcm(cryp) || is_ccm(cryp)) {
		/* In output, jump after assoc data */
		scatterwalk_advance(&cryp->out_walk, cryp->areq->assoclen);
		cryp->total_out -= cryp->areq->assoclen;
	}

	ret = stm32_cryp_hw_init(cryp);
	return ret;
}

static int stm32_cryp_prepare_cipher_req(struct crypto_engine *engine,
					 void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);

	return stm32_cryp_prepare_req(req, NULL);
}

static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
{
	struct skcipher_request *req = container_of(areq,
						    struct skcipher_request,
						    base);
	struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
			crypto_skcipher_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	return stm32_cryp_cpu_start(cryp);
}

static int stm32_cryp_prepare_aead_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);

	return stm32_cryp_prepare_req(NULL, req);
}

static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
{
	struct aead_request *req = container_of(areq, struct aead_request,
						base);
	struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
	struct stm32_cryp *cryp = ctx->cryp;

	if (!cryp)
		return -ENODEV;

	if (unlikely(!cryp->areq->assoclen &&
		     !stm32_cryp_get_input_text_len(cryp))) {
		/* No input data to process: get tag and finish */
		stm32_cryp_finish_req(cryp, 0);
		return 0;
	}

	return stm32_cryp_cpu_start(cryp);
}

static u32 *stm32_cryp_next_out(struct stm32_cryp *cryp, u32 *dst,
				unsigned int n)
{
	scatterwalk_advance(&cryp->out_walk, n);

	if (unlikely(cryp->out_sg->length == _walked_out)) {
		cryp->out_sg = sg_next(cryp->out_sg);
		if (cryp->out_sg) {
			scatterwalk_start(&cryp->out_walk, cryp->out_sg);
			return (sg_virt(cryp->out_sg) + _walked_out);
		}
	}

	return (u32 *)((u8 *)dst + n);
}

static u32 *stm32_cryp_next_in(struct stm32_cryp *cryp, u32 *src,
			       unsigned int n)
{
	scatterwalk_advance(&cryp->in_walk, n);

	if (unlikely(cryp->in_sg->length == _walked_in)) {
		cryp->in_sg = sg_next(cryp->in_sg);
		if (cryp->in_sg) {
			scatterwalk_start(&cryp->in_walk, cryp->in_sg);
			return (sg_virt(cryp->in_sg) + _walked_in);
		}
	}

	return (u32 *)((u8 *)src + n);
}

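/*
 * GCM/CCM final phase: for GCM the bit lengths of the associated data and
 * of the payload are pushed through the FIFO (as two 64-bit values); for
 * CCM the CTR0 block is pushed instead. On encryption the computed tag is
 * appended to the output; on decryption it is compared against the tag at
 * the end of the input using the constant-time crypto_memneq().
 */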
static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
{
	u32 cfg, size_bit, *dst, d32;
	u8 *d8;
	unsigned int i, j;
	int ret = 0;

	/* Update Config */
	cfg = stm32_cryp_read(cryp, CRYP_CR);

	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	cfg &= ~CR_DEC_NOT_ENC;
	cfg |= CR_CRYPEN;

	stm32_cryp_write(cryp, CRYP_CR, cfg);

	if (is_gcm(cryp)) {
		/* GCM: write aad and payload size (in bits) */
		size_bit = cryp->areq->assoclen * 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);

		size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
				cryp->areq->cryptlen - AES_BLOCK_SIZE;
		size_bit *= 8;
		if (cryp->caps->swap_final)
			size_bit = cpu_to_be32(size_bit);

		stm32_cryp_write(cryp, CRYP_DIN, 0);
		stm32_cryp_write(cryp, CRYP_DIN, size_bit);
	} else {
		/* CCM: write CTR0 */
		u8 iv[AES_BLOCK_SIZE];
		u32 *iv32 = (u32 *)iv;

		memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
		memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (!cryp->caps->padding_wa)
				*iv32 = cpu_to_be32(*iv32);
			stm32_cryp_write(cryp, CRYP_DIN, *iv32++);
		}
	}

	/* Wait for output data */
	ret = stm32_cryp_wait_output(cryp);
	if (ret) {
		dev_err(cryp->dev, "Timeout (read tag)\n");
		return ret;
	}

	if (is_encrypt(cryp)) {
		/* Get and write tag */
		dst = sg_virt(cryp->out_sg) + _walked_out;

		for (i = 0; i < AES_BLOCK_32; i++) {
			if (cryp->total_out >= sizeof(u32)) {
				/* Read a full u32 */
				*dst = stm32_cryp_read(cryp, CRYP_DOUT);

				dst = stm32_cryp_next_out(cryp, dst,
							  sizeof(u32));
				cryp->total_out -= sizeof(u32);
			} else if (!cryp->total_out) {
				/* Empty fifo out (data from input padding) */
				stm32_cryp_read(cryp, CRYP_DOUT);
			} else {
				/* Read less than an u32 */
				d32 = stm32_cryp_read(cryp, CRYP_DOUT);
				d8 = (u8 *)&d32;

				for (j = 0; j < cryp->total_out; j++) {
					*((u8 *)dst) = *(d8++);
					dst = stm32_cryp_next_out(cryp, dst, 1);
				}
				cryp->total_out = 0;
			}
		}
	} else {
		/* Get and check tag */
		u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];

		scatterwalk_map_and_copy(in_tag, cryp->in_sg,
					 cryp->total_in_save - cryp->authsize,
					 cryp->authsize, 0);

		for (i = 0; i < AES_BLOCK_32; i++)
			out_tag[i] = stm32_cryp_read(cryp, CRYP_DOUT);

		if (crypto_memneq(in_tag, out_tag, cryp->authsize))
			ret = -EBADMSG;
	}

	/* Disable cryp */
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	return ret;
}

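/*
 * CTR carry handling: the peripheral only increments the low 32-bit
 * counter word (IV1R), so when that word is about to wrap the carry is
 * propagated in software and the full 128-bit counter is written back
 * (CRYPEN is toggled around the IV update).
 */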
static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
{
	u32 cr;

	if (unlikely(cryp->last_ctr[3] == 0xFFFFFFFF)) {
		cryp->last_ctr[3] = 0;
		cryp->last_ctr[2]++;
		if (!cryp->last_ctr[2]) {
			cryp->last_ctr[1]++;
			if (!cryp->last_ctr[1])
				cryp->last_ctr[0]++;
		}

		cr = stm32_cryp_read(cryp, CRYP_CR);
		stm32_cryp_write(cryp, CRYP_CR, cr & ~CR_CRYPEN);

		stm32_cryp_hw_write_iv(cryp, (u32 *)cryp->last_ctr);

		stm32_cryp_write(cryp, CRYP_CR, cr);
	}

	cryp->last_ctr[0] = stm32_cryp_read(cryp, CRYP_IV0LR);
	cryp->last_ctr[1] = stm32_cryp_read(cryp, CRYP_IV0RR);
	cryp->last_ctr[2] = stm32_cryp_read(cryp, CRYP_IV1LR);
	cryp->last_ctr[3] = stm32_cryp_read(cryp, CRYP_IV1RR);
}

static bool stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 d32, *dst;
	u8 *d8;
	size_t tag_size;

	/* Do not read the tag now (if any) */
	if (is_encrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	dst = sg_virt(cryp->out_sg) + _walked_out;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_out - tag_size >= sizeof(u32))) {
			/* Read a full u32 */
			*dst = stm32_cryp_read(cryp, CRYP_DOUT);

			dst = stm32_cryp_next_out(cryp, dst, sizeof(u32));
			cryp->total_out -= sizeof(u32);
		} else if (cryp->total_out == tag_size) {
			/* Empty fifo out (data from input padding) */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
		} else {
			/* Read less than an u32 */
			d32 = stm32_cryp_read(cryp, CRYP_DOUT);
			d8 = (u8 *)&d32;

			for (j = 0; j < cryp->total_out - tag_size; j++) {
				*((u8 *)dst) = *(d8++);
				dst = stm32_cryp_next_out(cryp, dst, 1);
			}
			cryp->total_out = tag_size;
		}
	}

	return !(cryp->total_out - tag_size) || !cryp->total_in;
}

static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
{
	unsigned int i, j;
	u32 *src;
	u8 d8[4];
	size_t tag_size;

	/* Do not write the tag (if any) */
	if (is_decrypt(cryp) && (is_gcm(cryp) || is_ccm(cryp)))
		tag_size = cryp->authsize;
	else
		tag_size = 0;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < cryp->hw_blocksize / sizeof(u32); i++) {
		if (likely(cryp->total_in - tag_size >= sizeof(u32))) {
			/* Write a full u32 */
			stm32_cryp_write(cryp, CRYP_DIN, *src);

			src = stm32_cryp_next_in(cryp, src, sizeof(u32));
			cryp->total_in -= sizeof(u32);
		} else if (cryp->total_in == tag_size) {
			/* Write padding data */
			stm32_cryp_write(cryp, CRYP_DIN, 0);
		} else {
			/* Write less than an u32 */
			memset(d8, 0, sizeof(u32));
			for (j = 0; j < cryp->total_in - tag_size; j++) {
				d8[j] = *((u8 *)src);
				src = stm32_cryp_next_in(cryp, src, 1);
			}

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			cryp->total_in = tag_size;
		}
	}
}

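/*
 * GCM "padded data" workaround (the 'Special workaround' of the
 * datasheet): when the last payload block is partial, it is encrypted
 * out-of-line in CTR mode with the current counter, the result is
 * captured, and the zero-padded ciphertext is then replayed through the
 * GCM engine in Final phase so the tag accounts for it.
 */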
static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
{
	int err;
	u32 cfg, tmp[AES_BLOCK_32];
	size_t total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) Update IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, cryp->gcm_ctr - 2);

	/* c) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store encrypted data */
	stm32_cryp_irq_read_data(cryp);
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_in_save - total_in_ori,
				 total_in_ori, 0);

	/* d) change mode back to AES GCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_GCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* e) change phase to Final */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_FINAL;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) write padded data */
	for (i = 0; i < AES_BLOCK_32; i++) {
		if (cryp->total_in)
			stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
		else
			stm32_cryp_write(cryp, CRYP_DIN, 0);

		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);
	}

	/* g) Empty fifo out */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	for (i = 0; i < AES_BLOCK_32; i++)
		stm32_cryp_read(cryp, CRYP_DOUT);

	/* h) run the normal Final phase */
	stm32_cryp_finish_req(cryp, 0);
}

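/*
 * On IP versions that do not need the workaround above (padding_wa not
 * set), the number of padding bytes in the last block is programmed in the
 * NPBLB field of CR instead, and the hardware masks the padding itself.
 */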
static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
{
	u32 cfg, payload_bytes;

	/* Disable IP, set NPBLB and re-enable IP */
	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	payload_bytes = is_decrypt(cryp) ? cryp->total_in - cryp->authsize :
					   cryp->total_in;
	cfg |= (cryp->hw_blocksize - payload_bytes) << CR_NBPBL_SHIFT;
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);
}

static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32], tmp[AES_BLOCK_32];
	size_t last_total_out, total_in_ori = cryp->total_in;
	struct scatterlist *out_sg_ori = cryp->out_sg;
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */
	cryp->flags |= FLG_CCM_PADDED_WA;

	/* a) disable ip */
	stm32_cryp_write(cryp, CRYP_IMSCR, 0);

	cfg = stm32_cryp_read(cryp, CRYP_CR);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, CRYP_IV1RR, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	cryp->total_in = total_in_ori;
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	last_total_out = cryp->total_out;
	stm32_cryp_irq_read_data(cryp);

	memset(tmp, 0, sizeof(tmp));
	scatterwalk_map_and_copy(tmp, out_sg_ori,
				 cryp->total_out_save - last_total_out,
				 last_total_out, 0);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, CRYP_CR, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(tmp); i++) {
		tmp[i] ^= cstmp1[i];
		tmp[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, CRYP_DIN, tmp[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->total_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->total_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa)
			/* Special case 1 */
			return stm32_cryp_irq_write_gcm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->total_in - cryp->authsize < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa)
			/* Special case 2 */
			return stm32_cryp_irq_write_ccm_padded_data(cryp);

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

static void stm32_cryp_irq_write_gcm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i, j;
	u32 cfg, *src;

	src = sg_virt(cryp->in_sg) + _walked_in;

	for (i = 0; i < AES_BLOCK_32; i++) {
		stm32_cryp_write(cryp, CRYP_DIN, *src);

		src = stm32_cryp_next_in(cryp, src, sizeof(u32));
		cryp->total_in -= min_t(size_t, sizeof(u32), cryp->total_in);

		/* Check if whole header written */
		if ((cryp->total_in_save - cryp->total_in) ==
				cryp->areq->assoclen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (gcm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}

		if (!cryp->total_in)
			break;
	}
}

static void stm32_cryp_irq_write_ccm_header(struct stm32_cryp *cryp)
{
	int err;
	unsigned int i = 0, j, k;
	u32 alen, cfg, *src;
	u8 d8[4];

	src = sg_virt(cryp->in_sg) + _walked_in;
	alen = cryp->areq->assoclen;

	if (!_walked_in) {
		if (cryp->areq->assoclen <= 65280) {
			/* Write first u32 of B1 */
			d8[0] = (alen >> 8) & 0xFF;
			d8[1] = alen & 0xFF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		} else {
			/* Build the two first u32 of B1 (big-endian alen) */
			d8[0] = 0xFF;
			d8[1] = 0xFE;
			d8[2] = (alen & 0xFF000000) >> 24;
			d8[3] = (alen & 0x00FF0000) >> 16;

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			d8[0] = (alen & 0x0000FF00) >> 8;
			d8[1] = alen & 0x000000FF;
			d8[2] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);
			d8[3] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);
			i++;

			cryp->total_in -= min_t(size_t, 2, cryp->total_in);
		}
	}

	/* Write next u32 */
	for (; i < AES_BLOCK_32; i++) {
		/* Build an u32 */
		memset(d8, 0, sizeof(u32));
		for (k = 0; k < sizeof(u32); k++) {
			d8[k] = *((u8 *)src);
			src = stm32_cryp_next_in(cryp, src, 1);

			cryp->total_in -= min_t(size_t, 1, cryp->total_in);
			if ((cryp->total_in_save - cryp->total_in) == alen)
				break;
		}

		stm32_cryp_write(cryp, CRYP_DIN, *(u32 *)d8);

		if ((cryp->total_in_save - cryp->total_in) == alen) {
			/* Write padding if needed */
			for (j = i + 1; j < AES_BLOCK_32; j++)
				stm32_cryp_write(cryp, CRYP_DIN, 0);

			/* Wait for completion */
			err = stm32_cryp_wait_busy(cryp);
			if (err) {
				dev_err(cryp->dev, "Timeout (ccm header)\n");
				return stm32_cryp_finish_req(cryp, err);
			}

			if (stm32_cryp_get_input_text_len(cryp)) {
				/* Phase 3 : payload */
				cfg = stm32_cryp_read(cryp, CRYP_CR);
				cfg &= ~CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);

				cfg &= ~CR_PH_MASK;
				cfg |= CR_PH_PAYLOAD;
				cfg |= CR_CRYPEN;
				stm32_cryp_write(cryp, CRYP_CR, cfg);
			} else {
				/* Phase 4 : tag */
				stm32_cryp_write(cryp, CRYP_IMSCR, 0);
				stm32_cryp_finish_req(cryp, 0);
			}

			break;
		}
	}
}

static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		if (unlikely(stm32_cryp_irq_read_data(cryp))) {
			/* All bytes processed, finish */
			stm32_cryp_write(cryp, CRYP_IMSCR, 0);
			stm32_cryp_finish_req(cryp, 0);
			return IRQ_HANDLED;
		}

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			cryp->gcm_ctr++;
		} else if (is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, CRYP_CR) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_ccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	return IRQ_HANDLED;
}

static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, CRYP_MISR);

	return IRQ_WAKE_THREAD;
}

static struct skcipher_alg crypto_algs[] = {
{
	.base.cra_name		= "ecb(aes)",
	.base.cra_driver_name	= "stm32-ecb-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ecb_encrypt,
	.decrypt		= stm32_cryp_aes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(aes)",
	.base.cra_driver_name	= "stm32-cbc-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_cbc_encrypt,
	.decrypt		= stm32_cryp_aes_cbc_decrypt,
},
{
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "stm32-ctr-aes",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= stm32_cryp_aes_setkey,
	.encrypt		= stm32_cryp_aes_ctr_encrypt,
	.decrypt		= stm32_cryp_aes_ctr_decrypt,
},
{
	.base.cra_name		= "ecb(des)",
	.base.cra_driver_name	= "stm32-ecb-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_BLOCK_SIZE,
	.max_keysize		= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_ecb_encrypt,
	.decrypt		= stm32_cryp_des_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des)",
	.base.cra_driver_name	= "stm32-cbc-des",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= DES_BLOCK_SIZE,
	.max_keysize		= DES_BLOCK_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_des_setkey,
	.encrypt		= stm32_cryp_des_cbc_encrypt,
	.decrypt		= stm32_cryp_des_cbc_decrypt,
},
{
	.base.cra_name		= "ecb(des3_ede)",
	.base.cra_driver_name	= "stm32-ecb-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= 3 * DES_BLOCK_SIZE,
	.max_keysize		= 3 * DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_ecb_encrypt,
	.decrypt		= stm32_cryp_tdes_ecb_decrypt,
},
{
	.base.cra_name		= "cbc(des3_ede)",
	.base.cra_driver_name	= "stm32-cbc-des3",
	.base.cra_priority	= 200,
	.base.cra_flags		= CRYPTO_ALG_ASYNC,
	.base.cra_blocksize	= DES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct stm32_cryp_ctx),
	.base.cra_alignmask	= 0xf,
	.base.cra_module	= THIS_MODULE,

	.init			= stm32_cryp_init_tfm,
	.min_keysize		= 3 * DES_BLOCK_SIZE,
	.max_keysize		= 3 * DES_BLOCK_SIZE,
	.ivsize			= DES_BLOCK_SIZE,
	.setkey			= stm32_cryp_tdes_setkey,
	.encrypt		= stm32_cryp_tdes_cbc_encrypt,
	.decrypt		= stm32_cryp_tdes_cbc_decrypt,
},
};

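/*
 * Usage note (a minimal sketch, not part of this driver): once these
 * algorithms are registered, they are reached through the generic kernel
 * crypto API rather than called directly, e.g.:
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *		...
 *		crypto_free_skcipher(tfm);
 *	}
 *
 * Requesting the driver name "stm32-cbc-aes" instead of "cbc(aes)" selects
 * this implementation explicitly; "key" here is a caller-provided buffer.
 */
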
static struct aead_alg aead_algs[] = {
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_gcm_setauthsize,
	.encrypt	= stm32_cryp_aes_gcm_encrypt,
	.decrypt	= stm32_cryp_aes_gcm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= 12,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "stm32-gcm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
{
	.setkey		= stm32_cryp_aes_aead_setkey,
	.setauthsize	= stm32_cryp_aes_ccm_setauthsize,
	.encrypt	= stm32_cryp_aes_ccm_encrypt,
	.decrypt	= stm32_cryp_aes_ccm_decrypt,
	.init		= stm32_cryp_aes_aead_init,
	.ivsize		= AES_BLOCK_SIZE,
	.maxauthsize	= AES_BLOCK_SIZE,

	.base = {
		.cra_name		= "ccm(aes)",
		.cra_driver_name	= "stm32-ccm-aes",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct stm32_cryp_ctx),
		.cra_alignmask		= 0xf,
		.cra_module		= THIS_MODULE,
	},
},
};

static const struct stm32_cryp_caps f7_data = {
	.swap_final = true,
	.padding_wa = true,
};

static const struct stm32_cryp_caps mp1_data = {
	.swap_final = false,
	.padding_wa = false,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err(dev, "Could not get clock\n");
		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (!IS_ERR(rst)) {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	ret = crypto_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	if (ret)
		goto err_aead_algs;

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}
2028
static int stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	if (!cryp)
		return -ENODEV;

	ret = pm_runtime_get_sync(cryp->dev);
	if (ret < 0)
		return ret;

	crypto_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

2058#ifdef CONFIG_PM
2059static int stm32_cryp_runtime_suspend(struct device *dev)
2060{
2061 struct stm32_cryp *cryp = dev_get_drvdata(dev);
2062
2063 clk_disable_unprepare(cryp->clk);
2064
2065 return 0;
2066}
2067
2068static int stm32_cryp_runtime_resume(struct device *dev)
2069{
2070 struct stm32_cryp *cryp = dev_get_drvdata(dev);
2071 int ret;
2072
2073 ret = clk_prepare_enable(cryp->clk);
2074 if (ret) {
2075 dev_err(cryp->dev, "Failed to prepare_enable clock\n");
2076 return ret;
2077 }
2078
2079 return 0;
2080}
2081#endif
2082
2083static const struct dev_pm_ops stm32_cryp_pm_ops = {
2084 SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
2085 pm_runtime_force_resume)
2086 SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
2087 stm32_cryp_runtime_resume, NULL)
2088};
2089
2090static struct platform_driver stm32_cryp_driver = {
2091 .probe = stm32_cryp_probe,
2092 .remove = stm32_cryp_remove,
2093 .driver = {
2094 .name = DRIVER_NAME,
2095 .pm = &stm32_cryp_pm_ops,
2096 .of_match_table = stm32_dt_ids,
2097 },
2098};
2099
2100module_platform_driver(stm32_cryp_driver);
2101
2102MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
2103 MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
2104MODULE_LICENSE("GPL");
1// SPDX-License-Identifier: GPL-2.0-only
2/*
3 * Copyright (C) STMicroelectronics SA 2017
4 * Author: Fabien Dessenne <fabien.dessenne@st.com>
5 * Ux500 support taken from snippets in the old Ux500 cryp driver
6 */
7
8#include <crypto/aes.h>
9#include <crypto/engine.h>
10#include <crypto/internal/aead.h>
11#include <crypto/internal/des.h>
12#include <crypto/internal/skcipher.h>
13#include <crypto/scatterwalk.h>
14#include <linux/bottom_half.h>
15#include <linux/clk.h>
16#include <linux/delay.h>
17#include <linux/dma-mapping.h>
18#include <linux/dmaengine.h>
19#include <linux/err.h>
20 #include <linux/interrupt.h>
21 #include <linux/iopoll.h>
22#include <linux/kernel.h>
23#include <linux/module.h>
24#include <linux/of.h>
25#include <linux/platform_device.h>
26#include <linux/pm_runtime.h>
27#include <linux/reset.h>
28#include <linux/string.h>
29
30#define DRIVER_NAME "stm32-cryp"
31
32/* Bit [0] encrypt / decrypt */
33#define FLG_ENCRYPT BIT(0)
34/* Bit [8..1] algo & operation mode */
35#define FLG_AES BIT(1)
36#define FLG_DES BIT(2)
37#define FLG_TDES BIT(3)
38#define FLG_ECB BIT(4)
39#define FLG_CBC BIT(5)
40#define FLG_CTR BIT(6)
41#define FLG_GCM BIT(7)
42#define FLG_CCM BIT(8)
43/* Mode mask = bits [15..0] */
44#define FLG_MODE_MASK GENMASK(15, 0)
45/* Bit [31..16] status */
46#define FLG_IN_OUT_DMA BIT(16)
47#define FLG_HEADER_DMA BIT(17)
48
49/* Registers */
50#define CRYP_CR 0x00000000
51#define CRYP_SR 0x00000004
52#define CRYP_DIN 0x00000008
53#define CRYP_DOUT 0x0000000C
54#define CRYP_DMACR 0x00000010
55#define CRYP_IMSCR 0x00000014
56#define CRYP_RISR 0x00000018
57#define CRYP_MISR 0x0000001C
58#define CRYP_K0LR 0x00000020
59#define CRYP_K0RR 0x00000024
60#define CRYP_K1LR 0x00000028
61#define CRYP_K1RR 0x0000002C
62#define CRYP_K2LR 0x00000030
63#define CRYP_K2RR 0x00000034
64#define CRYP_K3LR 0x00000038
65#define CRYP_K3RR 0x0000003C
66#define CRYP_IV0LR 0x00000040
67#define CRYP_IV0RR 0x00000044
68#define CRYP_IV1LR 0x00000048
69#define CRYP_IV1RR 0x0000004C
70#define CRYP_CSGCMCCM0R 0x00000050
71#define CRYP_CSGCM0R 0x00000070
72
73#define UX500_CRYP_CR 0x00000000
74#define UX500_CRYP_SR 0x00000004
75#define UX500_CRYP_DIN 0x00000008
76#define UX500_CRYP_DINSIZE 0x0000000C
77#define UX500_CRYP_DOUT 0x00000010
78#define UX500_CRYP_DOUSIZE 0x00000014
79#define UX500_CRYP_DMACR 0x00000018
80#define UX500_CRYP_IMSC 0x0000001C
81#define UX500_CRYP_RIS 0x00000020
82#define UX500_CRYP_MIS 0x00000024
83#define UX500_CRYP_K1L 0x00000028
84#define UX500_CRYP_K1R 0x0000002C
85#define UX500_CRYP_K2L 0x00000030
86#define UX500_CRYP_K2R 0x00000034
87#define UX500_CRYP_K3L 0x00000038
88#define UX500_CRYP_K3R 0x0000003C
89#define UX500_CRYP_K4L 0x00000040
90#define UX500_CRYP_K4R 0x00000044
91#define UX500_CRYP_IV0L 0x00000048
92#define UX500_CRYP_IV0R 0x0000004C
93#define UX500_CRYP_IV1L 0x00000050
94#define UX500_CRYP_IV1R 0x00000054
95
96/* Registers values */
97#define CR_DEC_NOT_ENC 0x00000004
98#define CR_TDES_ECB 0x00000000
99#define CR_TDES_CBC 0x00000008
100#define CR_DES_ECB 0x00000010
101#define CR_DES_CBC 0x00000018
102#define CR_AES_ECB 0x00000020
103#define CR_AES_CBC 0x00000028
104#define CR_AES_CTR 0x00000030
105#define CR_AES_KP 0x00000038 /* Not on Ux500 */
106#define CR_AES_XTS 0x00000038 /* Only on Ux500 */
107#define CR_AES_GCM 0x00080000
108#define CR_AES_CCM 0x00080008
109#define CR_AES_UNKNOWN 0xFFFFFFFF
110#define CR_ALGO_MASK 0x00080038
111#define CR_DATA32 0x00000000
112#define CR_DATA16 0x00000040
113#define CR_DATA8 0x00000080
114#define CR_DATA1 0x000000C0
115#define CR_KEY128 0x00000000
116#define CR_KEY192 0x00000100
117#define CR_KEY256 0x00000200
118#define CR_KEYRDEN 0x00000400 /* Only on Ux500 */
119#define CR_KSE 0x00000800 /* Only on Ux500 */
120#define CR_FFLUSH 0x00004000
121#define CR_CRYPEN 0x00008000
122#define CR_PH_INIT 0x00000000
123#define CR_PH_HEADER 0x00010000
124#define CR_PH_PAYLOAD 0x00020000
125#define CR_PH_FINAL 0x00030000
126#define CR_PH_MASK 0x00030000
127#define CR_NBPBL_SHIFT 20
128
129#define SR_IFNF BIT(1)
130#define SR_OFNE BIT(2)
131#define SR_BUSY BIT(8)
132
133#define DMACR_DIEN BIT(0)
134#define DMACR_DOEN BIT(1)
135
136#define IMSCR_IN BIT(0)
137#define IMSCR_OUT BIT(1)
138
139#define MISR_IN BIT(0)
140#define MISR_OUT BIT(1)
141
142/* Misc */
143#define AES_BLOCK_32 (AES_BLOCK_SIZE / sizeof(u32))
144#define GCM_CTR_INIT 2
145#define CRYP_AUTOSUSPEND_DELAY 50
146
147#define CRYP_DMA_BURST_REG 4
148
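/*
 * How a request may be fed to the hardware:
 *  NO_DMA            - PIO only, the IRQ handler moves every block
 *  DMA_PLAIN_SG      - the request scatterlists can be mapped as-is
 *  DMA_NEED_SG_TRUNC - the last entry is not block-aligned: DMA the
 *                      aligned part and let the CPU finish the tail
 */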
149enum stm32_dma_mode {
150 NO_DMA,
151 DMA_PLAIN_SG,
152 DMA_NEED_SG_TRUNC
153};
154
155struct stm32_cryp_caps {
156 bool aeads_support;
157 bool linear_aes_key;
158 bool kp_mode;
159 bool iv_protection;
160 bool swap_final;
161 bool padding_wa;
162 u32 cr;
163 u32 sr;
164 u32 din;
165 u32 dout;
166 u32 dmacr;
167 u32 imsc;
168 u32 mis;
169 u32 k1l;
170 u32 k1r;
171 u32 k3r;
172 u32 iv0l;
173 u32 iv0r;
174 u32 iv1l;
175 u32 iv1r;
176};
177
178struct stm32_cryp_ctx {
179 struct stm32_cryp *cryp;
180 int keylen;
181 __be32 key[AES_KEYSIZE_256 / sizeof(u32)];
182 unsigned long flags;
183};
184
185struct stm32_cryp_reqctx {
186 unsigned long mode;
187};
188
189struct stm32_cryp {
190 struct list_head list;
191 struct device *dev;
192 void __iomem *regs;
193 phys_addr_t phys_base;
194 struct clk *clk;
195 unsigned long flags;
196 u32 irq_status;
197 const struct stm32_cryp_caps *caps;
198 struct stm32_cryp_ctx *ctx;
199
200 struct crypto_engine *engine;
201
202 struct skcipher_request *req;
203 struct aead_request *areq;
204
205 size_t authsize;
206 size_t hw_blocksize;
207
208 size_t payload_in;
209 size_t header_in;
210 size_t payload_out;
211
212 /* DMA process fields */
213 struct scatterlist *in_sg;
214 struct scatterlist *header_sg;
215 struct scatterlist *out_sg;
216 size_t in_sg_len;
217 size_t header_sg_len;
218 size_t out_sg_len;
219 struct completion dma_completion;
220
221 struct dma_chan *dma_lch_in;
222 struct dma_chan *dma_lch_out;
223 enum stm32_dma_mode dma_mode;
224
225 /* IT process fields */
226 struct scatter_walk in_walk;
227 struct scatter_walk out_walk;
228
229 __be32 last_ctr[4];
230 u32 gcm_ctr;
231};
232
233struct stm32_cryp_list {
234 struct list_head dev_list;
235 spinlock_t lock; /* protect dev_list */
236};
237
238static struct stm32_cryp_list cryp_list = {
239 .dev_list = LIST_HEAD_INIT(cryp_list.dev_list),
240 .lock = __SPIN_LOCK_UNLOCKED(cryp_list.lock),
241};
242
243static inline bool is_aes(struct stm32_cryp *cryp)
244{
245 return cryp->flags & FLG_AES;
246}
247
248static inline bool is_des(struct stm32_cryp *cryp)
249{
250 return cryp->flags & FLG_DES;
251}
252
253static inline bool is_tdes(struct stm32_cryp *cryp)
254{
255 return cryp->flags & FLG_TDES;
256}
257
258static inline bool is_ecb(struct stm32_cryp *cryp)
259{
260 return cryp->flags & FLG_ECB;
261}
262
263static inline bool is_cbc(struct stm32_cryp *cryp)
264{
265 return cryp->flags & FLG_CBC;
266}
267
268static inline bool is_ctr(struct stm32_cryp *cryp)
269{
270 return cryp->flags & FLG_CTR;
271}
272
273static inline bool is_gcm(struct stm32_cryp *cryp)
274{
275 return cryp->flags & FLG_GCM;
276}
277
278static inline bool is_ccm(struct stm32_cryp *cryp)
279{
280 return cryp->flags & FLG_CCM;
281}
282
283static inline bool is_encrypt(struct stm32_cryp *cryp)
284{
285 return cryp->flags & FLG_ENCRYPT;
286}
287
288static inline bool is_decrypt(struct stm32_cryp *cryp)
289{
290 return !is_encrypt(cryp);
291}
292
293static inline u32 stm32_cryp_read(struct stm32_cryp *cryp, u32 ofst)
294{
295 return readl_relaxed(cryp->regs + ofst);
296}
297
298static inline void stm32_cryp_write(struct stm32_cryp *cryp, u32 ofst, u32 val)
299{
300 writel_relaxed(val, cryp->regs + ofst);
301}
302
303static inline int stm32_cryp_wait_busy(struct stm32_cryp *cryp)
304{
305 u32 status;
306
307 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->sr, status,
308 !(status & SR_BUSY), 10, 100000);
309}
310
311static inline void stm32_cryp_enable(struct stm32_cryp *cryp)
312{
313 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_CRYPEN,
314 cryp->regs + cryp->caps->cr);
315}
316
317static inline int stm32_cryp_wait_enable(struct stm32_cryp *cryp)
318{
319 u32 status;
320
321 return readl_relaxed_poll_timeout(cryp->regs + cryp->caps->cr, status,
322 !(status & CR_CRYPEN), 10, 100000);
323}
324
325static inline int stm32_cryp_wait_input(struct stm32_cryp *cryp)
326{
327 u32 status;
328
329 return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
330 status & SR_IFNF, 1, 10);
331}
332
333static inline int stm32_cryp_wait_output(struct stm32_cryp *cryp)
334{
335 u32 status;
336
337 return readl_relaxed_poll_timeout_atomic(cryp->regs + cryp->caps->sr, status,
338 status & SR_OFNE, 1, 10);
339}
340
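/*
 * On variants with the iv_protection capability (Ux500), CR_KEYRDEN
 * must be set for the IV registers to be read back;
 * stm32_cryp_get_iv() brackets its reads with these two helpers.
 */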
341static inline void stm32_cryp_key_read_enable(struct stm32_cryp *cryp)
342{
343 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) | CR_KEYRDEN,
344 cryp->regs + cryp->caps->cr);
345}
346
347static inline void stm32_cryp_key_read_disable(struct stm32_cryp *cryp)
348{
349 writel_relaxed(readl_relaxed(cryp->regs + cryp->caps->cr) & ~CR_KEYRDEN,
350 cryp->regs + cryp->caps->cr);
351}
352
353static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp);
354static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp);
355static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp);
356static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp);
357static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err);
358static int stm32_cryp_dma_start(struct stm32_cryp *cryp);
359static int stm32_cryp_it_start(struct stm32_cryp *cryp);
360
361static struct stm32_cryp *stm32_cryp_find_dev(struct stm32_cryp_ctx *ctx)
362{
363 struct stm32_cryp *tmp, *cryp = NULL;
364
365 spin_lock_bh(&cryp_list.lock);
366 if (!ctx->cryp) {
367 list_for_each_entry(tmp, &cryp_list.dev_list, list) {
368 cryp = tmp;
369 break;
370 }
371 ctx->cryp = cryp;
372 } else {
373 cryp = ctx->cryp;
374 }
375
376 spin_unlock_bh(&cryp_list.lock);
377
378 return cryp;
379}
380
381static void stm32_cryp_hw_write_iv(struct stm32_cryp *cryp, __be32 *iv)
382{
383 if (!iv)
384 return;
385
386 stm32_cryp_write(cryp, cryp->caps->iv0l, be32_to_cpu(*iv++));
387 stm32_cryp_write(cryp, cryp->caps->iv0r, be32_to_cpu(*iv++));
388
389 if (is_aes(cryp)) {
390 stm32_cryp_write(cryp, cryp->caps->iv1l, be32_to_cpu(*iv++));
391 stm32_cryp_write(cryp, cryp->caps->iv1r, be32_to_cpu(*iv++));
392 }
393}
394
395static void stm32_cryp_get_iv(struct stm32_cryp *cryp)
396{
397 struct skcipher_request *req = cryp->req;
398 __be32 *tmp = (void *)req->iv;
399
400 if (!tmp)
401 return;
402
403 if (cryp->caps->iv_protection)
404 stm32_cryp_key_read_enable(cryp);
405
406 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
407 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
408
409 if (is_aes(cryp)) {
410 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
411 *tmp++ = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
412 }
413
414 if (cryp->caps->iv_protection)
415 stm32_cryp_key_read_disable(cryp);
416}
417
418/**
419 * ux500_swap_bits_in_byte() - mirror the bits in a byte
420 * @b: the byte to be mirrored
421 *
422 * The bits are swapped the following way:
423  * Byte b includes bits 0-7, nibble 1 (n1) includes bits 0-3 and
424  * nibble 2 (n2) bits 4-7.
425  *
426  * Nibble 1 (n1):
427  * (The "old" (moved) bit is replaced with a zero)
428  * 1. Move bit 6 and 7, 4 positions to the right.
429  * 2. Move bit 3 and 5, 2 positions to the right.
430  * 3. Move bit 1-4, 1 position to the right.
431  *
432  * Nibble 2 (n2):
433  * 1. Move bit 0 and 1, 4 positions to the left.
434  * 2. Move bit 2 and 4, 2 positions to the left.
435  * 3. Move bit 3-6, 1 position to the left.
436 *
437 * Combine the two nibbles to a complete and swapped byte.
438 */
439static inline u8 ux500_swap_bits_in_byte(u8 b)
440{
441#define R_SHIFT_4_MASK 0xc0 /* Bits 6 and 7, right shift 4 */
442#define R_SHIFT_2_MASK 0x28 /* (After right shift 4) Bits 3 and 5,
443 right shift 2 */
444#define R_SHIFT_1_MASK 0x1e /* (After right shift 2) Bits 1-4,
445 right shift 1 */
446#define L_SHIFT_4_MASK 0x03 /* Bits 0 and 1, left shift 4 */
447#define L_SHIFT_2_MASK 0x14 /* (After left shift 4) Bits 2 and 4,
448 left shift 2 */
449#define L_SHIFT_1_MASK 0x78 /* (After left shift 1) Bits 3-6,
450 left shift 1 */
451
452 u8 n1;
453 u8 n2;
454
455 /* Swap most significant nibble */
456 /* Right shift 4, bits 6 and 7 */
457 n1 = ((b & R_SHIFT_4_MASK) >> 4) | (b & ~(R_SHIFT_4_MASK >> 4));
458 /* Right shift 2, bits 3 and 5 */
459 n1 = ((n1 & R_SHIFT_2_MASK) >> 2) | (n1 & ~(R_SHIFT_2_MASK >> 2));
460 /* Right shift 1, bits 1-4 */
461 n1 = (n1 & R_SHIFT_1_MASK) >> 1;
462
463 /* Swap least significant nibble */
464 /* Left shift 4, bits 0 and 1 */
465 n2 = ((b & L_SHIFT_4_MASK) << 4) | (b & ~(L_SHIFT_4_MASK << 4));
466 /* Left shift 2, bits 2 and 4 */
467 n2 = ((n2 & L_SHIFT_2_MASK) << 2) | (n2 & ~(L_SHIFT_2_MASK << 2));
468 /* Left shift 1, bits 3-6 */
469 n2 = (n2 & L_SHIFT_1_MASK) << 1;
470
471 return n1 | n2;
472}
473
474/**
475 * ux500_swizzle_key() - Shuffle around words and bits in the AES key
476 * @in: key to swizzle
477 * @out: swizzled key
478 * @len: length of key, in bytes
479 *
480 * This "key swizzling procedure" is described in the examples in the
481 * DB8500 design specification. There is no real description of why
482 * the bits have been arranged like this in the hardware.
483 */
484static inline void ux500_swizzle_key(const u8 *in, u8 *out, u32 len)
485{
486 int i = 0;
487 int bpw = sizeof(u32);
488 int j;
489 int index = 0;
490
491 j = len - bpw;
492 while (j >= 0) {
493 for (i = 0; i < bpw; i++) {
494 index = len - j - bpw + i;
495 out[j + i] =
496 ux500_swap_bits_in_byte(in[index]);
497 }
498 j -= bpw;
499 }
500}
501
502static void stm32_cryp_hw_write_key(struct stm32_cryp *c)
503{
504 unsigned int i;
505 int r_id;
506
507 if (is_des(c)) {
508 stm32_cryp_write(c, c->caps->k1l, be32_to_cpu(c->ctx->key[0]));
509 stm32_cryp_write(c, c->caps->k1r, be32_to_cpu(c->ctx->key[1]));
510 return;
511 }
512
513 /*
514 	 * On the Ux500 the AES key is treated as a single bit sequence,
515 	 * 128, 192 or 256 bits long. It is written linearly into the
516 	 * registers from K1L and down, and needs to be processed to become
517 	 * a proper big-endian bit sequence.
518 */
519 if (is_aes(c) && c->caps->linear_aes_key) {
520 u32 tmpkey[8];
521
522 ux500_swizzle_key((u8 *)c->ctx->key,
523 (u8 *)tmpkey, c->ctx->keylen);
524
525 r_id = c->caps->k1l;
526 for (i = 0; i < c->ctx->keylen / sizeof(u32); i++, r_id += 4)
527 stm32_cryp_write(c, r_id, tmpkey[i]);
528
529 return;
530 }
531
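	/*
	 * Standard path: write the key last-word-first, ending at K3R, so
	 * that a shorter key only occupies the upper key registers (e.g. an
	 * AES-128 key lands in K2L/K2R/K3L/K3R on this register layout).
	 */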
532 r_id = c->caps->k3r;
533 for (i = c->ctx->keylen / sizeof(u32); i > 0; i--, r_id -= 4)
534 stm32_cryp_write(c, r_id, be32_to_cpu(c->ctx->key[i - 1]));
535}
536
537static u32 stm32_cryp_get_hw_mode(struct stm32_cryp *cryp)
538{
539 if (is_aes(cryp) && is_ecb(cryp))
540 return CR_AES_ECB;
541
542 if (is_aes(cryp) && is_cbc(cryp))
543 return CR_AES_CBC;
544
545 if (is_aes(cryp) && is_ctr(cryp))
546 return CR_AES_CTR;
547
548 if (is_aes(cryp) && is_gcm(cryp))
549 return CR_AES_GCM;
550
551 if (is_aes(cryp) && is_ccm(cryp))
552 return CR_AES_CCM;
553
554 if (is_des(cryp) && is_ecb(cryp))
555 return CR_DES_ECB;
556
557 if (is_des(cryp) && is_cbc(cryp))
558 return CR_DES_CBC;
559
560 if (is_tdes(cryp) && is_ecb(cryp))
561 return CR_TDES_ECB;
562
563 if (is_tdes(cryp) && is_cbc(cryp))
564 return CR_TDES_CBC;
565
566 dev_err(cryp->dev, "Unknown mode\n");
567 return CR_AES_UNKNOWN;
568}
569
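/*
 * On the decrypt side, areq->cryptlen includes the authentication tag
 * appended to the ciphertext, so the tag length must be subtracted to
 * get the actual input text length.
 */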
570static unsigned int stm32_cryp_get_input_text_len(struct stm32_cryp *cryp)
571{
572 return is_encrypt(cryp) ? cryp->areq->cryptlen :
573 cryp->areq->cryptlen - cryp->authsize;
574}
575
576static int stm32_cryp_gcm_init(struct stm32_cryp *cryp, u32 cfg)
577{
578 int ret;
579 __be32 iv[4];
580
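	/*
	 * With a 96-bit IV, the GCM initial counter block is
	 * IV || 0x00000001: counter value 1 is consumed by the tag
	 * computation and the first payload block uses counter 2, hence
	 * GCM_CTR_INIT == 2.
	 */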
581 /* Phase 1 : init */
582 memcpy(iv, cryp->areq->iv, 12);
583 iv[3] = cpu_to_be32(GCM_CTR_INIT);
584 cryp->gcm_ctr = GCM_CTR_INIT;
585 stm32_cryp_hw_write_iv(cryp, iv);
586
587 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);
588
589 /* Wait for end of processing */
590 ret = stm32_cryp_wait_enable(cryp);
591 if (ret) {
592 dev_err(cryp->dev, "Timeout (gcm init)\n");
593 return ret;
594 }
595
596 /* Prepare next phase */
597 if (cryp->areq->assoclen) {
598 cfg |= CR_PH_HEADER;
599 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
600 } else if (stm32_cryp_get_input_text_len(cryp)) {
601 cfg |= CR_PH_PAYLOAD;
602 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
603 }
604
605 return 0;
606}
607
608static void stm32_crypt_gcmccm_end_header(struct stm32_cryp *cryp)
609{
610 u32 cfg;
611 int err;
612
613 /* Check if whole header written */
614 if (!cryp->header_in) {
615 /* Wait for completion */
616 err = stm32_cryp_wait_busy(cryp);
617 if (err) {
618 dev_err(cryp->dev, "Timeout (gcm/ccm header)\n");
619 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
620 stm32_cryp_finish_req(cryp, err);
621 return;
622 }
623
624 if (stm32_cryp_get_input_text_len(cryp)) {
625 /* Phase 3 : payload */
626 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
627 cfg &= ~CR_CRYPEN;
628 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
629
630 cfg &= ~CR_PH_MASK;
631 cfg |= CR_PH_PAYLOAD | CR_CRYPEN;
632 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
633 } else {
634 /*
635 * Phase 4 : tag.
636 			 * Nothing to read, nothing to write: the caller has to
637 			 * end the request
638 */
639 }
640 }
641}
642
643static void stm32_cryp_write_ccm_first_header(struct stm32_cryp *cryp)
644{
645 size_t written;
646 size_t len;
647 u32 alen = cryp->areq->assoclen;
648 u32 block[AES_BLOCK_32] = {0};
649 u8 *b8 = (u8 *)block;
650
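	/*
	 * RFC 3610 encoding of the AAD length at the start of block B1:
	 * short lengths are stored on 2 bytes, larger 32-bit lengths as
	 * the 0xff 0xfe marker followed by 4 length bytes.
	 */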
651 if (alen <= 65280) {
652 /* Write first u32 of B1 */
653 b8[0] = (alen >> 8) & 0xFF;
654 b8[1] = alen & 0xFF;
655 len = 2;
656 } else {
657 /* Build the two first u32 of B1 */
658 b8[0] = 0xFF;
659 b8[1] = 0xFE;
660 b8[2] = (alen & 0xFF000000) >> 24;
661 b8[3] = (alen & 0x00FF0000) >> 16;
662 b8[4] = (alen & 0x0000FF00) >> 8;
663 b8[5] = alen & 0x000000FF;
664 len = 6;
665 }
666
667 written = min_t(size_t, AES_BLOCK_SIZE - len, alen);
668
669 scatterwalk_copychunks((char *)block + len, &cryp->in_walk, written, 0);
670
671 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
672
673 cryp->header_in -= written;
674
675 stm32_crypt_gcmccm_end_header(cryp);
676}
677
678static int stm32_cryp_ccm_init(struct stm32_cryp *cryp, u32 cfg)
679{
680 int ret;
681 u32 iv_32[AES_BLOCK_32], b0_32[AES_BLOCK_32];
682 u8 *iv = (u8 *)iv_32, *b0 = (u8 *)b0_32;
683 __be32 *bd;
684 u32 *d;
685 unsigned int i, textlen;
686
687 /* Phase 1 : init. Firstly set the CTR value to 1 (not 0) */
688 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
689 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
690 iv[AES_BLOCK_SIZE - 1] = 1;
691 stm32_cryp_hw_write_iv(cryp, (__be32 *)iv);
692
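	/*
	 * B0 layout per RFC 3610: a flags byte (0x40 = AAD present,
	 * bits 5..3 = (tag length - 2) / 2, bits 2..0 = L - 1, taken over
	 * from the IV), the nonce, then the message length in the trailing
	 * length bytes (only the low 16 bits are filled in here).
	 */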
693 /* Build B0 */
694 memcpy(b0, iv, AES_BLOCK_SIZE);
695
696 b0[0] |= (8 * ((cryp->authsize - 2) / 2));
697
698 if (cryp->areq->assoclen)
699 b0[0] |= 0x40;
700
701 textlen = stm32_cryp_get_input_text_len(cryp);
702
703 b0[AES_BLOCK_SIZE - 2] = textlen >> 8;
704 b0[AES_BLOCK_SIZE - 1] = textlen & 0xFF;
705
706 /* Enable HW */
707 stm32_cryp_write(cryp, cryp->caps->cr, cfg | CR_PH_INIT | CR_CRYPEN);
708
709 /* Write B0 */
710 d = (u32 *)b0;
711 bd = (__be32 *)b0;
712
713 for (i = 0; i < AES_BLOCK_32; i++) {
714 u32 xd = d[i];
715
716 if (!cryp->caps->padding_wa)
717 xd = be32_to_cpu(bd[i]);
718 stm32_cryp_write(cryp, cryp->caps->din, xd);
719 }
720
721 /* Wait for end of processing */
722 ret = stm32_cryp_wait_enable(cryp);
723 if (ret) {
724 dev_err(cryp->dev, "Timeout (ccm init)\n");
725 return ret;
726 }
727
728 /* Prepare next phase */
729 if (cryp->areq->assoclen) {
730 cfg |= CR_PH_HEADER | CR_CRYPEN;
731 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
732
733 /* Write first (special) block (may move to next phase [payload]) */
734 stm32_cryp_write_ccm_first_header(cryp);
735 } else if (stm32_cryp_get_input_text_len(cryp)) {
736 cfg |= CR_PH_PAYLOAD;
737 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
738 }
739
740 return 0;
741}
742
743static int stm32_cryp_hw_init(struct stm32_cryp *cryp)
744{
745 int ret;
746 u32 cfg, hw_mode;
747
748 pm_runtime_get_sync(cryp->dev);
749
750 /* Disable interrupt */
751 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
752
753 /* Set configuration */
754 cfg = CR_DATA8 | CR_FFLUSH;
755
756 switch (cryp->ctx->keylen) {
757 case AES_KEYSIZE_128:
758 cfg |= CR_KEY128;
759 break;
760
761 case AES_KEYSIZE_192:
762 cfg |= CR_KEY192;
763 break;
764
765 default:
766 case AES_KEYSIZE_256:
767 cfg |= CR_KEY256;
768 break;
769 }
770
771 hw_mode = stm32_cryp_get_hw_mode(cryp);
772 if (hw_mode == CR_AES_UNKNOWN)
773 return -EINVAL;
774
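	/*
	 * The engine decrypts with the expanded (last round) key, so a key
	 * preparation step must run first: CR_AES_KP mode on stm32, the
	 * CR_KSE bit (with ECB selected) on Ux500.
	 */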
775 /* AES ECB/CBC decrypt: run key preparation first */
776 if (is_decrypt(cryp) &&
777 ((hw_mode == CR_AES_ECB) || (hw_mode == CR_AES_CBC))) {
778 /* Configure in key preparation mode */
779 if (cryp->caps->kp_mode)
780 stm32_cryp_write(cryp, cryp->caps->cr,
781 cfg | CR_AES_KP);
782 else
783 stm32_cryp_write(cryp,
784 cryp->caps->cr, cfg | CR_AES_ECB | CR_KSE);
785
786 /* Set key only after full configuration done */
787 stm32_cryp_hw_write_key(cryp);
788
789 /* Start prepare key */
790 stm32_cryp_enable(cryp);
791 /* Wait for end of processing */
792 ret = stm32_cryp_wait_busy(cryp);
793 if (ret) {
794 dev_err(cryp->dev, "Timeout (key preparation)\n");
795 return ret;
796 }
797
798 cfg |= hw_mode | CR_DEC_NOT_ENC;
799
800 /* Apply updated config (Decrypt + algo) and flush */
801 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
802 } else {
803 cfg |= hw_mode;
804 if (is_decrypt(cryp))
805 cfg |= CR_DEC_NOT_ENC;
806
807 /* Apply config and flush */
808 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
809
810 /* Set key only after configuration done */
811 stm32_cryp_hw_write_key(cryp);
812 }
813
814 switch (hw_mode) {
815 case CR_AES_GCM:
816 case CR_AES_CCM:
817 /* Phase 1 : init */
818 if (hw_mode == CR_AES_CCM)
819 ret = stm32_cryp_ccm_init(cryp, cfg);
820 else
821 ret = stm32_cryp_gcm_init(cryp, cfg);
822
823 if (ret)
824 return ret;
825
826 break;
827
828 case CR_DES_CBC:
829 case CR_TDES_CBC:
830 case CR_AES_CBC:
831 case CR_AES_CTR:
832 stm32_cryp_hw_write_iv(cryp, (__be32 *)cryp->req->iv);
833 break;
834
835 default:
836 break;
837 }
838
839 /* Enable now */
840 stm32_cryp_enable(cryp);
841
842 return 0;
843}
844
845static void stm32_cryp_finish_req(struct stm32_cryp *cryp, int err)
846{
847 if (!err && (is_gcm(cryp) || is_ccm(cryp)))
848 /* Phase 4 : output tag */
849 err = stm32_cryp_read_auth_tag(cryp);
850
851 if (!err && (!(is_gcm(cryp) || is_ccm(cryp) || is_ecb(cryp))))
852 stm32_cryp_get_iv(cryp);
853
854 pm_runtime_mark_last_busy(cryp->dev);
855 pm_runtime_put_autosuspend(cryp->dev);
856
857 if (is_gcm(cryp) || is_ccm(cryp))
858 crypto_finalize_aead_request(cryp->engine, cryp->areq, err);
859 else
860 crypto_finalize_skcipher_request(cryp->engine, cryp->req, err);
861}
862
863static void stm32_cryp_header_dma_callback(void *param)
864{
865 struct stm32_cryp *cryp = (struct stm32_cryp *)param;
866 int ret;
867 u32 reg;
868
869 dma_unmap_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
870
871 reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
872 stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));
873
874 kfree(cryp->header_sg);
875
876 reg = stm32_cryp_read(cryp, cryp->caps->cr);
877
878 if (cryp->header_in) {
879 stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);
880
881 ret = stm32_cryp_wait_input(cryp);
882 if (ret) {
883 dev_err(cryp->dev, "input header ready timeout after dma\n");
884 stm32_cryp_finish_req(cryp, ret);
885 return;
886 }
887 stm32_cryp_irq_write_gcmccm_header(cryp);
888 WARN_ON(cryp->header_in);
889 }
890
891 if (stm32_cryp_get_input_text_len(cryp)) {
892 /* Phase 3 : payload */
893 reg = stm32_cryp_read(cryp, cryp->caps->cr);
894 stm32_cryp_write(cryp, cryp->caps->cr, reg & ~CR_CRYPEN);
895
896 reg &= ~CR_PH_MASK;
897 reg |= CR_PH_PAYLOAD | CR_CRYPEN;
898 stm32_cryp_write(cryp, cryp->caps->cr, reg);
899
900 if (cryp->flags & FLG_IN_OUT_DMA) {
901 ret = stm32_cryp_dma_start(cryp);
902 if (ret)
903 stm32_cryp_finish_req(cryp, ret);
904 } else {
905 stm32_cryp_it_start(cryp);
906 }
907 } else {
908 /*
909 * Phase 4 : tag.
910 * Nothing to read, nothing to write => end request
911 */
912 stm32_cryp_finish_req(cryp, 0);
913 }
914}
915
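/*
 * Only the OUT channel carries a callback: once the output data has
 * been written back, both directions are done. stm32_cryp_dma_start()
 * waits on dma_completion so a stalled transfer ends in -ETIMEDOUT
 * rather than a hang.
 */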
916static void stm32_cryp_dma_callback(void *param)
917{
918 struct stm32_cryp *cryp = (struct stm32_cryp *)param;
919 int ret;
920 u32 reg;
921
922 complete(&cryp->dma_completion); /* completion to indicate no timeout */
923
924 dma_sync_sg_for_device(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
925
926 if (cryp->in_sg != cryp->out_sg)
927 dma_unmap_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
928
929 dma_unmap_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
930
931 reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
932 stm32_cryp_write(cryp, cryp->caps->dmacr, reg & ~(DMACR_DOEN | DMACR_DIEN));
933
934 reg = stm32_cryp_read(cryp, cryp->caps->cr);
935
936 if (is_gcm(cryp) || is_ccm(cryp)) {
937 kfree(cryp->in_sg);
938 kfree(cryp->out_sg);
939 } else {
940 if (cryp->in_sg != cryp->req->src)
941 kfree(cryp->in_sg);
942 if (cryp->out_sg != cryp->req->dst)
943 kfree(cryp->out_sg);
944 }
945
946 if (cryp->payload_in) {
947 stm32_cryp_write(cryp, cryp->caps->cr, reg | CR_CRYPEN);
948
949 ret = stm32_cryp_wait_input(cryp);
950 if (ret) {
951 dev_err(cryp->dev, "input ready timeout after dma\n");
952 stm32_cryp_finish_req(cryp, ret);
953 return;
954 }
955 stm32_cryp_irq_write_data(cryp);
956
957 ret = stm32_cryp_wait_output(cryp);
958 if (ret) {
959 dev_err(cryp->dev, "output ready timeout after dma\n");
960 stm32_cryp_finish_req(cryp, ret);
961 return;
962 }
963 stm32_cryp_irq_read_data(cryp);
964 }
965
966 stm32_cryp_finish_req(cryp, 0);
967}
968
969static int stm32_cryp_header_dma_start(struct stm32_cryp *cryp)
970{
971 int ret;
972 struct dma_async_tx_descriptor *tx_in;
973 u32 reg;
974 size_t align_size;
975
976 ret = dma_map_sg(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
977 if (!ret) {
978 dev_err(cryp->dev, "dma_map_sg() error\n");
979 return -ENOMEM;
980 }
981
982 dma_sync_sg_for_device(cryp->dev, cryp->header_sg, cryp->header_sg_len, DMA_TO_DEVICE);
983
984 tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->header_sg, cryp->header_sg_len,
985 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
986 if (!tx_in) {
987 dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
988 return -EINVAL;
989 }
990
991 tx_in->callback_param = cryp;
992 tx_in->callback = stm32_cryp_header_dma_callback;
993
994 	/* Advance the scatterwalk past the header bytes handled by DMA */
995 align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
996 scatterwalk_copychunks(NULL, &cryp->in_walk, align_size, 2);
997 cryp->header_in -= align_size;
998
999 ret = dma_submit_error(dmaengine_submit(tx_in));
1000 if (ret < 0) {
1001 dev_err(cryp->dev, "DMA in submit failed\n");
1002 return ret;
1003 }
1004 dma_async_issue_pending(cryp->dma_lch_in);
1005
1006 reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
1007 stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DIEN);
1008
1009 return 0;
1010}
1011
1012static int stm32_cryp_dma_start(struct stm32_cryp *cryp)
1013{
1014 int ret;
1015 size_t align_size;
1016 struct dma_async_tx_descriptor *tx_in, *tx_out;
1017 u32 reg;
1018
1019 if (cryp->in_sg != cryp->out_sg) {
1020 ret = dma_map_sg(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
1021 if (!ret) {
1022 dev_err(cryp->dev, "dma_map_sg() error\n");
1023 return -ENOMEM;
1024 }
1025 }
1026
1027 ret = dma_map_sg(cryp->dev, cryp->out_sg, cryp->out_sg_len, DMA_FROM_DEVICE);
1028 if (!ret) {
1029 dev_err(cryp->dev, "dma_map_sg() error\n");
1030 return -ENOMEM;
1031 }
1032
1033 dma_sync_sg_for_device(cryp->dev, cryp->in_sg, cryp->in_sg_len, DMA_TO_DEVICE);
1034
1035 tx_in = dmaengine_prep_slave_sg(cryp->dma_lch_in, cryp->in_sg, cryp->in_sg_len,
1036 DMA_MEM_TO_DEV, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1037 if (!tx_in) {
1038 dev_err(cryp->dev, "IN prep_slave_sg() failed\n");
1039 return -EINVAL;
1040 }
1041
1042 /* No callback necessary */
1043 tx_in->callback_param = cryp;
1044 tx_in->callback = NULL;
1045
1046 tx_out = dmaengine_prep_slave_sg(cryp->dma_lch_out, cryp->out_sg, cryp->out_sg_len,
1047 DMA_DEV_TO_MEM, DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
1048 if (!tx_out) {
1049 dev_err(cryp->dev, "OUT prep_slave_sg() failed\n");
1050 return -EINVAL;
1051 }
1052
1053 reinit_completion(&cryp->dma_completion);
1054 tx_out->callback = stm32_cryp_dma_callback;
1055 tx_out->callback_param = cryp;
1056
1057 	/* Advance the input scatterwalk past the data handled by DMA */
1058 align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
1059 scatterwalk_copychunks(NULL, &cryp->in_walk, align_size, 2);
1060 cryp->payload_in -= align_size;
1061
1062 ret = dma_submit_error(dmaengine_submit(tx_in));
1063 if (ret < 0) {
1064 dev_err(cryp->dev, "DMA in submit failed\n");
1065 return ret;
1066 }
1067 dma_async_issue_pending(cryp->dma_lch_in);
1068
1069 	/* Advance the output scatterwalk past the data handled by DMA */
1070 scatterwalk_copychunks(NULL, &cryp->out_walk, align_size, 2);
1071 cryp->payload_out -= align_size;
1072 ret = dma_submit_error(dmaengine_submit(tx_out));
1073 if (ret < 0) {
1074 dev_err(cryp->dev, "DMA out submit failed\n");
1075 return ret;
1076 }
1077 dma_async_issue_pending(cryp->dma_lch_out);
1078
1079 reg = stm32_cryp_read(cryp, cryp->caps->dmacr);
1080 stm32_cryp_write(cryp, cryp->caps->dmacr, reg | DMACR_DOEN | DMACR_DIEN);
1081
1082 if (!wait_for_completion_timeout(&cryp->dma_completion, msecs_to_jiffies(1000))) {
1083 dev_err(cryp->dev, "DMA out timed out\n");
1084 dmaengine_terminate_sync(cryp->dma_lch_out);
1085 return -ETIMEDOUT;
1086 }
1087
1088 return 0;
1089}
1090
1091static int stm32_cryp_it_start(struct stm32_cryp *cryp)
1092{
1093 /* Enable interrupt and let the IRQ handler do everything */
1094 stm32_cryp_write(cryp, cryp->caps->imsc, IMSCR_IN | IMSCR_OUT);
1095
1096 return 0;
1097}
1098
1099static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq);
1100
1101static int stm32_cryp_init_tfm(struct crypto_skcipher *tfm)
1102{
1103 crypto_skcipher_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
1104
1105 return 0;
1106}
1107
1108static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq);
1109
1110static int stm32_cryp_aes_aead_init(struct crypto_aead *tfm)
1111{
1112 crypto_aead_set_reqsize(tfm, sizeof(struct stm32_cryp_reqctx));
1113
1114 return 0;
1115}
1116
1117static int stm32_cryp_crypt(struct skcipher_request *req, unsigned long mode)
1118{
1119 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
1120 crypto_skcipher_reqtfm(req));
1121 struct stm32_cryp_reqctx *rctx = skcipher_request_ctx(req);
1122 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
1123
1124 if (!cryp)
1125 return -ENODEV;
1126
1127 rctx->mode = mode;
1128
1129 return crypto_transfer_skcipher_request_to_engine(cryp->engine, req);
1130}
1131
1132static int stm32_cryp_aead_crypt(struct aead_request *req, unsigned long mode)
1133{
1134 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1135 struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
1136 struct stm32_cryp *cryp = stm32_cryp_find_dev(ctx);
1137
1138 if (!cryp)
1139 return -ENODEV;
1140
1141 rctx->mode = mode;
1142
1143 return crypto_transfer_aead_request_to_engine(cryp->engine, req);
1144}
1145
1146static int stm32_cryp_setkey(struct crypto_skcipher *tfm, const u8 *key,
1147 unsigned int keylen)
1148{
1149 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(tfm);
1150
1151 memcpy(ctx->key, key, keylen);
1152 ctx->keylen = keylen;
1153
1154 return 0;
1155}
1156
1157static int stm32_cryp_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1158 unsigned int keylen)
1159{
1160 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
1161 keylen != AES_KEYSIZE_256)
1162 return -EINVAL;
1163 else
1164 return stm32_cryp_setkey(tfm, key, keylen);
1165}
1166
1167static int stm32_cryp_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
1168 unsigned int keylen)
1169{
1170 return verify_skcipher_des_key(tfm, key) ?:
1171 stm32_cryp_setkey(tfm, key, keylen);
1172}
1173
1174static int stm32_cryp_tdes_setkey(struct crypto_skcipher *tfm, const u8 *key,
1175 unsigned int keylen)
1176{
1177 return verify_skcipher_des3_key(tfm, key) ?:
1178 stm32_cryp_setkey(tfm, key, keylen);
1179}
1180
1181static int stm32_cryp_aes_aead_setkey(struct crypto_aead *tfm, const u8 *key,
1182 unsigned int keylen)
1183{
1184 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(tfm);
1185
1186 if (keylen != AES_KEYSIZE_128 && keylen != AES_KEYSIZE_192 &&
1187 keylen != AES_KEYSIZE_256)
1188 return -EINVAL;
1189
1190 memcpy(ctx->key, key, keylen);
1191 ctx->keylen = keylen;
1192
1193 return 0;
1194}
1195
1196static int stm32_cryp_aes_gcm_setauthsize(struct crypto_aead *tfm,
1197 unsigned int authsize)
1198{
1199 switch (authsize) {
1200 case 4:
1201 case 8:
1202 case 12:
1203 case 13:
1204 case 14:
1205 case 15:
1206 case 16:
1207 break;
1208 default:
1209 return -EINVAL;
1210 }
1211
1212 return 0;
1213}
1214
1215static int stm32_cryp_aes_ccm_setauthsize(struct crypto_aead *tfm,
1216 unsigned int authsize)
1217{
1218 switch (authsize) {
1219 case 4:
1220 case 6:
1221 case 8:
1222 case 10:
1223 case 12:
1224 case 14:
1225 case 16:
1226 break;
1227 default:
1228 return -EINVAL;
1229 }
1230
1231 return 0;
1232}
1233
1234static int stm32_cryp_aes_ecb_encrypt(struct skcipher_request *req)
1235{
1236 if (req->cryptlen % AES_BLOCK_SIZE)
1237 return -EINVAL;
1238
1239 if (req->cryptlen == 0)
1240 return 0;
1241
1242 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB | FLG_ENCRYPT);
1243}
1244
1245static int stm32_cryp_aes_ecb_decrypt(struct skcipher_request *req)
1246{
1247 if (req->cryptlen % AES_BLOCK_SIZE)
1248 return -EINVAL;
1249
1250 if (req->cryptlen == 0)
1251 return 0;
1252
1253 return stm32_cryp_crypt(req, FLG_AES | FLG_ECB);
1254}
1255
1256static int stm32_cryp_aes_cbc_encrypt(struct skcipher_request *req)
1257{
1258 if (req->cryptlen % AES_BLOCK_SIZE)
1259 return -EINVAL;
1260
1261 if (req->cryptlen == 0)
1262 return 0;
1263
1264 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC | FLG_ENCRYPT);
1265}
1266
1267static int stm32_cryp_aes_cbc_decrypt(struct skcipher_request *req)
1268{
1269 if (req->cryptlen % AES_BLOCK_SIZE)
1270 return -EINVAL;
1271
1272 if (req->cryptlen == 0)
1273 return 0;
1274
1275 return stm32_cryp_crypt(req, FLG_AES | FLG_CBC);
1276}
1277
1278static int stm32_cryp_aes_ctr_encrypt(struct skcipher_request *req)
1279{
1280 if (req->cryptlen == 0)
1281 return 0;
1282
1283 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR | FLG_ENCRYPT);
1284}
1285
1286static int stm32_cryp_aes_ctr_decrypt(struct skcipher_request *req)
1287{
1288 if (req->cryptlen == 0)
1289 return 0;
1290
1291 return stm32_cryp_crypt(req, FLG_AES | FLG_CTR);
1292}
1293
1294static int stm32_cryp_aes_gcm_encrypt(struct aead_request *req)
1295{
1296 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM | FLG_ENCRYPT);
1297}
1298
1299static int stm32_cryp_aes_gcm_decrypt(struct aead_request *req)
1300{
1301 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_GCM);
1302}
1303
1304static inline int crypto_ccm_check_iv(const u8 *iv)
1305{
1306 /* 2 <= L <= 8, so 1 <= L' <= 7. */
1307 if (iv[0] < 1 || iv[0] > 7)
1308 return -EINVAL;
1309
1310 return 0;
1311}
1312
1313static int stm32_cryp_aes_ccm_encrypt(struct aead_request *req)
1314{
1315 int err;
1316
1317 err = crypto_ccm_check_iv(req->iv);
1318 if (err)
1319 return err;
1320
1321 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM | FLG_ENCRYPT);
1322}
1323
1324static int stm32_cryp_aes_ccm_decrypt(struct aead_request *req)
1325{
1326 int err;
1327
1328 err = crypto_ccm_check_iv(req->iv);
1329 if (err)
1330 return err;
1331
1332 return stm32_cryp_aead_crypt(req, FLG_AES | FLG_CCM);
1333}
1334
1335static int stm32_cryp_des_ecb_encrypt(struct skcipher_request *req)
1336{
1337 if (req->cryptlen % DES_BLOCK_SIZE)
1338 return -EINVAL;
1339
1340 if (req->cryptlen == 0)
1341 return 0;
1342
1343 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB | FLG_ENCRYPT);
1344}
1345
1346static int stm32_cryp_des_ecb_decrypt(struct skcipher_request *req)
1347{
1348 if (req->cryptlen % DES_BLOCK_SIZE)
1349 return -EINVAL;
1350
1351 if (req->cryptlen == 0)
1352 return 0;
1353
1354 return stm32_cryp_crypt(req, FLG_DES | FLG_ECB);
1355}
1356
1357static int stm32_cryp_des_cbc_encrypt(struct skcipher_request *req)
1358{
1359 if (req->cryptlen % DES_BLOCK_SIZE)
1360 return -EINVAL;
1361
1362 if (req->cryptlen == 0)
1363 return 0;
1364
1365 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC | FLG_ENCRYPT);
1366}
1367
1368static int stm32_cryp_des_cbc_decrypt(struct skcipher_request *req)
1369{
1370 if (req->cryptlen % DES_BLOCK_SIZE)
1371 return -EINVAL;
1372
1373 if (req->cryptlen == 0)
1374 return 0;
1375
1376 return stm32_cryp_crypt(req, FLG_DES | FLG_CBC);
1377}
1378
1379static int stm32_cryp_tdes_ecb_encrypt(struct skcipher_request *req)
1380{
1381 if (req->cryptlen % DES_BLOCK_SIZE)
1382 return -EINVAL;
1383
1384 if (req->cryptlen == 0)
1385 return 0;
1386
1387 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB | FLG_ENCRYPT);
1388}
1389
1390static int stm32_cryp_tdes_ecb_decrypt(struct skcipher_request *req)
1391{
1392 if (req->cryptlen % DES_BLOCK_SIZE)
1393 return -EINVAL;
1394
1395 if (req->cryptlen == 0)
1396 return 0;
1397
1398 return stm32_cryp_crypt(req, FLG_TDES | FLG_ECB);
1399}
1400
1401static int stm32_cryp_tdes_cbc_encrypt(struct skcipher_request *req)
1402{
1403 if (req->cryptlen % DES_BLOCK_SIZE)
1404 return -EINVAL;
1405
1406 if (req->cryptlen == 0)
1407 return 0;
1408
1409 return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC | FLG_ENCRYPT);
1410}
1411
1412static int stm32_cryp_tdes_cbc_decrypt(struct skcipher_request *req)
1413{
1414 if (req->cryptlen % DES_BLOCK_SIZE)
1415 return -EINVAL;
1416
1417 if (req->cryptlen == 0)
1418 return 0;
1419
1420 return stm32_cryp_crypt(req, FLG_TDES | FLG_CBC);
1421}
1422
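/*
 * DMA eligibility of a scatterlist: every entry must start on a u32
 * boundary and, except for the last one, span whole blocks. A last
 * entry that is not AES-block-aligned is still usable once its tail is
 * truncated away (DMA_NEED_SG_TRUNC). Very short buffers stay on the
 * CPU, where the DMA setup cost is not worth paying.
 */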
1423static enum stm32_dma_mode stm32_cryp_dma_check_sg(struct scatterlist *test_sg, size_t len,
1424 size_t block_size)
1425{
1426 struct scatterlist *sg;
1427 int i;
1428
1429 if (len <= 16)
1430 return NO_DMA; /* Faster */
1431
1432 for_each_sg(test_sg, sg, sg_nents(test_sg), i) {
1433 if (!IS_ALIGNED(sg->length, block_size) && !sg_is_last(sg))
1434 return NO_DMA;
1435
1436 if (sg->offset % sizeof(u32))
1437 return NO_DMA;
1438
1439 if (sg_is_last(sg) && !IS_ALIGNED(sg->length, AES_BLOCK_SIZE))
1440 return DMA_NEED_SG_TRUNC;
1441 }
1442
1443 return DMA_PLAIN_SG;
1444}
1445
1446static enum stm32_dma_mode stm32_cryp_dma_check(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1447 struct scatterlist *out_sg)
1448{
1449 enum stm32_dma_mode ret = DMA_PLAIN_SG;
1450
1451 if (!is_aes(cryp))
1452 return NO_DMA;
1453
1454 if (!cryp->dma_lch_in || !cryp->dma_lch_out)
1455 return NO_DMA;
1456
1457 ret = stm32_cryp_dma_check_sg(in_sg, cryp->payload_in, AES_BLOCK_SIZE);
1458 if (ret == NO_DMA)
1459 return ret;
1460
1461 ret = stm32_cryp_dma_check_sg(out_sg, cryp->payload_out, AES_BLOCK_SIZE);
1462 if (ret == NO_DMA)
1463 return ret;
1464
1465 /* Check CTR counter overflow */
1466 if (is_aes(cryp) && is_ctr(cryp)) {
1467 u32 c;
1468 __be32 iv3;
1469
1470 memcpy(&iv3, &cryp->req->iv[3 * sizeof(u32)], sizeof(iv3));
1471 c = be32_to_cpu(iv3);
1472 if ((c + cryp->payload_in) < cryp->payload_in)
1473 return NO_DMA;
1474 }
1475
1476 /* Workaround */
1477 if (is_aes(cryp) && is_ctr(cryp) && ret == DMA_NEED_SG_TRUNC)
1478 return NO_DMA;
1479
1480 return ret;
1481}
1482
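/*
 * Build a trimmed copy of @sg covering only [skip, skip + size): the
 * caller gets a freshly allocated table (to be released with kfree())
 * whose last entry is marked as end, so that only whole blocks are
 * handed to the DMA.
 */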
1483static int stm32_cryp_truncate_sg(struct scatterlist **new_sg, size_t *new_sg_len,
1484 struct scatterlist *sg, off_t skip, size_t size)
1485{
1486 struct scatterlist *cur;
1487 int alloc_sg_len;
1488
1489 *new_sg_len = 0;
1490
1491 if (!sg || !size) {
1492 *new_sg = NULL;
1493 return 0;
1494 }
1495
1496 alloc_sg_len = sg_nents_for_len(sg, skip + size);
1497 if (alloc_sg_len < 0)
1498 return alloc_sg_len;
1499
1500 	/* We may allocate more sg entries than needed, but it is easier */
1501 *new_sg = kmalloc_array((size_t)alloc_sg_len, sizeof(struct scatterlist), GFP_KERNEL);
1502 if (!*new_sg)
1503 return -ENOMEM;
1504
1505 sg_init_table(*new_sg, (unsigned int)alloc_sg_len);
1506
1507 cur = *new_sg;
1508 while (sg && size) {
1509 unsigned int len = sg->length;
1510 unsigned int offset = sg->offset;
1511
1512 if (skip > len) {
1513 skip -= len;
1514 sg = sg_next(sg);
1515 continue;
1516 }
1517
1518 if (skip) {
1519 len -= skip;
1520 offset += skip;
1521 skip = 0;
1522 }
1523
1524 if (size < len)
1525 len = size;
1526
1527 if (len > 0) {
1528 (*new_sg_len)++;
1529 size -= len;
1530 sg_set_page(cur, sg_page(sg), len, offset);
1531 if (size == 0)
1532 sg_mark_end(cur);
1533 cur = sg_next(cur);
1534 }
1535
1536 sg = sg_next(sg);
1537 }
1538
1539 return 0;
1540}
1541
1542static int stm32_cryp_cipher_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1543 struct scatterlist *out_sg)
1544{
1545 size_t align_size;
1546 int ret;
1547
1548 cryp->dma_mode = stm32_cryp_dma_check(cryp, in_sg, out_sg);
1549
1550 scatterwalk_start(&cryp->in_walk, in_sg);
1551 scatterwalk_start(&cryp->out_walk, out_sg);
1552
1553 if (cryp->dma_mode == NO_DMA) {
1554 cryp->flags &= ~FLG_IN_OUT_DMA;
1555
1556 if (is_ctr(cryp))
1557 memset(cryp->last_ctr, 0, sizeof(cryp->last_ctr));
1558
1559 } else if (cryp->dma_mode == DMA_NEED_SG_TRUNC) {
1560
1561 cryp->flags |= FLG_IN_OUT_DMA;
1562
1563 align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
1564 ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, 0, align_size);
1565 if (ret)
1566 return ret;
1567
1568 ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, 0,
1569 align_size);
1570 if (ret) {
1571 kfree(cryp->in_sg);
1572 return ret;
1573 }
1574 } else {
1575 cryp->flags |= FLG_IN_OUT_DMA;
1576
1577 cryp->in_sg = in_sg;
1578 cryp->out_sg = out_sg;
1579
1580 ret = sg_nents_for_len(cryp->in_sg, cryp->payload_in);
1581 if (ret < 0)
1582 return ret;
1583 cryp->in_sg_len = (size_t)ret;
1584
1585 ret = sg_nents_for_len(out_sg, cryp->payload_out);
1586 if (ret < 0)
1587 return ret;
1588 cryp->out_sg_len = (size_t)ret;
1589 }
1590
1591 return 0;
1592}
1593
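/*
 * For AEAD with DMA, the request is split into up to three trimmed
 * scatterlists: header_sg (AAD) plus in_sg/out_sg (payload). Each part
 * falls back to PIO independently (FLG_HEADER_DMA / FLG_IN_OUT_DMA)
 * when its list is not DMA-able, and unaligned tails are always
 * finished by the CPU.
 */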
1594static int stm32_cryp_aead_prepare(struct stm32_cryp *cryp, struct scatterlist *in_sg,
1595 struct scatterlist *out_sg)
1596{
1597 size_t align_size;
1598 off_t skip;
1599 int ret, ret2;
1600
1601 cryp->header_sg = NULL;
1602 cryp->in_sg = NULL;
1603 cryp->out_sg = NULL;
1604
1605 if (!cryp->dma_lch_in || !cryp->dma_lch_out) {
1606 cryp->dma_mode = NO_DMA;
1607 cryp->flags &= ~(FLG_IN_OUT_DMA | FLG_HEADER_DMA);
1608
1609 return 0;
1610 }
1611
1612 	/* CCM hw_init may already have consumed part of the header */
1613 skip = cryp->areq->assoclen - cryp->header_in;
1614
1615 align_size = ALIGN_DOWN(cryp->header_in, cryp->hw_blocksize);
1616 ret = stm32_cryp_truncate_sg(&cryp->header_sg, &cryp->header_sg_len, in_sg, skip,
1617 align_size);
1618 if (ret)
1619 return ret;
1620
1621 ret = stm32_cryp_dma_check_sg(cryp->header_sg, align_size, AES_BLOCK_SIZE);
1622 if (ret == NO_DMA) {
1623 /* We cannot DMA the header */
1624 kfree(cryp->header_sg);
1625 cryp->header_sg = NULL;
1626
1627 cryp->flags &= ~FLG_HEADER_DMA;
1628 } else {
1629 cryp->flags |= FLG_HEADER_DMA;
1630 }
1631
1632 /* Now skip all header to be at payload start */
1633 skip = cryp->areq->assoclen;
1634 align_size = ALIGN_DOWN(cryp->payload_in, cryp->hw_blocksize);
1635 ret = stm32_cryp_truncate_sg(&cryp->in_sg, &cryp->in_sg_len, in_sg, skip, align_size);
1636 if (ret) {
1637 kfree(cryp->header_sg);
1638 return ret;
1639 }
1640
1641 	/* For the out buffer, align_size is the same as for the in buffer */
1642 ret = stm32_cryp_truncate_sg(&cryp->out_sg, &cryp->out_sg_len, out_sg, skip, align_size);
1643 if (ret) {
1644 kfree(cryp->header_sg);
1645 kfree(cryp->in_sg);
1646 return ret;
1647 }
1648
1649 ret = stm32_cryp_dma_check_sg(cryp->in_sg, align_size, AES_BLOCK_SIZE);
1650 ret2 = stm32_cryp_dma_check_sg(cryp->out_sg, align_size, AES_BLOCK_SIZE);
1651 if (ret == NO_DMA || ret2 == NO_DMA) {
1652 kfree(cryp->in_sg);
1653 cryp->in_sg = NULL;
1654
1655 kfree(cryp->out_sg);
1656 cryp->out_sg = NULL;
1657
1658 cryp->flags &= ~FLG_IN_OUT_DMA;
1659 } else {
1660 cryp->flags |= FLG_IN_OUT_DMA;
1661 }
1662
1663 return 0;
1664}
1665
1666static int stm32_cryp_prepare_req(struct skcipher_request *req,
1667 struct aead_request *areq)
1668{
1669 struct stm32_cryp_ctx *ctx;
1670 struct stm32_cryp *cryp;
1671 struct stm32_cryp_reqctx *rctx;
1672 struct scatterlist *in_sg, *out_sg;
1673 int ret;
1674
1675 if (!req && !areq)
1676 return -EINVAL;
1677
1678 ctx = req ? crypto_skcipher_ctx(crypto_skcipher_reqtfm(req)) :
1679 crypto_aead_ctx(crypto_aead_reqtfm(areq));
1680
1681 cryp = ctx->cryp;
1682
1683 rctx = req ? skcipher_request_ctx(req) : aead_request_ctx(areq);
1684 rctx->mode &= FLG_MODE_MASK;
1685
1686 cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
1687 cryp->hw_blocksize = is_aes(cryp) ? AES_BLOCK_SIZE : DES_BLOCK_SIZE;
1688 cryp->ctx = ctx;
1689
1690 if (req) {
1691 cryp->req = req;
1692 cryp->areq = NULL;
1693 cryp->header_in = 0;
1694 cryp->payload_in = req->cryptlen;
1695 cryp->payload_out = req->cryptlen;
1696 cryp->authsize = 0;
1697
1698 in_sg = req->src;
1699 out_sg = req->dst;
1700
1701 ret = stm32_cryp_cipher_prepare(cryp, in_sg, out_sg);
1702 if (ret)
1703 return ret;
1704
1705 ret = stm32_cryp_hw_init(cryp);
1706 } else {
1707 /*
1708 * Length of input and output data:
1709 * Encryption case:
1710 * INPUT = AssocData || PlainText
1711 * <- assoclen -> <- cryptlen ->
1712 *
1713 * OUTPUT = AssocData || CipherText || AuthTag
1714 * <- assoclen -> <-- cryptlen --> <- authsize ->
1715 *
1716 * Decryption case:
1717 * INPUT = AssocData || CipherTex || AuthTag
1718 * <- assoclen ---> <---------- cryptlen ---------->
1719 *
1720 * OUTPUT = AssocData || PlainText
1721 * <- assoclen -> <- cryptlen - authsize ->
1722 */
1723 cryp->areq = areq;
1724 cryp->req = NULL;
1725 cryp->authsize = crypto_aead_authsize(crypto_aead_reqtfm(areq));
1726 if (is_encrypt(cryp)) {
1727 cryp->payload_in = areq->cryptlen;
1728 cryp->header_in = areq->assoclen;
1729 cryp->payload_out = areq->cryptlen;
1730 } else {
1731 cryp->payload_in = areq->cryptlen - cryp->authsize;
1732 cryp->header_in = areq->assoclen;
1733 cryp->payload_out = cryp->payload_in;
1734 }
1735
1736 in_sg = areq->src;
1737 out_sg = areq->dst;
1738
1739 scatterwalk_start(&cryp->in_walk, in_sg);
1740 scatterwalk_start(&cryp->out_walk, out_sg);
1741 /* In output, jump after assoc data */
1742 scatterwalk_copychunks(NULL, &cryp->out_walk, cryp->areq->assoclen, 2);
1743
1744 ret = stm32_cryp_hw_init(cryp);
1745 if (ret)
1746 return ret;
1747
1748 ret = stm32_cryp_aead_prepare(cryp, in_sg, out_sg);
1749 }
1750
1751 return ret;
1752}
1753
1754static int stm32_cryp_cipher_one_req(struct crypto_engine *engine, void *areq)
1755{
1756 struct skcipher_request *req = container_of(areq,
1757 struct skcipher_request,
1758 base);
1759 struct stm32_cryp_ctx *ctx = crypto_skcipher_ctx(
1760 crypto_skcipher_reqtfm(req));
1761 struct stm32_cryp *cryp = ctx->cryp;
1762 int ret;
1763
1764 if (!cryp)
1765 return -ENODEV;
1766
1767 ret = stm32_cryp_prepare_req(req, NULL);
1768 if (ret)
1769 return ret;
1770
1771 if (cryp->flags & FLG_IN_OUT_DMA)
1772 ret = stm32_cryp_dma_start(cryp);
1773 else
1774 ret = stm32_cryp_it_start(cryp);
1775
1776 if (ret == -ETIMEDOUT)
1777 stm32_cryp_finish_req(cryp, ret);
1778
1779 return ret;
1780}
1781
1782static int stm32_cryp_aead_one_req(struct crypto_engine *engine, void *areq)
1783{
1784 struct aead_request *req = container_of(areq, struct aead_request,
1785 base);
1786 struct stm32_cryp_ctx *ctx = crypto_aead_ctx(crypto_aead_reqtfm(req));
1787 struct stm32_cryp *cryp = ctx->cryp;
1788 int err;
1789
1790 if (!cryp)
1791 return -ENODEV;
1792
1793 err = stm32_cryp_prepare_req(NULL, req);
1794 if (err)
1795 return err;
1796
1797 if (!stm32_cryp_get_input_text_len(cryp) && !cryp->header_in &&
1798 !(cryp->flags & FLG_HEADER_DMA)) {
1799 /* No input data to process: get tag and finish */
1800 stm32_cryp_finish_req(cryp, 0);
1801 return 0;
1802 }
1803
1804 if (cryp->flags & FLG_HEADER_DMA)
1805 return stm32_cryp_header_dma_start(cryp);
1806
1807 if (!cryp->header_in && cryp->flags & FLG_IN_OUT_DMA)
1808 return stm32_cryp_dma_start(cryp);
1809
1810 return stm32_cryp_it_start(cryp);
1811}
1812
1813static int stm32_cryp_read_auth_tag(struct stm32_cryp *cryp)
1814{
1815 u32 cfg, size_bit;
1816 unsigned int i;
1817 int ret = 0;
1818
1819 /* Update Config */
1820 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1821
1822 cfg &= ~CR_PH_MASK;
1823 cfg |= CR_PH_FINAL;
1824 cfg &= ~CR_DEC_NOT_ENC;
1825 cfg |= CR_CRYPEN;
1826
1827 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1828
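	/*
	 * Final phase input: GCM expects the 64-bit bit-lengths of the AAD
	 * and of the payload (high words first, zero here since both fit
	 * in 32 bits); CCM expects the CTR0 block instead.
	 */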
1829 if (is_gcm(cryp)) {
1830 /* GCM: write aad and payload size (in bits) */
1831 size_bit = cryp->areq->assoclen * 8;
1832 if (cryp->caps->swap_final)
1833 size_bit = (__force u32)cpu_to_be32(size_bit);
1834
1835 stm32_cryp_write(cryp, cryp->caps->din, 0);
1836 stm32_cryp_write(cryp, cryp->caps->din, size_bit);
1837
1838 size_bit = is_encrypt(cryp) ? cryp->areq->cryptlen :
1839 cryp->areq->cryptlen - cryp->authsize;
1840 size_bit *= 8;
1841 if (cryp->caps->swap_final)
1842 size_bit = (__force u32)cpu_to_be32(size_bit);
1843
1844 stm32_cryp_write(cryp, cryp->caps->din, 0);
1845 stm32_cryp_write(cryp, cryp->caps->din, size_bit);
1846 } else {
1847 /* CCM: write CTR0 */
1848 u32 iv32[AES_BLOCK_32];
1849 u8 *iv = (u8 *)iv32;
1850 __be32 *biv = (__be32 *)iv32;
1851
1852 memcpy(iv, cryp->areq->iv, AES_BLOCK_SIZE);
1853 memset(iv + AES_BLOCK_SIZE - 1 - iv[0], 0, iv[0] + 1);
1854
1855 for (i = 0; i < AES_BLOCK_32; i++) {
1856 u32 xiv = iv32[i];
1857
1858 if (!cryp->caps->padding_wa)
1859 xiv = be32_to_cpu(biv[i]);
1860 stm32_cryp_write(cryp, cryp->caps->din, xiv);
1861 }
1862 }
1863
1864 /* Wait for output data */
1865 ret = stm32_cryp_wait_output(cryp);
1866 if (ret) {
1867 dev_err(cryp->dev, "Timeout (read tag)\n");
1868 return ret;
1869 }
1870
1871 if (is_encrypt(cryp)) {
1872 u32 out_tag[AES_BLOCK_32];
1873
1874 /* Get and write tag */
1875 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
1876 scatterwalk_copychunks(out_tag, &cryp->out_walk, cryp->authsize, 1);
1877 } else {
1878 /* Get and check tag */
1879 u32 in_tag[AES_BLOCK_32], out_tag[AES_BLOCK_32];
1880
1881 scatterwalk_copychunks(in_tag, &cryp->in_walk, cryp->authsize, 0);
1882 readsl(cryp->regs + cryp->caps->dout, out_tag, AES_BLOCK_32);
1883
1884 if (crypto_memneq(in_tag, out_tag, cryp->authsize))
1885 ret = -EBADMSG;
1886 }
1887
1888 /* Disable cryp */
1889 cfg &= ~CR_CRYPEN;
1890 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1891
1892 return ret;
1893}
1894
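/*
 * The hardware increments only the low 32-bit word of the CTR counter
 * and does not carry into the upper words. Cache the counter after
 * each block so that an imminent wrap of the low word can be detected
 * and the full 128-bit value reprogrammed by software.
 */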
1895static void stm32_cryp_check_ctr_counter(struct stm32_cryp *cryp)
1896{
1897 u32 cr;
1898
1899 if (unlikely(cryp->last_ctr[3] == cpu_to_be32(0xFFFFFFFF))) {
1900 /*
1901 * In this case, we need to increment manually the ctr counter,
1902 * as HW doesn't handle the U32 carry.
1903 */
1904 crypto_inc((u8 *)cryp->last_ctr, sizeof(cryp->last_ctr));
1905
1906 cr = stm32_cryp_read(cryp, cryp->caps->cr);
1907 stm32_cryp_write(cryp, cryp->caps->cr, cr & ~CR_CRYPEN);
1908
1909 stm32_cryp_hw_write_iv(cryp, cryp->last_ctr);
1910
1911 stm32_cryp_write(cryp, cryp->caps->cr, cr);
1912 }
1913
1914 /* The IV registers are BE */
1915 cryp->last_ctr[0] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0l));
1916 cryp->last_ctr[1] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv0r));
1917 cryp->last_ctr[2] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1l));
1918 cryp->last_ctr[3] = cpu_to_be32(stm32_cryp_read(cryp, cryp->caps->iv1r));
1919}
1920
1921static void stm32_cryp_irq_read_data(struct stm32_cryp *cryp)
1922{
1923 u32 block[AES_BLOCK_32];
1924
1925 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1926 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1927 cryp->payload_out), 1);
1928 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1929 cryp->payload_out);
1930}
1931
1932static void stm32_cryp_irq_write_block(struct stm32_cryp *cryp)
1933{
1934 u32 block[AES_BLOCK_32] = {0};
1935
1936 scatterwalk_copychunks(block, &cryp->in_walk, min_t(size_t, cryp->hw_blocksize,
1937 cryp->payload_in), 0);
1938 writesl(cryp->regs + cryp->caps->din, block, cryp->hw_blocksize / sizeof(u32));
1939 cryp->payload_in -= min_t(size_t, cryp->hw_blocksize, cryp->payload_in);
1940}
1941
1942static void stm32_cryp_irq_write_gcm_padded_data(struct stm32_cryp *cryp)
1943{
1944 int err;
1945 u32 cfg, block[AES_BLOCK_32] = {0};
1946 unsigned int i;
1947
1948 /* 'Special workaround' procedure described in the datasheet */
1949
1950 /* a) disable ip */
1951 stm32_cryp_write(cryp, cryp->caps->imsc, 0);
1952 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
1953 cfg &= ~CR_CRYPEN;
1954 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1955
1956 /* b) Update IV1R */
1957 stm32_cryp_write(cryp, cryp->caps->iv1r, cryp->gcm_ctr - 2);
1958
1959 /* c) change mode to CTR */
1960 cfg &= ~CR_ALGO_MASK;
1961 cfg |= CR_AES_CTR;
1962 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1963
1964 /* a) enable IP */
1965 cfg |= CR_CRYPEN;
1966 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1967
1968 /* b) pad and write the last block */
1969 stm32_cryp_irq_write_block(cryp);
1970 /* wait end of process */
1971 err = stm32_cryp_wait_output(cryp);
1972 if (err) {
1973 dev_err(cryp->dev, "Timeout (write gcm last data)\n");
1974 return stm32_cryp_finish_req(cryp, err);
1975 }
1976
1977 /* c) get and store encrypted data */
1978 /*
1979 * Same code as stm32_cryp_irq_read_data(), but we want to store
1980 * block value
1981 */
1982 readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));
1983
1984 scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
1985 cryp->payload_out), 1);
1986 cryp->payload_out -= min_t(size_t, cryp->hw_blocksize,
1987 cryp->payload_out);
1988
1989 /* d) change mode back to AES GCM */
1990 cfg &= ~CR_ALGO_MASK;
1991 cfg |= CR_AES_GCM;
1992 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1993
1994 /* e) change phase to Final */
1995 cfg &= ~CR_PH_MASK;
1996 cfg |= CR_PH_FINAL;
1997 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
1998
1999 /* f) write padded data */
2000 writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);
2001
2002 /* g) Empty fifo out */
2003 err = stm32_cryp_wait_output(cryp);
2004 if (err) {
2005 dev_err(cryp->dev, "Timeout (write gcm padded data)\n");
2006 return stm32_cryp_finish_req(cryp, err);
2007 }
2008
2009 for (i = 0; i < AES_BLOCK_32; i++)
2010 stm32_cryp_read(cryp, cryp->caps->dout);
2011
2012 	/* h) run the normal Final phase */
2013 stm32_cryp_finish_req(cryp, 0);
2014}
2015
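/*
 * NPBLB (number of padding bytes in the last block) tells recent
 * versions of the IP how many trailing bytes of the final GCM/CCM
 * block are padding, so that they are ignored in the tag computation.
 * Older versions lack the field and use the padding_wa code paths
 * instead.
 */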
2016static void stm32_cryp_irq_set_npblb(struct stm32_cryp *cryp)
2017{
2018 u32 cfg;
2019
2020 	/* disable ip, set NPBLB and re-enable ip */
2021 cfg = stm32_cryp_read(cryp, cryp->caps->cr);
2022 cfg &= ~CR_CRYPEN;
2023 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2024
2025 cfg |= (cryp->hw_blocksize - cryp->payload_in) << CR_NBPBL_SHIFT;
2026 cfg |= CR_CRYPEN;
2027 stm32_cryp_write(cryp, cryp->caps->cr, cfg);
2028}
2029
static void stm32_cryp_irq_write_ccm_padded_data(struct stm32_cryp *cryp)
{
	int err = 0;
	u32 cfg, iv1tmp;
	u32 cstmp1[AES_BLOCK_32], cstmp2[AES_BLOCK_32];
	u32 block[AES_BLOCK_32] = {0};
	unsigned int i;

	/* 'Special workaround' procedure described in the datasheet */

	/* a) disable ip */
	stm32_cryp_write(cryp, cryp->caps->imsc, 0);

	cfg = stm32_cryp_read(cryp, cryp->caps->cr);
	cfg &= ~CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) get IV1 from CRYP_CSGCMCCM7 */
	iv1tmp = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + 7 * 4);

	/* c) Load CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp1); i++)
		cstmp1[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* d) Write IV1R */
	stm32_cryp_write(cryp, cryp->caps->iv1r, iv1tmp);

	/* e) change mode to CTR */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CTR;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* a) enable IP */
	cfg |= CR_CRYPEN;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* b) pad and write the last block */
	stm32_cryp_irq_write_block(cryp);
	/* wait end of process */
	err = stm32_cryp_wait_output(cryp);
	if (err) {
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");
		return stm32_cryp_finish_req(cryp, err);
	}

	/* c) get and store decrypted data */
	/*
	 * Same code as stm32_cryp_irq_read_data(), but we want to store
	 * block value
	 */
	readsl(cryp->regs + cryp->caps->dout, block, cryp->hw_blocksize / sizeof(u32));

	scatterwalk_copychunks(block, &cryp->out_walk, min_t(size_t, cryp->hw_blocksize,
							     cryp->payload_out), 1);
	cryp->payload_out -= min_t(size_t, cryp->hw_blocksize, cryp->payload_out);

	/* d) Load again CRYP_CSGCMCCMxR */
	for (i = 0; i < ARRAY_SIZE(cstmp2); i++)
		cstmp2[i] = stm32_cryp_read(cryp, CRYP_CSGCMCCM0R + i * 4);

	/* e) change mode back to AES CCM */
	cfg &= ~CR_ALGO_MASK;
	cfg |= CR_AES_CCM;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* f) change phase to header */
	cfg &= ~CR_PH_MASK;
	cfg |= CR_PH_HEADER;
	stm32_cryp_write(cryp, cryp->caps->cr, cfg);

	/* g) XOR and write padded data */
	for (i = 0; i < ARRAY_SIZE(block); i++) {
		block[i] ^= cstmp1[i];
		block[i] ^= cstmp2[i];
		stm32_cryp_write(cryp, cryp->caps->din, block[i]);
	}

	/* h) wait for completion */
	err = stm32_cryp_wait_busy(cryp);
	if (err)
		dev_err(cryp->dev, "Timeout (write ccm padded data)\n");

	/* i) run the normal Final phase */
	stm32_cryp_finish_req(cryp, err);
}

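/*
 * Feed the input FIFO with the next payload block. A last, incomplete
 * block of an AES GCM encryption or AES CCM decryption needs padding
 * first: either the dedicated workaround on older hardware, or NPBLB.
 */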
static void stm32_cryp_irq_write_data(struct stm32_cryp *cryp)
{
	if (unlikely(!cryp->payload_in)) {
		dev_warn(cryp->dev, "No more data to process\n");
		return;
	}

	if (unlikely(cryp->payload_in < AES_BLOCK_SIZE &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_GCM) &&
		     is_encrypt(cryp))) {
		/* Padding for AES GCM encryption */
		if (cryp->caps->padding_wa) {
			/* Special case 1 */
			stm32_cryp_irq_write_gcm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (unlikely((cryp->payload_in < AES_BLOCK_SIZE) &&
		     (stm32_cryp_get_hw_mode(cryp) == CR_AES_CCM) &&
		     is_decrypt(cryp))) {
		/* Padding for AES CCM decryption */
		if (cryp->caps->padding_wa) {
			/* Special case 2 */
			stm32_cryp_irq_write_ccm_padded_data(cryp);
			return;
		}

		/* Setting padding bytes (NPBLB) */
		stm32_cryp_irq_set_npblb(cryp);
	}

	if (is_aes(cryp) && is_ctr(cryp))
		stm32_cryp_check_ctr_counter(cryp);

	stm32_cryp_irq_write_block(cryp);
}

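/*
 * Write one block of additional authenticated data to the input FIFO,
 * zero-padding it when less than AES_BLOCK_SIZE bytes of header remain.
 */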
static void stm32_cryp_irq_write_gcmccm_header(struct stm32_cryp *cryp)
{
	u32 block[AES_BLOCK_32] = {0};
	size_t written;

	written = min_t(size_t, AES_BLOCK_SIZE, cryp->header_in);

	scatterwalk_copychunks(block, &cryp->in_walk, written, 0);

	writesl(cryp->regs + cryp->caps->din, block, AES_BLOCK_32);

	cryp->header_in -= written;

	stm32_crypt_gcmccm_end_header(cryp);
}

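/*
 * Threaded interrupt handler: drain the output FIFO, refill the input
 * FIFO (header or payload for GCM/CCM), mask the interrupts that are no
 * longer needed, and complete the request once all data has moved.
 */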
static irqreturn_t stm32_cryp_irq_thread(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;
	u32 ph;
	u32 it_mask = stm32_cryp_read(cryp, cryp->caps->imsc);

	if (cryp->irq_status & MISR_OUT)
		/* Output FIFO IRQ: read data */
		stm32_cryp_irq_read_data(cryp);

	if (cryp->irq_status & MISR_IN) {
		if (is_gcm(cryp) || is_ccm(cryp)) {
			ph = stm32_cryp_read(cryp, cryp->caps->cr) & CR_PH_MASK;
			if (unlikely(ph == CR_PH_HEADER))
				/* Write Header */
				stm32_cryp_irq_write_gcmccm_header(cryp);
			else
				/* Input FIFO IRQ: write data */
				stm32_cryp_irq_write_data(cryp);
			if (is_gcm(cryp))
				cryp->gcm_ctr++;
		} else {
			/* Input FIFO IRQ: write data */
			stm32_cryp_irq_write_data(cryp);
		}
	}

	/* Mask useless interrupts */
	if (!cryp->payload_in && !cryp->header_in)
		it_mask &= ~IMSCR_IN;
	if (!cryp->payload_out)
		it_mask &= ~IMSCR_OUT;
	stm32_cryp_write(cryp, cryp->caps->imsc, it_mask);

	if (!cryp->payload_in && !cryp->header_in && !cryp->payload_out) {
		local_bh_disable();
		stm32_cryp_finish_req(cryp, 0);
		local_bh_enable();
	}

	return IRQ_HANDLED;
}

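/* Hard IRQ handler: latch the masked interrupt status for the thread. */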
static irqreturn_t stm32_cryp_irq(int irq, void *arg)
{
	struct stm32_cryp *cryp = arg;

	cryp->irq_status = stm32_cryp_read(cryp, cryp->caps->mis);

	return IRQ_WAKE_THREAD;
}

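/*
 * Request and configure the "in" and "out" DMA channels. DMA is
 * optional: when dma_request_chan() returns -ENODEV the probe falls
 * back to IRQ-only operation.
 */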
static int stm32_cryp_dma_init(struct stm32_cryp *cryp)
{
	struct dma_slave_config dma_conf;
	struct dma_chan *chan;
	int ret;

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_MEM_TO_DEV;
	dma_conf.dst_addr = cryp->phys_base + cryp->caps->din;
	dma_conf.dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.dst_maxburst = CRYP_DMA_BURST_REG;
	dma_conf.device_fc = false;

	chan = dma_request_chan(cryp->dev, "in");
	if (IS_ERR(chan))
		return PTR_ERR(chan);

	cryp->dma_lch_in = chan;
	ret = dmaengine_slave_config(cryp->dma_lch_in, &dma_conf);
	if (ret) {
		dma_release_channel(cryp->dma_lch_in);
		cryp->dma_lch_in = NULL;
		dev_err(cryp->dev, "Couldn't configure DMA in slave.\n");
		return ret;
	}

	memset(&dma_conf, 0, sizeof(dma_conf));

	dma_conf.direction = DMA_DEV_TO_MEM;
	dma_conf.src_addr = cryp->phys_base + cryp->caps->dout;
	dma_conf.src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	dma_conf.src_maxburst = CRYP_DMA_BURST_REG;
	dma_conf.device_fc = false;

	chan = dma_request_chan(cryp->dev, "out");
	if (IS_ERR(chan)) {
		dma_release_channel(cryp->dma_lch_in);
		cryp->dma_lch_in = NULL;
		return PTR_ERR(chan);
	}

	cryp->dma_lch_out = chan;

	ret = dmaengine_slave_config(cryp->dma_lch_out, &dma_conf);
	if (ret) {
		dma_release_channel(cryp->dma_lch_out);
		cryp->dma_lch_out = NULL;
		dev_err(cryp->dev, "Couldn't configure DMA out slave.\n");
		dma_release_channel(cryp->dma_lch_in);
		cryp->dma_lch_in = NULL;
		return ret;
	}

	init_completion(&cryp->dma_completion);

	return 0;
}

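/*
 * skcipher algorithms exposed through the crypto engine.
 *
 * A minimal, hypothetical kernel-client sketch (not part of this
 * driver; "key"/"keylen" are placeholders):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_skcipher(tfm);
 */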
static struct skcipher_engine_alg crypto_algs[] = {
{
	.base = {
		.base.cra_name = "ecb(aes)",
		.base.cra_driver_name = "stm32-ecb-aes",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ecb_encrypt,
		.decrypt = stm32_cryp_aes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "cbc(aes)",
		.base.cra_driver_name = "stm32-cbc-aes",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = AES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_cbc_encrypt,
		.decrypt = stm32_cryp_aes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "ctr(aes)",
		.base.cra_driver_name = "stm32-ctr-aes",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = 1,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = AES_MIN_KEY_SIZE,
		.max_keysize = AES_MAX_KEY_SIZE,
		.ivsize = AES_BLOCK_SIZE,
		.setkey = stm32_cryp_aes_setkey,
		.encrypt = stm32_cryp_aes_ctr_encrypt,
		.decrypt = stm32_cryp_aes_ctr_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "ecb(des)",
		.base.cra_driver_name = "stm32-ecb-des",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_ecb_encrypt,
		.decrypt = stm32_cryp_des_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "cbc(des)",
		.base.cra_driver_name = "stm32-cbc-des",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = DES_BLOCK_SIZE,
		.max_keysize = DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_des_setkey,
		.encrypt = stm32_cryp_des_cbc_encrypt,
		.decrypt = stm32_cryp_des_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "ecb(des3_ede)",
		.base.cra_driver_name = "stm32-ecb-des3",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_ecb_encrypt,
		.decrypt = stm32_cryp_tdes_ecb_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
{
	.base = {
		.base.cra_name = "cbc(des3_ede)",
		.base.cra_driver_name = "stm32-cbc-des3",
		.base.cra_priority = 300,
		.base.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.base.cra_blocksize = DES_BLOCK_SIZE,
		.base.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.base.cra_alignmask = 0,
		.base.cra_module = THIS_MODULE,

		.init = stm32_cryp_init_tfm,
		.min_keysize = 3 * DES_BLOCK_SIZE,
		.max_keysize = 3 * DES_BLOCK_SIZE,
		.ivsize = DES_BLOCK_SIZE,
		.setkey = stm32_cryp_tdes_setkey,
		.encrypt = stm32_cryp_tdes_cbc_encrypt,
		.decrypt = stm32_cryp_tdes_cbc_decrypt,
	},
	.op = {
		.do_one_request = stm32_cryp_cipher_one_req,
	},
},
};

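/*
 * AEAD algorithms (AES GCM and CCM), only registered when
 * cryp->caps->aeads_support is set.
 */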
static struct aead_engine_alg aead_algs[] = {
{
	.base.setkey = stm32_cryp_aes_aead_setkey,
	.base.setauthsize = stm32_cryp_aes_gcm_setauthsize,
	.base.encrypt = stm32_cryp_aes_gcm_encrypt,
	.base.decrypt = stm32_cryp_aes_gcm_decrypt,
	.base.init = stm32_cryp_aes_aead_init,
	.base.ivsize = 12,
	.base.maxauthsize = AES_BLOCK_SIZE,

	.base.base = {
		.cra_name = "gcm(aes)",
		.cra_driver_name = "stm32-gcm-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
{
	.base.setkey = stm32_cryp_aes_aead_setkey,
	.base.setauthsize = stm32_cryp_aes_ccm_setauthsize,
	.base.encrypt = stm32_cryp_aes_ccm_encrypt,
	.base.decrypt = stm32_cryp_aes_ccm_decrypt,
	.base.init = stm32_cryp_aes_aead_init,
	.base.ivsize = AES_BLOCK_SIZE,
	.base.maxauthsize = AES_BLOCK_SIZE,

	.base.base = {
		.cra_name = "ccm(aes)",
		.cra_driver_name = "stm32-ccm-aes",
		.cra_priority = 300,
		.cra_flags = CRYPTO_ALG_ASYNC | CRYPTO_ALG_KERN_DRIVER_ONLY,
		.cra_blocksize = 1,
		.cra_ctxsize = sizeof(struct stm32_cryp_ctx),
		.cra_alignmask = 0,
		.cra_module = THIS_MODULE,
	},
	.op = {
		.do_one_request = stm32_cryp_aead_one_req,
	},
},
};

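/*
 * Per-SoC capabilities and register layout: Ux500 has no AEAD support
 * and needs the key/IV handling quirks; F7 supports AEAD but needs the
 * GCM/CCM padding workaround; MP1 uses NPBLB and needs neither.
 */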
static const struct stm32_cryp_caps ux500_data = {
	.aeads_support = false,
	.linear_aes_key = true,
	.kp_mode = false,
	.iv_protection = true,
	.swap_final = true,
	.padding_wa = true,
	.cr = UX500_CRYP_CR,
	.sr = UX500_CRYP_SR,
	.din = UX500_CRYP_DIN,
	.dout = UX500_CRYP_DOUT,
	.dmacr = UX500_CRYP_DMACR,
	.imsc = UX500_CRYP_IMSC,
	.mis = UX500_CRYP_MIS,
	.k1l = UX500_CRYP_K1L,
	.k1r = UX500_CRYP_K1R,
	.k3r = UX500_CRYP_K3R,
	.iv0l = UX500_CRYP_IV0L,
	.iv0r = UX500_CRYP_IV0R,
	.iv1l = UX500_CRYP_IV1L,
	.iv1r = UX500_CRYP_IV1R,
};

static const struct stm32_cryp_caps f7_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = true,
	.padding_wa = true,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.dmacr = CRYP_DMACR,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct stm32_cryp_caps mp1_data = {
	.aeads_support = true,
	.linear_aes_key = false,
	.kp_mode = true,
	.iv_protection = false,
	.swap_final = false,
	.padding_wa = false,
	.cr = CRYP_CR,
	.sr = CRYP_SR,
	.din = CRYP_DIN,
	.dout = CRYP_DOUT,
	.dmacr = CRYP_DMACR,
	.imsc = CRYP_IMSCR,
	.mis = CRYP_MISR,
	.k1l = CRYP_K1LR,
	.k1r = CRYP_K1RR,
	.k3r = CRYP_K3RR,
	.iv0l = CRYP_IV0LR,
	.iv0r = CRYP_IV0RR,
	.iv1l = CRYP_IV1LR,
	.iv1r = CRYP_IV1RR,
};

static const struct of_device_id stm32_dt_ids[] = {
	{ .compatible = "stericsson,ux500-cryp", .data = &ux500_data},
	{ .compatible = "st,stm32f756-cryp", .data = &f7_data},
	{ .compatible = "st,stm32mp1-cryp", .data = &mp1_data},
	{},
};
MODULE_DEVICE_TABLE(of, stm32_dt_ids);

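/*
 * Probe: map the registers, request the threaded IRQ and the clock,
 * reset the IP, optionally set up DMA, then register the algorithms on
 * a dedicated crypto engine. Runtime PM is enabled before the hardware
 * is touched.
 */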
static int stm32_cryp_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct stm32_cryp *cryp;
	struct reset_control *rst;
	int irq, ret;

	cryp = devm_kzalloc(dev, sizeof(*cryp), GFP_KERNEL);
	if (!cryp)
		return -ENOMEM;

	cryp->caps = of_device_get_match_data(dev);
	if (!cryp->caps)
		return -ENODEV;

	cryp->dev = dev;

	cryp->regs = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(cryp->regs))
		return PTR_ERR(cryp->regs);

	cryp->phys_base = platform_get_resource(pdev, IORESOURCE_MEM, 0)->start;

	irq = platform_get_irq(pdev, 0);
	if (irq < 0)
		return irq;

	ret = devm_request_threaded_irq(dev, irq, stm32_cryp_irq,
					stm32_cryp_irq_thread, IRQF_ONESHOT,
					dev_name(dev), cryp);
	if (ret) {
		dev_err(dev, "Cannot grab IRQ\n");
		return ret;
	}

	cryp->clk = devm_clk_get(dev, NULL);
	if (IS_ERR(cryp->clk)) {
		dev_err_probe(dev, PTR_ERR(cryp->clk), "Could not get clock\n");

		return PTR_ERR(cryp->clk);
	}

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to enable clock\n");
		return ret;
	}

	pm_runtime_set_autosuspend_delay(dev, CRYP_AUTOSUSPEND_DELAY);
	pm_runtime_use_autosuspend(dev);

	pm_runtime_get_noresume(dev);
	pm_runtime_set_active(dev);
	pm_runtime_enable(dev);

	rst = devm_reset_control_get(dev, NULL);
	if (IS_ERR(rst)) {
		ret = PTR_ERR(rst);
		if (ret == -EPROBE_DEFER)
			goto err_rst;
	} else {
		reset_control_assert(rst);
		udelay(2);
		reset_control_deassert(rst);
	}

	platform_set_drvdata(pdev, cryp);

	ret = stm32_cryp_dma_init(cryp);
	switch (ret) {
	case 0:
		break;
	case -ENODEV:
		dev_dbg(dev, "DMA mode not available\n");
		break;
	default:
		goto err_dma;
	}

	spin_lock(&cryp_list.lock);
	list_add(&cryp->list, &cryp_list.dev_list);
	spin_unlock(&cryp_list.lock);

	/* Initialize crypto engine */
	cryp->engine = crypto_engine_alloc_init(dev, 1);
	if (!cryp->engine) {
		dev_err(dev, "Could not init crypto engine\n");
		ret = -ENOMEM;
		goto err_engine1;
	}

	ret = crypto_engine_start(cryp->engine);
	if (ret) {
		dev_err(dev, "Could not start crypto engine\n");
		goto err_engine2;
	}

	ret = crypto_engine_register_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
	if (ret) {
		dev_err(dev, "Could not register algs\n");
		goto err_algs;
	}

	if (cryp->caps->aeads_support) {
		ret = crypto_engine_register_aeads(aead_algs, ARRAY_SIZE(aead_algs));
		if (ret)
			goto err_aead_algs;
	}

	dev_info(dev, "Initialized\n");

	pm_runtime_put_sync(dev);

	return 0;

err_aead_algs:
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));
err_algs:
err_engine2:
	crypto_engine_exit(cryp->engine);
err_engine1:
	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	if (cryp->dma_lch_in)
		dma_release_channel(cryp->dma_lch_in);
	if (cryp->dma_lch_out)
		dma_release_channel(cryp->dma_lch_out);
err_dma:
err_rst:
	pm_runtime_disable(dev);
	pm_runtime_put_noidle(dev);

	clk_disable_unprepare(cryp->clk);

	return ret;
}

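/*
 * Tear down in reverse probe order: unregister the algorithms, stop the
 * engine, release the DMA channels and runtime PM, then gate the clock.
 */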
static void stm32_cryp_remove(struct platform_device *pdev)
{
	struct stm32_cryp *cryp = platform_get_drvdata(pdev);
	int ret;

	ret = pm_runtime_get_sync(cryp->dev);

	if (cryp->caps->aeads_support)
		crypto_engine_unregister_aeads(aead_algs, ARRAY_SIZE(aead_algs));
	crypto_engine_unregister_skciphers(crypto_algs, ARRAY_SIZE(crypto_algs));

	crypto_engine_exit(cryp->engine);

	spin_lock(&cryp_list.lock);
	list_del(&cryp->list);
	spin_unlock(&cryp_list.lock);

	if (cryp->dma_lch_in)
		dma_release_channel(cryp->dma_lch_in);

	if (cryp->dma_lch_out)
		dma_release_channel(cryp->dma_lch_out);

	pm_runtime_disable(cryp->dev);
	pm_runtime_put_noidle(cryp->dev);

	if (ret >= 0)
		clk_disable_unprepare(cryp->clk);
}

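/* Runtime PM: only the bus clock is gated while the device is idle. */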
#ifdef CONFIG_PM
static int stm32_cryp_runtime_suspend(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);

	clk_disable_unprepare(cryp->clk);

	return 0;
}

static int stm32_cryp_runtime_resume(struct device *dev)
{
	struct stm32_cryp *cryp = dev_get_drvdata(dev);
	int ret;

	ret = clk_prepare_enable(cryp->clk);
	if (ret) {
		dev_err(cryp->dev, "Failed to prepare_enable clock\n");
		return ret;
	}

	return 0;
}
#endif

static const struct dev_pm_ops stm32_cryp_pm_ops = {
	SET_SYSTEM_SLEEP_PM_OPS(pm_runtime_force_suspend,
				pm_runtime_force_resume)
	SET_RUNTIME_PM_OPS(stm32_cryp_runtime_suspend,
			   stm32_cryp_runtime_resume, NULL)
};

static struct platform_driver stm32_cryp_driver = {
	.probe = stm32_cryp_probe,
	.remove = stm32_cryp_remove,
	.driver = {
		.name = DRIVER_NAME,
		.pm = &stm32_cryp_pm_ops,
		.of_match_table = stm32_dt_ids,
	},
};

module_platform_driver(stm32_cryp_driver);

MODULE_AUTHOR("Fabien Dessenne <fabien.dessenne@st.com>");
MODULE_DESCRIPTION("STMicroelectronics STM32 CRYP hardware driver");
MODULE_LICENSE("GPL");