// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
    !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
        return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
        struct module *module = alg->cra_module;

        crypto_alg_put(alg);
        module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
                                              u32 mask)
{
        struct crypto_alg *q, *alg = NULL;
        int best = -2;

        list_for_each_entry(q, &crypto_alg_list, cra_list) {
                int exact, fuzzy;

                if (crypto_is_moribund(q))
                        continue;

                if ((q->cra_flags ^ type) & mask)
                        continue;

                exact = !strcmp(q->cra_driver_name, name);
                fuzzy = !strcmp(q->cra_name, name);
                if (!exact && !(fuzzy && q->cra_priority > best))
                        continue;

                if (unlikely(!crypto_mod_get(q)))
                        continue;

                best = q->cra_priority;
                if (alg)
                        crypto_mod_put(alg);
                alg = q;

                if (exact)
                        break;
        }

        return alg;
}

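/*
 * Illustrative example (hypothetical driver names and priorities): if
 * a generic implementation (cra_name "aes", cra_driver_name
 * "aes-generic", priority 100) and an accelerated one (cra_name "aes",
 * priority 300) are both registered, __crypto_alg_lookup("aes", ...)
 * returns the priority-300 implementation, while
 * __crypto_alg_lookup("aes-generic", ...) matches cra_driver_name
 * exactly and returns the generic one regardless of priority.
 */

/*
 * A larval is a temporary placeholder algorithm: it sits on
 * crypto_alg_list while a lookup waits for a module load, template
 * instantiation or self-test to finish, and resolves to the "adult"
 * algorithm (larval->adult) once that completes.
 */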
static void crypto_larval_destroy(struct crypto_alg *alg)
{
        struct crypto_larval *larval = (void *)alg;

        BUG_ON(!crypto_is_larval(alg));
        if (!IS_ERR_OR_NULL(larval->adult))
                crypto_mod_put(larval->adult);
        kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
        struct crypto_larval *larval;

        larval = kzalloc(sizeof(*larval), GFP_KERNEL);
        if (!larval)
                return ERR_PTR(-ENOMEM);

        type &= ~CRYPTO_ALG_TYPE_MASK | (mask ?: CRYPTO_ALG_TYPE_MASK);

        larval->mask = mask;
        larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
        larval->alg.cra_priority = -1;
        larval->alg.cra_destroy = crypto_larval_destroy;

        strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
        init_completion(&larval->completion);

        return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
                                            u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_larval *larval;

        larval = crypto_larval_alloc(name, type, mask);
        if (IS_ERR(larval))
                return ERR_CAST(larval);

        refcount_set(&larval->alg.cra_refcnt, 2);

        down_write(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, type, mask);
        if (!alg) {
                alg = &larval->alg;
                list_add(&alg->cra_list, &crypto_alg_list);
        }
        up_write(&crypto_alg_sem);

        if (alg != &larval->alg) {
                kfree(larval);
                if (crypto_is_larval(alg))
                        alg = crypto_larval_wait(alg);
        }

        return alg;
}

static void crypto_larval_kill(struct crypto_larval *larval)
{
        bool unlinked;

        down_write(&crypto_alg_sem);
        unlinked = list_empty(&larval->alg.cra_list);
        if (!unlinked)
                list_del_init(&larval->alg.cra_list);
        up_write(&crypto_alg_sem);

        if (unlinked)
                return;

        complete_all(&larval->completion);
        crypto_alg_put(&larval->alg);
}

void crypto_schedule_test(struct crypto_larval *larval)
{
        int err;

        err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
        WARN_ON_ONCE(err != NOTIFY_STOP);
}
EXPORT_SYMBOL_GPL(crypto_schedule_test);

static void crypto_start_test(struct crypto_larval *larval)
{
        if (!crypto_is_test_larval(larval))
                return;

        if (larval->test_started)
                return;

        down_write(&crypto_alg_sem);
        if (larval->test_started) {
                up_write(&crypto_alg_sem);
                return;
        }

        larval->test_started = true;
        up_write(&crypto_alg_sem);

        crypto_schedule_test(larval);
}

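/*
 * Wait for a larval to resolve.  Sleeps (killably, with a 60 second
 * timeout) on the larval's completion, then maps the outcome: a fatal
 * signal yields -EINTR; a timeout kills a test larval and yields
 * -ETIMEDOUT; a larval that completed without producing an adult
 * triggers a fresh lookup; an adult that failed its self-test or is
 * FIPS-internal yields -EAGAIN.  If the result is itself a larval,
 * the wait is restarted.
 */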
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
        struct crypto_larval *larval;
        long time_left;

again:
        larval = container_of(alg, struct crypto_larval, alg);

        if (!crypto_boot_test_finished())
                crypto_start_test(larval);

        time_left = wait_for_completion_killable_timeout(
                &larval->completion, 60 * HZ);

        alg = larval->adult;
        if (time_left < 0)
                alg = ERR_PTR(-EINTR);
        else if (!time_left) {
                if (crypto_is_test_larval(larval))
                        crypto_larval_kill(larval);
                alg = ERR_PTR(-ETIMEDOUT);
        } else if (!alg) {
                u32 type;
                u32 mask;

                alg = &larval->alg;
                type = alg->cra_flags & ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
                mask = larval->mask;
                alg = crypto_alg_lookup(alg->cra_name, type, mask) ?:
                      ERR_PTR(-EAGAIN);
        } else if (IS_ERR(alg))
                ;
        else if (crypto_is_test_larval(larval) &&
                 !(alg->cra_flags & CRYPTO_ALG_TESTED))
                alg = ERR_PTR(-EAGAIN);
        else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
                alg = ERR_PTR(-EAGAIN);
        else if (!crypto_mod_get(alg))
                alg = ERR_PTR(-EAGAIN);
        crypto_mod_put(&larval->alg);

        if (!IS_ERR(alg) && crypto_is_larval(alg))
                goto again;

        return alg;
}

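/*
 * Look up an algorithm, preferring implementations that have passed
 * their self-test.  Unless the caller matches on CRYPTO_ALG_TESTED
 * explicitly, the first pass requires the flag; a second pass runs
 * only to distinguish "self-test failed" (-ELIBBAD) from "not found".
 * Algorithms marked CRYPTO_ALG_FIPS_INTERNAL are hidden from callers
 * that did not explicitly ask for them.
 */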
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
                                            u32 mask)
{
        const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
        struct crypto_alg *alg;
        u32 test = 0;

        if (!((type | mask) & CRYPTO_ALG_TESTED))
                test |= CRYPTO_ALG_TESTED;

        down_read(&crypto_alg_sem);
        alg = __crypto_alg_lookup(name, (type | test) & ~fips,
                                  (mask | test) & ~fips);
        if (alg) {
                if (((type | mask) ^ fips) & fips)
                        mask |= fips;
                mask &= fips;

                if (!crypto_is_larval(alg) &&
                    ((type ^ alg->cra_flags) & mask)) {
                        /* Algorithm is disallowed in FIPS mode. */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ENOENT);
                }
        } else if (test) {
                alg = __crypto_alg_lookup(name, type, mask);
                if (alg && !crypto_is_larval(alg)) {
                        /* Test failed */
                        crypto_mod_put(alg);
                        alg = ERR_PTR(-ELIBBAD);
                }
        }
        up_read(&crypto_alg_sem);

        return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
                                               u32 mask)
{
        struct crypto_alg *alg;

        if (!name)
                return ERR_PTR(-ENOENT);

        type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
        mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

        alg = crypto_alg_lookup(name, type, mask);
        if (!alg && !(mask & CRYPTO_NOLOAD)) {
                request_module("crypto-%s", name);

                if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
                      CRYPTO_ALG_NEED_FALLBACK))
                        request_module("crypto-%s-all", name);

                alg = crypto_alg_lookup(name, type, mask);
        }

        if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
                alg = crypto_larval_wait(alg);
        else if (alg)
                ;
        else if (!(mask & CRYPTO_ALG_TESTED))
                alg = crypto_larval_add(name, type, mask);
        else
                alg = ERR_PTR(-ENOENT);

        return alg;
}

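/*
 * Module autoloading note: the request_module("crypto-%s", name) calls
 * above rely on implementations declaring a matching module alias; the
 * MODULE_ALIAS_CRYPTO() macro adds the "crypto-" prefix for exactly
 * this purpose.  The additional "crypto-%s-all" request is only issued
 * when the caller does not insist on a fallback-free implementation
 * (see the CRYPTO_ALG_NEED_FALLBACK check).
 */
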
int crypto_probing_notify(unsigned long val, void *v)
{
        int ok;

        ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        if (ok == NOTIFY_DONE) {
                request_module("cryptomgr");
                ok = blocking_notifier_call_chain(&crypto_chain, val, v);
        }

        return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
        struct crypto_alg *alg;
        struct crypto_alg *larval;
        int ok;

        /*
         * If the internal flag is set for a cipher, require a caller to
         * invoke the cipher with the internal flag to use that cipher.
         * Also, if a caller wants to allocate a cipher that may or may
         * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
         * !(mask & CRYPTO_ALG_INTERNAL).
         */
        if (!((type | mask) & CRYPTO_ALG_INTERNAL))
                mask |= CRYPTO_ALG_INTERNAL;

        larval = crypto_larval_lookup(name, type, mask);
        if (IS_ERR(larval) || !crypto_is_larval(larval))
                return larval;

        ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

        if (ok == NOTIFY_STOP)
                alg = crypto_larval_wait(larval);
        else {
                crypto_mod_put(larval);
                alg = ERR_PTR(-ENOENT);
        }
        crypto_larval_kill(container_of(larval, struct crypto_larval, alg));
        return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
        const struct crypto_type *type = tfm->__crt_alg->cra_type;

        if (type && tfm->exit)
                tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
        const struct crypto_type *type_obj = alg->cra_type;
        unsigned int len;

        len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
        if (type_obj)
                return len + type_obj->ctxsize(alg, type, mask);

        switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
        default:
                BUG();

        case CRYPTO_ALG_TYPE_CIPHER:
                len += crypto_cipher_ctxsize(alg);
                break;

        case CRYPTO_ALG_TYPE_COMPRESS:
                len += crypto_compress_ctxsize(alg);
                break;
        }

        return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
        down_write(&crypto_alg_sem);
        alg->cra_flags |= CRYPTO_ALG_DYING;
        up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
                                         u32 mask, gfp_t gfp)
{
        struct crypto_tfm *tfm;
        unsigned int tfm_size;
        int err = -ENOMEM;

        tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
        tfm = kzalloc(tfm_size, gfp);
        if (tfm == NULL)
                goto out_err;

        tfm->__crt_alg = alg;
        refcount_set(&tfm->refcnt, 1);

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(tfm);
out_err:
        tfm = ERR_PTR(err);
out:
        return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
                                      u32 mask)
{
        return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of an indeterminate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
        struct crypto_tfm *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_alg_mod_lookup(alg_name, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = __crypto_alloc_tfm(alg, type, mask);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

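/*
 * Usage sketch (illustrative, not part of this file): allocate a bare
 * transform by name and release it again.  "aes" is an example name;
 * for a transform obtained from crypto_alloc_base() the tfm pointer is
 * also the start of the allocation, so it doubles as the @mem argument
 * of crypto_destroy_tfm() below.
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
 *				CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_destroy_tfm(tfm, tfm);
 */
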
static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
                                 const struct crypto_type *frontend, int node,
                                 gfp_t gfp)
{
        struct crypto_tfm *tfm;
        unsigned int tfmsize;
        unsigned int total;
        char *mem;

        tfmsize = frontend->tfmsize;
        total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

        mem = kzalloc_node(total, gfp, node);
        if (mem == NULL)
                return ERR_PTR(-ENOMEM);

        tfm = (struct crypto_tfm *)(mem + tfmsize);
        tfm->__crt_alg = alg;
        tfm->node = node;
        refcount_set(&tfm->refcnt, 1);

        return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
                             const struct crypto_type *frontend,
                             int node)
{
        struct crypto_tfm *tfm;
        char *mem;
        int err;

        mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
        if (IS_ERR(mem))
                goto out;

        tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

        err = frontend->init_tfm(tfm);
        if (err)
                goto out_free_tfm;

        if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
                goto cra_init_failed;

        goto out;

cra_init_failed:
        crypto_exit_ops(tfm);
out_free_tfm:
        if (err == -EAGAIN)
                crypto_shoot_alg(alg);
        kfree(mem);
        mem = ERR_PTR(err);
out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
                       struct crypto_tfm *otfm)
{
        struct crypto_alg *alg = otfm->__crt_alg;
        struct crypto_tfm *tfm;
        char *mem;

        mem = ERR_PTR(-ESTALE);
        if (unlikely(!crypto_mod_get(alg)))
                goto out;

        mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
        if (IS_ERR(mem)) {
                crypto_mod_put(alg);
                goto out;
        }

        tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
        tfm->crt_flags = otfm->crt_flags;
        tfm->exit = otfm->exit;

out:
        return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
                                   const struct crypto_type *frontend,
                                   u32 type, u32 mask)
{
        if (frontend) {
                type &= frontend->maskclear;
                mask &= frontend->maskclear;
                type |= frontend->type;
                mask |= frontend->maskset;
        }

        return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node on which to allocate the transform and on which
 *	requests should preferably be processed; NUMA_NO_NODE means
 *	the caller has no preference.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of an indeterminate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
                            const struct crypto_type *frontend, u32 type,
                            u32 mask, int node)
{
        void *tfm;
        int err;

        for (;;) {
                struct crypto_alg *alg;

                alg = crypto_find_alg(alg_name, frontend, type, mask);
                if (IS_ERR(alg)) {
                        err = PTR_ERR(alg);
                        goto err;
                }

                tfm = crypto_create_tfm_node(alg, frontend, node);
                if (!IS_ERR(tfm))
                        return tfm;

                crypto_mod_put(alg);
                err = PTR_ERR(tfm);

err:
                if (err != -EAGAIN)
                        break;
                if (fatal_signal_pending(current)) {
                        err = -EINTR;
                        break;
                }
        }

        return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

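/*
 * Usage sketch (illustrative): callers normally reach this through a
 * typed wrapper such as crypto_alloc_skcipher(), which supplies the
 * skcipher crypto_type as @frontend:
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_skcipher(tfm);
 */
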
/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
        struct crypto_alg *alg;

        if (IS_ERR_OR_NULL(mem))
                return;

        if (!refcount_dec_and_test(&tfm->refcnt))
                return;
        alg = tfm->__crt_alg;

        if (!tfm->exit && alg->cra_exit)
                alg->cra_exit(tfm);
        crypto_exit_ops(tfm);
        crypto_mod_put(alg);
        kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
        int ret = 0;
        struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

        if (!IS_ERR(alg)) {
                crypto_mod_put(alg);
                ret = 1;
        }

        return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

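/*
 * Usage sketch (illustrative): probe for an algorithm without keeping
 * a reference.  Note that this may trigger module autoloading.
 *
 *	if (!crypto_has_alg("sha256", 0, 0))
 *		return -ENOENT;
 */
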
void crypto_req_done(void *data, int err)
{
        struct crypto_wait *wait = data;

        if (err == -EINPROGRESS)
                return;

        wait->err = err;
        complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);

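/*
 * Usage sketch (illustrative): crypto_req_done() is the completion
 * callback behind the crypto_wait_req() pattern for driving an async
 * request synchronously, e.g. with an skcipher request:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */
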
MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");