// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

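/*
 * Find the best live algorithm matching @name under @type/@mask.  An
 * exact match on cra_driver_name wins outright; otherwise the
 * highest-priority entry whose cra_name matches is returned.  A
 * reference is taken on the winner via crypto_mod_get().
 */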
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

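/*
 * Larvals are temporary placeholder entries that stand in for an
 * algorithm while a lookup, module load or self-test for it is still
 * in flight.  Waiters sleep on ->completion until the larval matures
 * into its ->adult algorithm or is killed.
 */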
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
					 u32 mask, gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, gfp);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;
	refcount_set(&tfm->refcnt, 1);

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/**
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
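
/*
 * Example (an illustrative sketch, not part of this file): a legacy
 * caller might allocate a transform by name and release it again with
 * crypto_free_tfm():
 *
 *	struct crypto_tfm *tfm = crypto_alloc_base("aes", 0, 0);
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */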

static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
				 const struct crypto_type *frontend, int node,
				 gfp_t gfp)
{
	struct crypto_tfm *tfm;
	unsigned int tfmsize;
	unsigned int total;
	char *mem;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, gfp, node);
	if (mem == NULL)
		return ERR_PTR(-ENOMEM);

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;
	refcount_set(&tfm->refcnt, 1);

	return mem;
}

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	struct crypto_tfm *tfm;
	char *mem;
	int err;

	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
	if (IS_ERR(mem))
		goto out;

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

void *crypto_clone_tfm(const struct crypto_type *frontend,
		       struct crypto_tfm *otfm)
{
	struct crypto_alg *alg = otfm->__crt_alg;
	struct crypto_tfm *tfm;
	char *mem;

	mem = ERR_PTR(-ESTALE);
	if (unlikely(!crypto_mod_get(alg)))
		goto out;

	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
	if (IS_ERR(mem)) {
		crypto_mod_put(alg);
		goto out;
	}

	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
	tfm->crt_flags = otfm->crt_flags;
	tfm->exit = otfm->exit;

out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_clone_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/**
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node on which to allocate the transform; NUMA_NO_NODE
 *	means the caller has no special requirement.
 *
 * crypto_alloc_tfm_node() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
			    const struct crypto_type *frontend, u32 type, u32 mask,
			    int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
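
/*
 * Callers normally reach crypto_alloc_tfm_node() through type-specific
 * wrappers such as crypto_alloc_skcipher() or crypto_alloc_shash(),
 * which supply the matching frontend; crypto_find_alg() then folds the
 * frontend's type and mask bits into the lookup.
 */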

/**
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	if (!refcount_dec_and_test(&tfm->refcnt))
		return;
	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
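
/*
 * Example (sketch): probe whether an algorithm is usable before
 * committing to it:
 *
 *	if (!crypto_has_alg("gcm(aes)", 0, 0))
 *		return -ENOENT;
 */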

void crypto_req_done(void *data, int err)
{
	struct crypto_wait *wait = data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
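
/*
 * Example (sketch): crypto_req_done() pairs with crypto_wait_req() to
 * drive an asynchronous request synchronously.  The skcipher calls
 * below are illustrative:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
 *				      CRYPTO_TFM_REQ_MAY_SLEEP,
 *				      crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
 */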

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");