// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/jump_label.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

#ifndef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
EXPORT_SYMBOL_GPL(__crypto_boot_test_finished);
#endif

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

void crypto_wait_for_test(struct crypto_larval *larval)
{
	int err;

	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
	if (WARN_ON_ONCE(err != NOTIFY_STOP))
		goto out;

	err = wait_for_completion_killable(&larval->completion);
	WARN_ON(err);
out:
	crypto_larval_kill(&larval->alg);
}
EXPORT_SYMBOL_GPL(crypto_wait_for_test);

static void crypto_start_test(struct crypto_larval *larval)
{
	if (!crypto_is_test_larval(larval))
		return;

	if (larval->test_started)
		return;

	down_write(&crypto_alg_sem);
	if (larval->test_started) {
		up_write(&crypto_alg_sem);
		return;
	}

	larval->test_started = true;
	up_write(&crypto_alg_sem);

	crypto_wait_for_test(larval);
}

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	if (!crypto_boot_test_finished())
		crypto_start_test(larval);

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
				  (mask | test) & ~fips);
	if (alg) {
		if (((type | mask) ^ fips) & fips)
			mask |= fips;
		mask &= fips;

		if (!crypto_is_larval(alg) &&
		    ((type ^ alg->cra_flags) & mask)) {
			/* Algorithm is disallowed in FIPS mode. */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ENOENT);
		}
	} else if (test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
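
/*
 * Usage sketch for the CRYPTO_ALG_INTERNAL convention described in
 * crypto_alg_mod_lookup() above. Illustrative only and guarded out of
 * the build; the "xts(aes)" name is an assumption for the example, not
 * something this file requires.
 */
#if 0
static void internal_flag_example(void)
{
	struct crypto_alg *alg;

	/*
	 * type and mask both have CRYPTO_ALG_INTERNAL clear, so the
	 * lookup ORs the flag into mask and internal-only
	 * implementations are rejected (the common case).
	 */
	alg = crypto_alg_mod_lookup("xts(aes)", 0, 0);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);

	/*
	 * type | CRYPTO_ALG_INTERNAL with the flag clear in mask leaves
	 * the bit unconstrained: both internal and non-internal
	 * implementations may match.
	 */
	alg = crypto_alg_mod_lookup("xts(aes)", CRYPTO_ALG_INTERNAL, 0);
	if (!IS_ERR(alg))
		crypto_mod_put(alg);
}
#endif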

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
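
/*
 * Minimal usage sketch for crypto_alloc_base(), guarded out of the
 * build. Illustrative only: the "aes" single-block cipher and the
 * type/mask pair (matching what crypto_alloc_cipher() would pass) are
 * assumptions for the example.
 */
#if 0
static int alloc_base_example(void)
{
	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... drive the transform through a type-specific wrapper ... */

	crypto_free_tfm(tfm);	/* drops the algorithm refcount */
	return 0;
}
#endif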

void *crypto_create_tfm_node(struct crypto_alg *alg,
			     const struct crypto_type *frontend,
			     int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm_node - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 * @node: NUMA node in which users desire to put requests; if node is
 *	  NUMA_NO_NODE, users have no special placement requirement.
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_skcipher().
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm_node(const char *alg_name,
			const struct crypto_type *frontend, u32 type, u32 mask,
			int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
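
/*
 * Sketch of the node-agnostic wrapper behaviour described above,
 * guarded out of the build. Illustrative only: it mirrors how
 * crypto_alloc_tfm() is expected to forward to crypto_alloc_tfm_node()
 * with NUMA_NO_NODE when the caller has no placement requirement.
 */
#if 0
static void *alloc_tfm_any_node(const char *alg_name,
				const struct crypto_type *frontend,
				u32 type, u32 mask)
{
	/* NUMA_NO_NODE: no preferred node for the tfm or its requests. */
	return crypto_alloc_tfm_node(alg_name, frontend, type, mask,
				     NUMA_NO_NODE);
}
#endif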

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
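
/*
 * Sketch clarifying the @mem/@tfm split, guarded out of the build and
 * modelled on the inline free wrappers in <linux/crypto.h> (the exact
 * wrapper shapes here are assumptions). @mem is the start of the whole
 * allocation made by crypto_create_tfm_node(); @tfm is the crypto_tfm
 * embedded within it.
 */
#if 0
static void destroy_tfm_examples(struct crypto_tfm *tfm,
				 struct crypto_skcipher *skcipher)
{
	/* Bare transform: the allocation starts at the tfm itself. */
	crypto_destroy_tfm(tfm, tfm);

	/* Frontend object: the allocation starts at the frontend. */
	crypto_destroy_tfm(skcipher, crypto_skcipher_tfm(skcipher));
}
#endif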

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
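
/*
 * Sketch of the synchronous-wait pattern that crypto_req_done() serves,
 * guarded out of the build. Illustrative only: DECLARE_CRYPTO_WAIT and
 * crypto_wait_req() come from <linux/crypto.h>, and the skcipher
 * request setup is an assumption for the example.
 */
#if 0
static int wait_pattern_example(struct skcipher_request *req)
{
	DECLARE_CRYPTO_WAIT(wait);

	/* Route the async completion into crypto_req_done(). */
	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	/* -EINPROGRESS/-EBUSY mean "sleep until the callback fires". */
	return crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
}
#endif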

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");