// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

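/*
 * Algorithm lookup uses "larval" placeholders: while an algorithm is being
 * loaded or constructed, a larval entry is registered under its name in
 * crypto_alg_list.  Concurrent lookups block on larval->completion until
 * the larval either matures (larval->adult points to the finished
 * algorithm) or is killed.
 */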
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

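/*
 * Only test larvals (created while the crypto manager self-tests a newly
 * registered algorithm) carry a driver name; larvals created for plain
 * lookups leave cra_driver_name empty.
 */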
static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

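	/* best starts at -2 so that even a larval (cra_priority == -1) can be selected */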
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

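/*
 * Unless the caller explicitly asked about CRYPTO_ALG_TESTED, prefer
 * algorithms that have passed their self-tests.  If only an untested,
 * non-larval instance exists, its tests must have failed, so report
 * -ELIBBAD instead of using it.
 */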
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
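		/*
		 * Algorithm modules advertise "crypto-<name>" aliases via
		 * MODULE_ALIAS_CRYPTO(), so this can resolve e.g. "aes" to
		 * whichever module provides that algorithm.
		 */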
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

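/*
 * Mark an algorithm as dying so the retry loops in crypto_alloc_base() and
 * crypto_alloc_tfm() do not pick the same instance again after its
 * cra_init() failed with -EAGAIN.
 */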
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 * crypto_alloc_base - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * This function should not be used by new algorithm types.
 * Please use crypto_alloc_tfm instead.
 *
 * crypto_alloc_base() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
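
/*
 * Illustrative usage sketch (not part of this file; example_alloc_base() is
 * a hypothetical caller wanting a bare single-block cipher handle).
 * crypto_free_tfm() is the inline wrapper around crypto_destroy_tfm() from
 * <linux/crypto.h>.
 */
#if 0
static int example_alloc_base(void)
{
	struct crypto_tfm *tfm;

	/* Look up (and, if needed, load) a single-block AES implementation. */
	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	/* ... use the transform via the cipher API ... */

	crypto_free_tfm(tfm);
	return 0;
}
#endif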

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 * crypto_alloc_tfm - Locate algorithm and allocate transform
 * @alg_name: Name of algorithm
 * @frontend: Frontend algorithm type
 * @type: Type of algorithm
 * @mask: Mask for type comparison
 *
 * crypto_alloc_tfm() will first attempt to locate an already loaded
 * algorithm. If that fails and the kernel supports dynamically loadable
 * modules, it will then attempt to load a module of the same name or
 * alias. If that fails it will send a query to any loaded crypto manager
 * to construct an algorithm on the fly. A refcount is grabbed on the
 * algorithm which is then associated with the new transform.
 *
 * The returned transform is of a non-determinate type. Most people
 * should use one of the more specific allocation functions such as
 * crypto_alloc_blkcipher.
 *
 * In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);
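
/*
 * Sketch of how a type frontend typically wraps crypto_alloc_tfm(); this
 * mirrors what allocators such as crypto_alloc_shash() in crypto/shash.c
 * do, but is shown here only as an illustration.
 */
#if 0
struct crypto_shash *crypto_alloc_shash(const char *alg_name, u32 type,
					u32 mask)
{
	return crypto_alloc_tfm(alg_name, &crypto_shash_type, type, mask);
}
#endif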

/*
 * crypto_destroy_tfm - Free crypto transform
 * @mem: Start of tfm slab
 * @tfm: Transform to free
 *
 * This function frees up the transform and any associated resources,
 * then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
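
/*
 * Illustrative sketch (hypothetical caller): probe whether an algorithm is
 * usable, loading it on demand, before committing to a dependent code path.
 */
#if 0
	if (!crypto_has_alg("sha256", 0, 0))
		return -ENOENT;
#endif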

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
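
/*
 * Illustrative sketch: crypto_req_done() is meant to be installed as the
 * completion callback of an async request and paired with crypto_wait_req()
 * and DECLARE_CRYPTO_WAIT() from <linux/crypto.h>.  Here "req" is assumed to
 * be an already set-up struct skcipher_request *.
 */
#if 0
	DECLARE_CRYPTO_WAIT(wait);

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG |
				      CRYPTO_TFM_REQ_MAY_SLEEP,
				      crypto_req_done, &wait);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
#endif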

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");