// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Cryptographic API for algorithms (i.e., low-level API).
 *
 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 */

#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <linux/err.h>
#include <linux/errno.h>
#include <linux/fips.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/list.h>
#include <linux/module.h>
#include <linux/rtnetlink.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/workqueue.h>

#include "internal.h"

static LIST_HEAD(crypto_template_list);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif

static inline void crypto_check_module_sig(struct module *mod)
{
	if (fips_enabled && mod && !module_sig_ok(mod))
		panic("Module %s signature verification failed in FIPS mode\n",
		      module_name(mod));
}

static int crypto_check_alg(struct crypto_alg *alg)
{
	crypto_check_module_sig(alg->cra_module);

	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
		return -EINVAL;

	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
		return -EINVAL;

	/* General maximums for all algs. */
	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
		return -EINVAL;

	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
		return -EINVAL;

	/* Lower maximums for specific alg types. */
	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
			      CRYPTO_ALG_TYPE_CIPHER) {
		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
			return -EINVAL;

		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
			return -EINVAL;
	}

	if (alg->cra_priority < 0)
		return -EINVAL;

	refcount_set(&alg->cra_refcnt, 1);

	return 0;
}

static void crypto_free_instance(struct crypto_instance *inst)
{
	inst->alg.cra_type->free(inst);
}

static void crypto_destroy_instance_workfn(struct work_struct *w)
{
	struct crypto_instance *inst = container_of(w, struct crypto_instance,
						    free_work);
	struct crypto_template *tmpl = inst->tmpl;

	crypto_free_instance(inst);
	crypto_tmpl_put(tmpl);
}

static void crypto_destroy_instance(struct crypto_alg *alg)
{
	struct crypto_instance *inst = container_of(alg,
						    struct crypto_instance,
						    alg);

	INIT_WORK(&inst->free_work, crypto_destroy_instance_workfn);
	schedule_work(&inst->free_work);
}

/*
 * This function adds a spawn to the list secondary_spawns which
 * will be used at the end of crypto_remove_spawns to unregister
 * instances, unless the spawn happens to be one that is depended
 * on by the new algorithm (nalg in crypto_remove_spawns).
 *
 * This function is also responsible for resurrecting any algorithms
 * in the dependency chain of nalg by unsetting n->dead.
 */
static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
					    struct list_head *stack,
					    struct list_head *top,
					    struct list_head *secondary_spawns)
{
	struct crypto_spawn *spawn, *n;

	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
	if (!spawn)
		return NULL;

	n = list_prev_entry(spawn, list);
	list_move(&spawn->list, secondary_spawns);

	if (list_is_last(&n->list, stack))
		return top;

	n = list_next_entry(n, list);
	if (!spawn->dead)
		n->dead = false;

	return &n->inst->alg.cra_users;
}

static void crypto_remove_instance(struct crypto_instance *inst,
				   struct list_head *list)
{
	struct crypto_template *tmpl = inst->tmpl;

	if (crypto_is_dead(&inst->alg))
		return;

	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;

	if (!tmpl || !crypto_tmpl_get(tmpl))
		return;

	list_move(&inst->alg.cra_list, list);
	hlist_del(&inst->list);
	inst->alg.cra_destroy = crypto_destroy_instance;

	BUG_ON(!list_empty(&inst->alg.cra_users));
}

/*
 * Given an algorithm alg, remove all algorithms that depend on it
 * through spawns.  If nalg is not null, then exempt any algorithms
 * that nalg depends on.  This is useful when nalg itself
 * depends on alg.
 */
void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
			  struct crypto_alg *nalg)
{
	u32 new_type = (nalg ?: alg)->cra_flags;
	struct crypto_spawn *spawn, *n;
	LIST_HEAD(secondary_spawns);
	struct list_head *spawns;
	LIST_HEAD(stack);
	LIST_HEAD(top);

	spawns = &alg->cra_users;
	list_for_each_entry_safe(spawn, n, spawns, list) {
		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
			continue;

		list_move(&spawn->list, &top);
	}

	/*
	 * Perform a depth-first walk starting from alg through
	 * the cra_users tree.  The list stack records the path
	 * from alg to the current spawn.
	 */
	spawns = &top;
	do {
		while (!list_empty(spawns)) {
			struct crypto_instance *inst;

			spawn = list_first_entry(spawns, struct crypto_spawn,
						 list);
			inst = spawn->inst;

			list_move(&spawn->list, &stack);
			spawn->dead = !spawn->registered || &inst->alg != nalg;

			if (!spawn->registered)
				break;

			BUG_ON(&inst->alg == alg);

			if (&inst->alg == nalg)
				break;

			spawns = &inst->alg.cra_users;

			/*
			 * We may encounter an unregistered instance here, even
			 * when spawn->registered is true, since an instance's
			 * spawns are set up prior to the instance being
			 * registered (and registration may have failed).  An
			 * unregistered instance will have NULL
			 * ->cra_users.next, since ->cra_users isn't properly
			 * initialized until registration.  But an unregistered
			 * instance cannot have any users, so treat it the same
			 * as ->cra_users being empty.
			 */
			if (spawns->next == NULL)
				break;
		}
	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
					      &secondary_spawns)));

	/*
	 * Remove all instances that are marked as dead.  Also
	 * complete the resurrection of the others by moving them
	 * back to the cra_users list.
	 */
	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
		if (!spawn->dead)
			list_move(&spawn->list, &spawn->alg->cra_users);
		else if (spawn->registered)
			crypto_remove_instance(spawn->inst, list);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_spawns);
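
/*
 * Illustrative sketch (not from this file): how the spawn walk above plays
 * out.  Suppose "sha256-generic" is being unregistered while the template
 * instance "hmac(sha256-generic)" is built on top of it via a spawn.
 * crypto_unregister_alg() ends up calling this with nalg == NULL:
 *
 *	LIST_HEAD(list);
 *
 *	crypto_remove_spawns(&sha256_generic_alg, &list, NULL);
 *	// The DFS visits the hmac instance through sha256's cra_users,
 *	// marks its spawn dead, and queues the instance on "list" ...
 *	crypto_remove_final(&list);	// ... which drops the final refs.
 *
 * "sha256_generic_alg" is a made-up variable name for the example; the real
 * flow is driven by crypto_unregister_alg()/crypto_remove_alg() below.
 */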

static void crypto_alg_finish_registration(struct crypto_alg *alg,
					   bool fulfill_requests,
					   struct list_head *algs_to_put)
{
	struct crypto_alg *q;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			continue;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			struct crypto_larval *larval = (void *)q;

			/*
			 * Check to see if either our generic name or
			 * specific name can satisfy the name requested
			 * by the larval entry q.
			 */
			if (strcmp(alg->cra_name, q->cra_name) &&
			    strcmp(alg->cra_driver_name, q->cra_name))
				continue;

			if (larval->adult)
				continue;
			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
				continue;

			if (fulfill_requests && crypto_mod_get(alg))
				larval->adult = alg;
			else
				larval->adult = ERR_PTR(-EAGAIN);

			continue;
		}

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
		    q->cra_priority > alg->cra_priority)
			continue;

		crypto_remove_spawns(q, algs_to_put, alg);
	}

	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
}

static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
{
	struct crypto_larval *larval;

	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
	    IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
		return NULL; /* No self-test needed */

	larval = crypto_larval_alloc(alg->cra_name,
				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
	if (IS_ERR(larval))
		return larval;

	larval->adult = crypto_mod_get(alg);
	if (!larval->adult) {
		kfree(larval);
		return ERR_PTR(-ENOENT);
	}

	refcount_set(&larval->alg.cra_refcnt, 1);
	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
	       CRYPTO_MAX_ALG_NAME);
	larval->alg.cra_priority = alg->cra_priority;

	return larval;
}

static struct crypto_larval *
__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
{
	struct crypto_alg *q;
	struct crypto_larval *larval;
	int ret = -EAGAIN;

	if (crypto_is_dead(alg))
		goto err;

	INIT_LIST_HEAD(&alg->cra_users);

	ret = -EEXIST;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (q == alg)
			goto err;

		if (crypto_is_moribund(q))
			continue;

		if (crypto_is_larval(q)) {
			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
				goto err;
			continue;
		}

		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
		    !strcmp(q->cra_driver_name, alg->cra_driver_name) ||
		    !strcmp(q->cra_name, alg->cra_driver_name))
			goto err;
	}

	larval = crypto_alloc_test_larval(alg);
	if (IS_ERR(larval))
		goto out;

	list_add(&alg->cra_list, &crypto_alg_list);

	if (larval) {
		/* No cheating! */
		alg->cra_flags &= ~CRYPTO_ALG_TESTED;

		list_add(&larval->alg.cra_list, &crypto_alg_list);
	} else {
		alg->cra_flags |= CRYPTO_ALG_TESTED;
		crypto_alg_finish_registration(alg, true, algs_to_put);
	}

out:
	return larval;

err:
	larval = ERR_PTR(ret);
	goto out;
}

void crypto_alg_tested(const char *name, int err)
{
	struct crypto_larval *test;
	struct crypto_alg *alg;
	struct crypto_alg *q;
	LIST_HEAD(list);
	bool best;

	down_write(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		test = (struct crypto_larval *)q;

		if (!strcmp(q->cra_driver_name, name))
			goto found;
	}

	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
	goto unlock;

found:
	q->cra_flags |= CRYPTO_ALG_DEAD;
	alg = test->adult;

	if (list_empty(&alg->cra_list))
		goto complete;

	if (err == -ECANCELED)
		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
	else if (err)
		goto complete;
	else
		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;

	alg->cra_flags |= CRYPTO_ALG_TESTED;

	/*
	 * If a higher-priority implementation of the same algorithm is
	 * currently being tested, then don't fulfill request larvals.
	 */
	best = true;
	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		if (crypto_is_moribund(q) || !crypto_is_larval(q))
			continue;

		if (strcmp(alg->cra_name, q->cra_name))
			continue;

		if (q->cra_priority > alg->cra_priority) {
			best = false;
			break;
		}
	}

	crypto_alg_finish_registration(alg, best, &list);

complete:
	complete_all(&test->completion);

unlock:
	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_alg_tested);

void crypto_remove_final(struct list_head *list)
{
	struct crypto_alg *alg;
	struct crypto_alg *n;

	list_for_each_entry_safe(alg, n, list, cra_list) {
		list_del_init(&alg->cra_list);
		crypto_alg_put(alg);
	}
}
EXPORT_SYMBOL_GPL(crypto_remove_final);

int crypto_register_alg(struct crypto_alg *alg)
{
	struct crypto_larval *larval;
	LIST_HEAD(algs_to_put);
	bool test_started = false;
	int err;

	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
	err = crypto_check_alg(alg);
	if (err)
		return err;

	down_write(&crypto_alg_sem);
	larval = __crypto_register_alg(alg, &algs_to_put);
	if (!IS_ERR_OR_NULL(larval)) {
		test_started = crypto_boot_test_finished();
		larval->test_started = test_started;
	}
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);
	if (test_started)
		crypto_wait_for_test(larval);
	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_alg);
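
/*
 * Illustrative sketch (assumed driver code, not from this file): a minimal
 * module registering one algorithm.  The names and field values below are
 * placeholders; a real driver also fills in the type-specific ops (or uses
 * a type-specific wrapper such as crypto_register_shash()):
 *
 *	static struct crypto_alg my_alg = {
 *		.cra_name	 = "myalg",
 *		.cra_driver_name = "myalg-generic",
 *		.cra_priority	 = 100,
 *		.cra_blocksize	 = 16,
 *		.cra_ctxsize	 = sizeof(struct my_ctx),
 *		.cra_module	 = THIS_MODULE,
 *	};
 *
 *	static int __init my_init(void)
 *	{
 *		return crypto_register_alg(&my_alg);
 *	}
 *
 *	static void __exit my_exit(void)
 *	{
 *		crypto_unregister_alg(&my_alg);
 *	}
 *
 * Once boot tests have finished, registration blocks in
 * crypto_wait_for_test() until the self-test larval completes.
 */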

static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
{
	if (unlikely(list_empty(&alg->cra_list)))
		return -ENOENT;

	alg->cra_flags |= CRYPTO_ALG_DEAD;

	list_del_init(&alg->cra_list);
	crypto_remove_spawns(alg, list, NULL);

	return 0;
}

void crypto_unregister_alg(struct crypto_alg *alg)
{
	int ret;
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);
	ret = crypto_remove_alg(alg, &list);
	up_write(&crypto_alg_sem);

	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
		return;

	if (WARN_ON(refcount_read(&alg->cra_refcnt) != 1))
		return;

	if (alg->cra_destroy)
		alg->cra_destroy(alg);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_alg);

int crypto_register_algs(struct crypto_alg *algs, int count)
{
	int i, ret;

	for (i = 0; i < count; i++) {
		ret = crypto_register_alg(&algs[i]);
		if (ret)
			goto err;
	}

	return 0;

err:
	for (--i; i >= 0; --i)
		crypto_unregister_alg(&algs[i]);

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_register_algs);

void crypto_unregister_algs(struct crypto_alg *algs, int count)
{
	int i;

	for (i = 0; i < count; i++)
		crypto_unregister_alg(&algs[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_algs);

int crypto_register_template(struct crypto_template *tmpl)
{
	struct crypto_template *q;
	int err = -EEXIST;

	down_write(&crypto_alg_sem);

	crypto_check_module_sig(tmpl->module);

	list_for_each_entry(q, &crypto_template_list, list) {
		if (q == tmpl)
			goto out;
	}

	list_add(&tmpl->list, &crypto_template_list);
	err = 0;
out:
	up_write(&crypto_alg_sem);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_template);

int crypto_register_templates(struct crypto_template *tmpls, int count)
{
	int i, err;

	for (i = 0; i < count; i++) {
		err = crypto_register_template(&tmpls[i]);
		if (err)
			goto out;
	}
	return 0;

out:
	for (--i; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_register_templates);

void crypto_unregister_template(struct crypto_template *tmpl)
{
	struct crypto_instance *inst;
	struct hlist_node *n;
	struct hlist_head *list;
	LIST_HEAD(users);

	down_write(&crypto_alg_sem);

	BUG_ON(list_empty(&tmpl->list));
	list_del_init(&tmpl->list);

	list = &tmpl->instances;
	hlist_for_each_entry(inst, list, list) {
		int err = crypto_remove_alg(&inst->alg, &users);

		BUG_ON(err);
	}

	up_write(&crypto_alg_sem);

	hlist_for_each_entry_safe(inst, n, list, list) {
		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
		crypto_free_instance(inst);
	}
	crypto_remove_final(&users);
}
EXPORT_SYMBOL_GPL(crypto_unregister_template);

void crypto_unregister_templates(struct crypto_template *tmpls, int count)
{
	int i;

	for (i = count - 1; i >= 0; --i)
		crypto_unregister_template(&tmpls[i]);
}
EXPORT_SYMBOL_GPL(crypto_unregister_templates);

static struct crypto_template *__crypto_lookup_template(const char *name)
{
	struct crypto_template *q, *tmpl = NULL;

	down_read(&crypto_alg_sem);
	list_for_each_entry(q, &crypto_template_list, list) {
		if (strcmp(q->name, name))
			continue;
		if (unlikely(!crypto_tmpl_get(q)))
			continue;

		tmpl = q;
		break;
	}
	up_read(&crypto_alg_sem);

	return tmpl;
}

struct crypto_template *crypto_lookup_template(const char *name)
{
	return try_then_request_module(__crypto_lookup_template(name),
				       "crypto-%s", name);
}
EXPORT_SYMBOL_GPL(crypto_lookup_template);
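
/*
 * Example (illustrative, not from this file): a lookup for the "hmac"
 * template first scans crypto_template_list; if it is absent, the
 * try_then_request_module() wrapper above issues
 * request_module("crypto-hmac") and retries -- which is why template
 * modules declare MODULE_ALIAS_CRYPTO("hmac") and the like:
 *
 *	struct crypto_template *tmpl = crypto_lookup_template("hmac");
 *
 *	if (tmpl) {
 *		// ... instantiate e.g. "hmac(sha256)" ...
 *		crypto_tmpl_put(tmpl);
 *	}
 */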

int crypto_register_instance(struct crypto_template *tmpl,
			     struct crypto_instance *inst)
{
	struct crypto_larval *larval;
	struct crypto_spawn *spawn;
	u32 fips_internal = 0;
	LIST_HEAD(algs_to_put);
	int err;

	err = crypto_check_alg(&inst->alg);
	if (err)
		return err;

	inst->alg.cra_module = tmpl->module;
	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;

	down_write(&crypto_alg_sem);

	larval = ERR_PTR(-EAGAIN);
	for (spawn = inst->spawns; spawn;) {
		struct crypto_spawn *next;

		if (spawn->dead)
			goto unlock;

		next = spawn->next;
		spawn->inst = inst;
		spawn->registered = true;

		fips_internal |= spawn->alg->cra_flags;

		crypto_mod_put(spawn->alg);

		spawn = next;
	}

	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);

	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
	if (IS_ERR(larval))
		goto unlock;
	else if (larval)
		larval->test_started = true;

	hlist_add_head(&inst->list, &tmpl->instances);
	inst->tmpl = tmpl;

unlock:
	up_write(&crypto_alg_sem);

	if (IS_ERR(larval))
		return PTR_ERR(larval);
	if (larval)
		crypto_wait_for_test(larval);
	crypto_remove_final(&algs_to_put);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_register_instance);

void crypto_unregister_instance(struct crypto_instance *inst)
{
	LIST_HEAD(list);

	down_write(&crypto_alg_sem);

	crypto_remove_spawns(&inst->alg, &list, NULL);
	crypto_remove_instance(inst, &list);

	up_write(&crypto_alg_sem);

	crypto_remove_final(&list);
}
EXPORT_SYMBOL_GPL(crypto_unregister_instance);

int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
		      const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	int err = -EAGAIN;

	if (WARN_ON_ONCE(inst == NULL))
		return -EINVAL;

	/* Allow the result of crypto_attr_alg_name() to be passed directly */
	if (IS_ERR(name))
		return PTR_ERR(name);

	alg = crypto_find_alg(name, spawn->frontend,
			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
	if (IS_ERR(alg))
		return PTR_ERR(alg);

	down_write(&crypto_alg_sem);
	if (!crypto_is_moribund(alg)) {
		list_add(&spawn->list, &alg->cra_users);
		spawn->alg = alg;
		spawn->mask = mask;
		spawn->next = inst->spawns;
		inst->spawns = spawn;
		inst->alg.cra_flags |=
			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
		err = 0;
	}
	up_write(&crypto_alg_sem);
	if (err)
		crypto_mod_put(alg);
	return err;
}
EXPORT_SYMBOL_GPL(crypto_grab_spawn);

void crypto_drop_spawn(struct crypto_spawn *spawn)
{
	if (!spawn->alg) /* not yet initialized? */
		return;

	down_write(&crypto_alg_sem);
	if (!spawn->dead)
		list_del(&spawn->list);
	up_write(&crypto_alg_sem);

	if (!spawn->registered)
		crypto_mod_put(spawn->alg);
}
EXPORT_SYMBOL_GPL(crypto_drop_spawn);

static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
	struct crypto_alg *target;
	bool shoot = false;

	down_read(&crypto_alg_sem);
	if (!spawn->dead) {
		alg = spawn->alg;
		if (!crypto_mod_get(alg)) {
			target = crypto_alg_get(alg);
			shoot = true;
			alg = ERR_PTR(-EAGAIN);
		}
	}
	up_read(&crypto_alg_sem);

	if (shoot) {
		crypto_shoot_alg(target);
		crypto_alg_put(target);
	}

	return alg;
}

struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
				    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = ERR_PTR(-EINVAL);
	if (unlikely((alg->cra_flags ^ type) & mask))
		goto out_put_alg;

	tfm = __crypto_alloc_tfm(alg, type, mask);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm);

void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
{
	struct crypto_alg *alg;
	struct crypto_tfm *tfm;

	alg = crypto_spawn_alg(spawn);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	tfm = crypto_create_tfm(alg, spawn->frontend);
	if (IS_ERR(tfm))
		goto out_put_alg;

	return tfm;

out_put_alg:
	crypto_mod_put(alg);
	return tfm;
}
EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
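
/*
 * Illustrative sketch (assumed template code, not from this file): an
 * instance's ->init() typically turns its spawn into an inner transform via
 * a type-specific wrapper around crypto_spawn_tfm2(), e.g. for skciphers:
 *
 *	static int my_init_tfm(struct crypto_skcipher *tfm)
 *	{
 *		struct skcipher_instance *inst = skcipher_alg_instance(tfm);
 *		struct crypto_skcipher_spawn *spawn =
 *			skcipher_instance_ctx(inst);
 *		struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		ctx->child = crypto_spawn_skcipher(spawn);
 *		return PTR_ERR_OR_ZERO(ctx->child);
 *	}
 *
 * "my_init_tfm" and "struct my_tfm_ctx" are hypothetical; the helpers shown
 * are the skcipher wrappers that end up in crypto_spawn_tfm2().
 */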

int crypto_register_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_register(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_register_notifier);

int crypto_unregister_notifier(struct notifier_block *nb)
{
	return blocking_notifier_chain_unregister(&crypto_chain, nb);
}
EXPORT_SYMBOL_GPL(crypto_unregister_notifier);

struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
{
	struct rtattr *rta = tb[0];
	struct crypto_attr_type *algt;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*algt))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_TYPE)
		return ERR_PTR(-EINVAL);

	algt = RTA_DATA(rta);

	return algt;
}
EXPORT_SYMBOL_GPL(crypto_get_attr_type);

/**
 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 * @tb: the template parameters
 * @type: the algorithm type the template would be instantiated as
 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 *	      to restrict the flags of any inner algorithms
 *
 * Validate that the algorithm type the user requested is compatible with the
 * one the template would actually be instantiated as.  E.g., if the user is
 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 *
 * Also compute the mask to use to restrict the flags of any inner algorithms.
 *
 * Return: 0 on success; -errno on failure
 */
int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
{
	struct crypto_attr_type *algt;

	algt = crypto_get_attr_type(tb);
	if (IS_ERR(algt))
		return PTR_ERR(algt);

	if ((algt->type ^ type) & algt->mask)
		return -EINVAL;

	*mask_ret = crypto_algt_inherited_mask(algt);
	return 0;
}
EXPORT_SYMBOL_GPL(crypto_check_attr_type);
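
/*
 * Illustrative sketch (assumed template code, not from this file): the
 * attribute helpers above are normally used together at the top of a
 * template's ->create() callback.  "my_create" is hypothetical:
 *
 *	static int my_create(struct crypto_template *tmpl, struct rtattr **tb)
 *	{
 *		const char *name;
 *		u32 mask;
 *		int err;
 *
 *		err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER,
 *					     &mask);
 *		if (err)
 *			return err;
 *
 *		name = crypto_attr_alg_name(tb[1]);
 *		// name may be an ERR_PTR(); crypto_grab_spawn() (via the
 *		// crypto_grab_*() wrappers) accepts that directly.
 *		...
 *	}
 */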

const char *crypto_attr_alg_name(struct rtattr *rta)
{
	struct crypto_attr_alg *alga;

	if (!rta)
		return ERR_PTR(-ENOENT);
	if (RTA_PAYLOAD(rta) < sizeof(*alga))
		return ERR_PTR(-EINVAL);
	if (rta->rta_type != CRYPTOA_ALG)
		return ERR_PTR(-EINVAL);

	alga = RTA_DATA(rta);
	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;

	return alga->name;
}
EXPORT_SYMBOL_GPL(crypto_attr_alg_name);

int crypto_inst_setname(struct crypto_instance *inst, const char *name,
			struct crypto_alg *alg)
{
	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
		return -ENAMETOOLONG;

	return 0;
}
EXPORT_SYMBOL_GPL(crypto_inst_setname);
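
/*
 * Example (illustrative): wrapping an inner algorithm whose cra_name is
 * "aes" and cra_driver_name is "aes-generic" with the "ctr" template:
 *
 *	crypto_inst_setname(inst, "ctr", &aes_alg);
 *	// inst->alg.cra_name        == "ctr(aes)"
 *	// inst->alg.cra_driver_name == "ctr(aes-generic)"
 *
 * "aes_alg" stands in for the inner algorithm's struct crypto_alg.  Both
 * composed strings must fit in CRYPTO_MAX_ALG_NAME, else -ENAMETOOLONG.
 */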

void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
{
	INIT_LIST_HEAD(&queue->list);
	queue->backlog = &queue->list;
	queue->qlen = 0;
	queue->max_qlen = max_qlen;
}
EXPORT_SYMBOL_GPL(crypto_init_queue);

int crypto_enqueue_request(struct crypto_queue *queue,
			   struct crypto_async_request *request)
{
	int err = -EINPROGRESS;

	if (unlikely(queue->qlen >= queue->max_qlen)) {
		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
			err = -ENOSPC;
			goto out;
		}
		err = -EBUSY;
		if (queue->backlog == &queue->list)
			queue->backlog = &request->list;
	}

	queue->qlen++;
	list_add_tail(&request->list, &queue->list);

out:
	return err;
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request);

void crypto_enqueue_request_head(struct crypto_queue *queue,
				 struct crypto_async_request *request)
{
	if (unlikely(queue->qlen >= queue->max_qlen))
		queue->backlog = queue->backlog->prev;

	queue->qlen++;
	list_add(&request->list, &queue->list);
}
EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);

struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
{
	struct list_head *request;

	if (unlikely(!queue->qlen))
		return NULL;

	queue->qlen--;

	if (queue->backlog != &queue->list)
		queue->backlog = queue->backlog->next;

	request = queue->list.next;
	list_del(request);

	return list_entry(request, struct crypto_async_request, list);
}
EXPORT_SYMBOL_GPL(crypto_dequeue_request);
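
/*
 * Illustrative sketch (assumed driver code, not from this file): a typical
 * producer/consumer use of the request queue.  The names are hypothetical,
 * and a real driver serializes the queue operations with its own lock:
 *
 *	crypto_init_queue(&dev->queue, 32);
 *
 *	// Producer: -EINPROGRESS means queued; -EBUSY means queued on the
 *	// backlog; -ENOSPC means rejected (no MAY_BACKLOG flag set).
 *	err = crypto_enqueue_request(&dev->queue, &req->base);
 *
 *	// Consumer, e.g. in a worker or completion handler:
 *	struct crypto_async_request *areq =
 *		crypto_dequeue_request(&dev->queue);
 *	if (areq)
 *		dev_process(dev, areq);	// hypothetical
 */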

static inline void crypto_inc_byte(u8 *a, unsigned int size)
{
	u8 *b = (a + size);
	u8 c;

	for (; size; size--) {
		c = *--b + 1;
		*b = c;
		if (c)
			break;
	}
}

void crypto_inc(u8 *a, unsigned int size)
{
	__be32 *b = (__be32 *)(a + size);
	u32 c;

	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
		for (; size >= 4; size -= 4) {
			c = be32_to_cpu(*--b) + 1;
			*b = cpu_to_be32(c);
			if (likely(c))
				return;
		}

	crypto_inc_byte(a, size);
}
EXPORT_SYMBOL_GPL(crypto_inc);
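
/*
 * Example (illustrative): crypto_inc() treats the buffer as one big-endian
 * counter, which is exactly what CTR-style modes need for their counter
 * block:
 *
 *	u8 ctr[16] = { [0 ... 14] = 0x00, [15] = 0xff };
 *
 *	crypto_inc(ctr, 16);
 *	// ctr[15] == 0x00, ctr[14] == 0x01 -- the carry propagated.
 */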

unsigned int crypto_alg_extsize(struct crypto_alg *alg)
{
	return alg->cra_ctxsize +
	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
}
EXPORT_SYMBOL_GPL(crypto_alg_extsize);

int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
			u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_type_has_alg);

static void __init crypto_start_tests(void)
{
	if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
		return;

	for (;;) {
		struct crypto_larval *larval = NULL;
		struct crypto_alg *q;

		down_write(&crypto_alg_sem);

		list_for_each_entry(q, &crypto_alg_list, cra_list) {
			struct crypto_larval *l;

			if (!crypto_is_larval(q))
				continue;

			l = (void *)q;

			if (!crypto_is_test_larval(l))
				continue;

			if (l->test_started)
				continue;

			l->test_started = true;
			larval = l;
			break;
		}

		up_write(&crypto_alg_sem);

		if (!larval)
			break;

		crypto_wait_for_test(larval);
	}

	set_crypto_boot_test_finished();
}

static int __init crypto_algapi_init(void)
{
	crypto_init_proc();
	crypto_start_tests();
	return 0;
}

static void __exit crypto_algapi_exit(void)
{
	crypto_exit_proc();
}

/*
 * We run this at late_initcall so that all the built-in algorithms
 * have had a chance to register themselves first.
 */
late_initcall(crypto_algapi_init);
module_exit(crypto_algapi_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Cryptographic algorithms API");
MODULE_SOFTDEP("pre: cryptomgr");