crypto/algapi.c, Linux v3.15
  1/*
  2 * Cryptographic API for algorithms (i.e., low-level API).
  3 *
  4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  5 *
  6 * This program is free software; you can redistribute it and/or modify it
  7 * under the terms of the GNU General Public License as published by the Free
  8 * Software Foundation; either version 2 of the License, or (at your option)
  9 * any later version.
 10 *
 11 */
 12
 13#include <linux/err.h>
 14#include <linux/errno.h>
 15#include <linux/init.h>
 16#include <linux/kernel.h>
 17#include <linux/list.h>
 18#include <linux/module.h>
 19#include <linux/rtnetlink.h>
 20#include <linux/slab.h>
 21#include <linux/string.h>
 22
 23#include "internal.h"
 24
 25static LIST_HEAD(crypto_template_list);
 26
 27static inline int crypto_set_driver_name(struct crypto_alg *alg)
 28{
 29	static const char suffix[] = "-generic";
 30	char *driver_name = alg->cra_driver_name;
 31	int len;
 32
 33	if (*driver_name)
 34		return 0;
 35
 36	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
 37	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
 38		return -ENAMETOOLONG;
 39
 40	memcpy(driver_name + len, suffix, sizeof(suffix));
 41	return 0;
 42}
 43
 44static int crypto_check_alg(struct crypto_alg *alg)
 45{
 46	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
 47		return -EINVAL;
 48
 49	if (alg->cra_blocksize > PAGE_SIZE / 8)
 50		return -EINVAL;
 51
 52	if (alg->cra_priority < 0)
 53		return -EINVAL;
 54
 55	return crypto_set_driver_name(alg);
 56}
 57
 58static void crypto_destroy_instance(struct crypto_alg *alg)
 59{
 60	struct crypto_instance *inst = (void *)alg;
 61	struct crypto_template *tmpl = inst->tmpl;
 62
 63	tmpl->free(inst);
 64	crypto_tmpl_put(tmpl);
 65}
 66
 67static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
 68					    struct list_head *stack,
 69					    struct list_head *top,
 70					    struct list_head *secondary_spawns)
 71{
 72	struct crypto_spawn *spawn, *n;
 73
 74	if (list_empty(stack))
 75		return NULL;
 76
 77	spawn = list_first_entry(stack, struct crypto_spawn, list);
 78	n = list_entry(spawn->list.next, struct crypto_spawn, list);
 79
 80	if (spawn->alg && &n->list != stack && !n->alg)
 81		n->alg = (n->list.next == stack) ? alg :
 82			 &list_entry(n->list.next, struct crypto_spawn,
 83				     list)->inst->alg;
 84
 85	list_move(&spawn->list, secondary_spawns);
 86
 87	return &n->list == stack ? top : &n->inst->alg.cra_users;
 88}
 89
 90static void crypto_remove_spawn(struct crypto_spawn *spawn,
 91				struct list_head *list)
 92{
 93	struct crypto_instance *inst = spawn->inst;
 94	struct crypto_template *tmpl = inst->tmpl;
 95
 96	if (crypto_is_dead(&inst->alg))
 97		return;
 98
 99	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
100	if (hlist_unhashed(&inst->list))
101		return;
102
103	if (!tmpl || !crypto_tmpl_get(tmpl))
104		return;
105
106	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
107	list_move(&inst->alg.cra_list, list);
108	hlist_del(&inst->list);
109	inst->alg.cra_destroy = crypto_destroy_instance;
110
111	BUG_ON(!list_empty(&inst->alg.cra_users));
112}
113
114void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
115			  struct crypto_alg *nalg)
116{
117	u32 new_type = (nalg ?: alg)->cra_flags;
118	struct crypto_spawn *spawn, *n;
119	LIST_HEAD(secondary_spawns);
120	struct list_head *spawns;
121	LIST_HEAD(stack);
122	LIST_HEAD(top);
123
124	spawns = &alg->cra_users;
125	list_for_each_entry_safe(spawn, n, spawns, list) {
126		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
127			continue;
128
129		list_move(&spawn->list, &top);
130	}
131
132	spawns = &top;
133	do {
134		while (!list_empty(spawns)) {
135			struct crypto_instance *inst;
136
137			spawn = list_first_entry(spawns, struct crypto_spawn,
138						 list);
139			inst = spawn->inst;
140
141			BUG_ON(&inst->alg == alg);
142
143			list_move(&spawn->list, &stack);
144
145			if (&inst->alg == nalg)
146				break;
147
148			spawn->alg = NULL;
149			spawns = &inst->alg.cra_users;
150		}
151	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
152					      &secondary_spawns)));
153
154	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
155		if (spawn->alg)
156			list_move(&spawn->list, &spawn->alg->cra_users);
157		else
158			crypto_remove_spawn(spawn, list);
159	}
160}
161EXPORT_SYMBOL_GPL(crypto_remove_spawns);
162
163static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
164{
165	struct crypto_alg *q;
166	struct crypto_larval *larval;
167	int ret = -EAGAIN;
168
169	if (crypto_is_dead(alg))
170		goto err;
171
172	INIT_LIST_HEAD(&alg->cra_users);
173
174	/* No cheating! */
175	alg->cra_flags &= ~CRYPTO_ALG_TESTED;
176
177	ret = -EEXIST;
178
179	atomic_set(&alg->cra_refcnt, 1);
180	list_for_each_entry(q, &crypto_alg_list, cra_list) {
181		if (q == alg)
182			goto err;
183
184		if (crypto_is_moribund(q))
185			continue;
186
187		if (crypto_is_larval(q)) {
188			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
189				goto err;
190			continue;
191		}
192
193		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
194		    !strcmp(q->cra_name, alg->cra_driver_name))
195			goto err;
196	}
197
198	larval = crypto_larval_alloc(alg->cra_name,
199				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
200	if (IS_ERR(larval))
201		goto out;
202
203	ret = -ENOENT;
204	larval->adult = crypto_mod_get(alg);
205	if (!larval->adult)
206		goto free_larval;
207
208	atomic_set(&larval->alg.cra_refcnt, 1);
209	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
210	       CRYPTO_MAX_ALG_NAME);
211	larval->alg.cra_priority = alg->cra_priority;
212
213	list_add(&alg->cra_list, &crypto_alg_list);
214	list_add(&larval->alg.cra_list, &crypto_alg_list);
215
216out:
217	return larval;
218
219free_larval:
220	kfree(larval);
221err:
222	larval = ERR_PTR(ret);
223	goto out;
224}
225
226void crypto_alg_tested(const char *name, int err)
227{
228	struct crypto_larval *test;
229	struct crypto_alg *alg;
230	struct crypto_alg *q;
231	LIST_HEAD(list);
232
233	down_write(&crypto_alg_sem);
234	list_for_each_entry(q, &crypto_alg_list, cra_list) {
235		if (crypto_is_moribund(q) || !crypto_is_larval(q))
236			continue;
237
238		test = (struct crypto_larval *)q;
239
240		if (!strcmp(q->cra_driver_name, name))
241			goto found;
242	}
243
244	printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
245	goto unlock;
246
247found:
248	q->cra_flags |= CRYPTO_ALG_DEAD;
249	alg = test->adult;
250	if (err || list_empty(&alg->cra_list))
251		goto complete;
252
253	alg->cra_flags |= CRYPTO_ALG_TESTED;
254
255	list_for_each_entry(q, &crypto_alg_list, cra_list) {
256		if (q == alg)
257			continue;
258
259		if (crypto_is_moribund(q))
260			continue;
261
262		if (crypto_is_larval(q)) {
263			struct crypto_larval *larval = (void *)q;
264
265			/*
266			 * Check to see if either our generic name or
267			 * specific name can satisfy the name requested
268			 * by the larval entry q.
269			 */
270			if (strcmp(alg->cra_name, q->cra_name) &&
271			    strcmp(alg->cra_driver_name, q->cra_name))
272				continue;
273
274			if (larval->adult)
275				continue;
276			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
277				continue;
278			if (!crypto_mod_get(alg))
279				continue;
280
281			larval->adult = alg;
282			continue;
283		}
284
285		if (strcmp(alg->cra_name, q->cra_name))
286			continue;
287
288		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
289		    q->cra_priority > alg->cra_priority)
290			continue;
291
292		crypto_remove_spawns(q, &list, alg);
293	}
294
295complete:
296	complete_all(&test->completion);
297
298unlock:
299	up_write(&crypto_alg_sem);
300
301	crypto_remove_final(&list);
302}
303EXPORT_SYMBOL_GPL(crypto_alg_tested);
304
305void crypto_remove_final(struct list_head *list)
306{
307	struct crypto_alg *alg;
308	struct crypto_alg *n;
309
310	list_for_each_entry_safe(alg, n, list, cra_list) {
311		list_del_init(&alg->cra_list);
312		crypto_alg_put(alg);
313	}
314}
315EXPORT_SYMBOL_GPL(crypto_remove_final);
316
317static void crypto_wait_for_test(struct crypto_larval *larval)
318{
319	int err;
320
321	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
322	if (err != NOTIFY_STOP) {
323		if (WARN_ON(err != NOTIFY_DONE))
324			goto out;
325		crypto_alg_tested(larval->alg.cra_driver_name, 0);
326	}
327
328	err = wait_for_completion_interruptible(&larval->completion);
329	WARN_ON(err);
330
331out:
332	crypto_larval_kill(&larval->alg);
333}
334
335int crypto_register_alg(struct crypto_alg *alg)
336{
337	struct crypto_larval *larval;
338	int err;
339
340	err = crypto_check_alg(alg);
341	if (err)
342		return err;
343
344	down_write(&crypto_alg_sem);
345	larval = __crypto_register_alg(alg);
346	up_write(&crypto_alg_sem);
347
348	if (IS_ERR(larval))
349		return PTR_ERR(larval);
350
351	crypto_wait_for_test(larval);
352	return 0;
353}
354EXPORT_SYMBOL_GPL(crypto_register_alg);
355
356static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
357{
358	if (unlikely(list_empty(&alg->cra_list)))
359		return -ENOENT;
360
361	alg->cra_flags |= CRYPTO_ALG_DEAD;
362
363	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
364	list_del_init(&alg->cra_list);
365	crypto_remove_spawns(alg, list, NULL);
366
367	return 0;
368}
369
370int crypto_unregister_alg(struct crypto_alg *alg)
371{
372	int ret;
373	LIST_HEAD(list);
374
375	down_write(&crypto_alg_sem);
376	ret = crypto_remove_alg(alg, &list);
377	up_write(&crypto_alg_sem);
378
379	if (ret)
380		return ret;
381
382	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
383	if (alg->cra_destroy)
384		alg->cra_destroy(alg);
385
386	crypto_remove_final(&list);
387	return 0;
388}
389EXPORT_SYMBOL_GPL(crypto_unregister_alg);
390
391int crypto_register_algs(struct crypto_alg *algs, int count)
392{
393	int i, ret;
394
395	for (i = 0; i < count; i++) {
396		ret = crypto_register_alg(&algs[i]);
397		if (ret)
398			goto err;
399	}
400
401	return 0;
402
403err:
404	for (--i; i >= 0; --i)
405		crypto_unregister_alg(&algs[i]);
406
407	return ret;
408}
409EXPORT_SYMBOL_GPL(crypto_register_algs);
410
411int crypto_unregister_algs(struct crypto_alg *algs, int count)
412{
413	int i, ret;
414
415	for (i = 0; i < count; i++) {
416		ret = crypto_unregister_alg(&algs[i]);
417		if (ret)
418			pr_err("Failed to unregister %s %s: %d\n",
419			       algs[i].cra_driver_name, algs[i].cra_name, ret);
420	}
421
422	return 0;
423}
424EXPORT_SYMBOL_GPL(crypto_unregister_algs);
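
An illustrative use of the registration entry points above: a minimal, hypothetical module publishing one single-block cipher. Everything here (the "excipher" name, the identity transform) is made up for illustration and is not part of this file; a driver with several algorithms would use crypto_register_algs()/crypto_unregister_algs() on an array instead.

#include <linux/crypto.h>
#include <linux/errno.h>
#include <linux/module.h>
#include <linux/string.h>

#define EX_BLOCK_SIZE	16

static int ex_setkey(struct crypto_tfm *tfm, const u8 *key,
		     unsigned int keylen)
{
	/* Accept only 128-bit keys in this toy example. */
	return keylen == 16 ? 0 : -EINVAL;
}

static void ex_crypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	/* Identity "cipher"; a real driver transforms the block here. */
	memcpy(dst, src, EX_BLOCK_SIZE);
}

static struct crypto_alg ex_alg = {
	.cra_name		= "excipher",
	.cra_driver_name	= "excipher-generic",
	.cra_priority		= 100,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= EX_BLOCK_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u			= { .cipher = {
		.cia_min_keysize	= 16,
		.cia_max_keysize	= 16,
		.cia_setkey		= ex_setkey,
		.cia_encrypt		= ex_crypt,
		.cia_decrypt		= ex_crypt,
	} },
};

static int __init ex_mod_init(void)
{
	return crypto_register_alg(&ex_alg);
}

static void __exit ex_mod_exit(void)
{
	crypto_unregister_alg(&ex_alg);
}

module_init(ex_mod_init);
module_exit(ex_mod_exit);
MODULE_LICENSE("GPL");

crypto_register_alg() runs crypto_check_alg() first, so a block size above PAGE_SIZE / 8, a negative priority, or an alignmask that is not of the form 2^n - 1 would make this registration fail with -EINVAL.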
425
426int crypto_register_template(struct crypto_template *tmpl)
427{
428	struct crypto_template *q;
429	int err = -EEXIST;
430
431	down_write(&crypto_alg_sem);
432
433	list_for_each_entry(q, &crypto_template_list, list) {
434		if (q == tmpl)
435			goto out;
436	}
437
438	list_add(&tmpl->list, &crypto_template_list);
439	crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
440	err = 0;
441out:
442	up_write(&crypto_alg_sem);
443	return err;
444}
445EXPORT_SYMBOL_GPL(crypto_register_template);
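
Templates, the objects crypto_register_template() adds to crypto_template_list, are normally declared statically by a mode-of-operation module and registered from module_init(). A hedged sketch with hypothetical names follows; the ->alloc and ->free callbacks it references are sketched after crypto_alloc_instance() further down.

#include <crypto/algapi.h>
#include <linux/module.h>

static struct crypto_instance *ex_tmpl_alloc(struct rtattr **tb);
static void ex_tmpl_free(struct crypto_instance *inst);

static struct crypto_template ex_tmpl = {
	.name	= "exmode",
	.alloc	= ex_tmpl_alloc,
	.free	= ex_tmpl_free,
	.module	= THIS_MODULE,
};

static int __init ex_tmpl_mod_init(void)
{
	return crypto_register_template(&ex_tmpl);
}

static void __exit ex_tmpl_mod_exit(void)
{
	crypto_unregister_template(&ex_tmpl);
}

module_init(ex_tmpl_mod_init);
module_exit(ex_tmpl_mod_exit);
MODULE_LICENSE("GPL");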
446
447void crypto_unregister_template(struct crypto_template *tmpl)
448{
449	struct crypto_instance *inst;
450	struct hlist_node *n;
451	struct hlist_head *list;
452	LIST_HEAD(users);
453
454	down_write(&crypto_alg_sem);
455
456	BUG_ON(list_empty(&tmpl->list));
457	list_del_init(&tmpl->list);
458
459	list = &tmpl->instances;
460	hlist_for_each_entry(inst, list, list) {
461		int err = crypto_remove_alg(&inst->alg, &users);
462		BUG_ON(err);
463	}
464
465	crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);
466
467	up_write(&crypto_alg_sem);
468
469	hlist_for_each_entry_safe(inst, n, list, list) {
470		BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
471		tmpl->free(inst);
472	}
473	crypto_remove_final(&users);
474}
475EXPORT_SYMBOL_GPL(crypto_unregister_template);
476
477static struct crypto_template *__crypto_lookup_template(const char *name)
478{
479	struct crypto_template *q, *tmpl = NULL;
480
481	down_read(&crypto_alg_sem);
482	list_for_each_entry(q, &crypto_template_list, list) {
483		if (strcmp(q->name, name))
484			continue;
485		if (unlikely(!crypto_tmpl_get(q)))
486			continue;
487
488		tmpl = q;
489		break;
490	}
491	up_read(&crypto_alg_sem);
492
493	return tmpl;
494}
495
496struct crypto_template *crypto_lookup_template(const char *name)
497{
498	return try_then_request_module(__crypto_lookup_template(name), "%s",
499				       name);
500}
501EXPORT_SYMBOL_GPL(crypto_lookup_template);
502
503int crypto_register_instance(struct crypto_template *tmpl,
504			     struct crypto_instance *inst)
505{
506	struct crypto_larval *larval;
507	int err;
508
509	err = crypto_check_alg(&inst->alg);
510	if (err)
511		goto err;
512
513	inst->alg.cra_module = tmpl->module;
514	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
515
516	down_write(&crypto_alg_sem);
517
518	larval = __crypto_register_alg(&inst->alg);
519	if (IS_ERR(larval))
520		goto unlock;
521
522	hlist_add_head(&inst->list, &tmpl->instances);
523	inst->tmpl = tmpl;
524
525unlock:
526	up_write(&crypto_alg_sem);
527
528	err = PTR_ERR(larval);
529	if (IS_ERR(larval))
530		goto err;
531
532	crypto_wait_for_test(larval);
533	err = 0;
534
535err:
536	return err;
537}
538EXPORT_SYMBOL_GPL(crypto_register_instance);
539
540int crypto_unregister_instance(struct crypto_alg *alg)
541{
542	int err;
543	struct crypto_instance *inst = (void *)alg;
544	struct crypto_template *tmpl = inst->tmpl;
545	LIST_HEAD(users);
546
547	if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
548		return -EINVAL;
549
550	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
551
552	down_write(&crypto_alg_sem);
553
554	hlist_del_init(&inst->list);
555	err = crypto_remove_alg(alg, &users);
556
557	up_write(&crypto_alg_sem);
558
559	if (err)
560		return err;
561
562	tmpl->free(inst);
563	crypto_remove_final(&users);
564
565	return 0;
566}
567EXPORT_SYMBOL_GPL(crypto_unregister_instance);
568
569int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
570		      struct crypto_instance *inst, u32 mask)
571{
572	int err = -EAGAIN;
573
574	spawn->inst = inst;
575	spawn->mask = mask;
576
577	down_write(&crypto_alg_sem);
578	if (!crypto_is_moribund(alg)) {
579		list_add(&spawn->list, &alg->cra_users);
580		spawn->alg = alg;
581		err = 0;
582	}
583	up_write(&crypto_alg_sem);
584
585	return err;
586}
587EXPORT_SYMBOL_GPL(crypto_init_spawn);
588
589int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
590		       struct crypto_instance *inst,
591		       const struct crypto_type *frontend)
592{
593	int err = -EINVAL;
594
595	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
596		goto out;
597
598	spawn->frontend = frontend;
599	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
600
601out:
602	return err;
603}
604EXPORT_SYMBOL_GPL(crypto_init_spawn2);
605
606void crypto_drop_spawn(struct crypto_spawn *spawn)
607{
608	if (!spawn->alg)
609		return;
610
611	down_write(&crypto_alg_sem);
612	list_del(&spawn->list);
613	up_write(&crypto_alg_sem);
614}
615EXPORT_SYMBOL_GPL(crypto_drop_spawn);
616
617static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
618{
619	struct crypto_alg *alg;
620	struct crypto_alg *alg2;
621
622	down_read(&crypto_alg_sem);
623	alg = spawn->alg;
624	alg2 = alg;
625	if (alg2)
626		alg2 = crypto_mod_get(alg2);
627	up_read(&crypto_alg_sem);
628
629	if (!alg2) {
630		if (alg)
631			crypto_shoot_alg(alg);
632		return ERR_PTR(-EAGAIN);
633	}
634
635	return alg;
636}
637
638struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
639				    u32 mask)
640{
641	struct crypto_alg *alg;
642	struct crypto_tfm *tfm;
643
644	alg = crypto_spawn_alg(spawn);
645	if (IS_ERR(alg))
646		return ERR_CAST(alg);
647
648	tfm = ERR_PTR(-EINVAL);
649	if (unlikely((alg->cra_flags ^ type) & mask))
650		goto out_put_alg;
651
652	tfm = __crypto_alloc_tfm(alg, type, mask);
653	if (IS_ERR(tfm))
654		goto out_put_alg;
655
656	return tfm;
657
658out_put_alg:
659	crypto_mod_put(alg);
660	return tfm;
661}
662EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
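
crypto_spawn_tfm() is what an instance's cra_init callback uses to instantiate its child transform from the spawn stored in the instance context. A sketch of that pattern for the hypothetical "exmode" instance, assuming a context struct that holds a single cipher child; crypto_spawn_cipher() in <crypto/algapi.h> is the wrapper that calls crypto_spawn_tfm() with the cipher type and mask.

struct ex_tfm_ctx {
	struct crypto_cipher *child;
};

static int ex_init_tfm(struct crypto_tfm *tfm)
{
	struct crypto_instance *inst = crypto_tfm_alg_instance(tfm);
	struct crypto_spawn *spawn = crypto_instance_ctx(inst);
	struct ex_tfm_ctx *ctx = crypto_tfm_ctx(tfm);
	struct crypto_cipher *cipher;

	/* Takes a reference on the spawn's algorithm and allocates the
	 * child transform; fails with -EAGAIN if that algorithm has
	 * gone away in the meantime. */
	cipher = crypto_spawn_cipher(spawn);
	if (IS_ERR(cipher))
		return PTR_ERR(cipher);

	ctx->child = cipher;
	return 0;
}

static void ex_exit_tfm(struct crypto_tfm *tfm)
{
	struct ex_tfm_ctx *ctx = crypto_tfm_ctx(tfm);

	crypto_free_cipher(ctx->child);
}

The template's ->alloc callback would hook these up via inst->alg.cra_init / cra_exit and set cra_ctxsize to sizeof(struct ex_tfm_ctx).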
663
664void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
665{
666	struct crypto_alg *alg;
667	struct crypto_tfm *tfm;
668
669	alg = crypto_spawn_alg(spawn);
670	if (IS_ERR(alg))
671		return ERR_CAST(alg);
672
673	tfm = crypto_create_tfm(alg, spawn->frontend);
674	if (IS_ERR(tfm))
675		goto out_put_alg;
676
677	return tfm;
678
679out_put_alg:
680	crypto_mod_put(alg);
681	return tfm;
682}
683EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
684
685int crypto_register_notifier(struct notifier_block *nb)
686{
687	return blocking_notifier_chain_register(&crypto_chain, nb);
688}
689EXPORT_SYMBOL_GPL(crypto_register_notifier);
690
691int crypto_unregister_notifier(struct notifier_block *nb)
692{
693	return blocking_notifier_chain_unregister(&crypto_chain, nb);
694}
695EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
696
697struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
698{
699	struct rtattr *rta = tb[0];
700	struct crypto_attr_type *algt;
701
702	if (!rta)
703		return ERR_PTR(-ENOENT);
704	if (RTA_PAYLOAD(rta) < sizeof(*algt))
705		return ERR_PTR(-EINVAL);
706	if (rta->rta_type != CRYPTOA_TYPE)
707		return ERR_PTR(-EINVAL);
708
709	algt = RTA_DATA(rta);
710
711	return algt;
712}
713EXPORT_SYMBOL_GPL(crypto_get_attr_type);
714
715int crypto_check_attr_type(struct rtattr **tb, u32 type)
716{
717	struct crypto_attr_type *algt;
718
719	algt = crypto_get_attr_type(tb);
720	if (IS_ERR(algt))
721		return PTR_ERR(algt);
722
723	if ((algt->type ^ type) & algt->mask)
724		return -EINVAL;
725
726	return 0;
727}
728EXPORT_SYMBOL_GPL(crypto_check_attr_type);
729
730const char *crypto_attr_alg_name(struct rtattr *rta)
731{
732	struct crypto_attr_alg *alga;
733
734	if (!rta)
735		return ERR_PTR(-ENOENT);
736	if (RTA_PAYLOAD(rta) < sizeof(*alga))
737		return ERR_PTR(-EINVAL);
738	if (rta->rta_type != CRYPTOA_ALG)
739		return ERR_PTR(-EINVAL);
740
741	alga = RTA_DATA(rta);
742	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
743
744	return alga->name;
745}
746EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
747
748struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
749				    const struct crypto_type *frontend,
750				    u32 type, u32 mask)
751{
752	const char *name;
753
754	name = crypto_attr_alg_name(rta);
755	if (IS_ERR(name))
756		return ERR_CAST(name);
757
758	return crypto_find_alg(name, frontend, type, mask);
759}
760EXPORT_SYMBOL_GPL(crypto_attr_alg2);
761
762int crypto_attr_u32(struct rtattr *rta, u32 *num)
763{
764	struct crypto_attr_u32 *nu32;
765
766	if (!rta)
767		return -ENOENT;
768	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
769		return -EINVAL;
770	if (rta->rta_type != CRYPTOA_U32)
771		return -EINVAL;
772
773	nu32 = RTA_DATA(rta);
774	*num = nu32->num;
775
776	return 0;
777}
778EXPORT_SYMBOL_GPL(crypto_attr_u32);
779
780void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
781			     unsigned int head)
782{
783	struct crypto_instance *inst;
784	char *p;
785	int err;
786
787	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
788		    GFP_KERNEL);
789	if (!p)
790		return ERR_PTR(-ENOMEM);
791
792	inst = (void *)(p + head);
793
794	err = -ENAMETOOLONG;
795	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
796		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
797		goto err_free_inst;
798
799	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
800		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
801		goto err_free_inst;
802
803	return p;
804
805err_free_inst:
806	kfree(p);
807	return ERR_PTR(err);
808}
809EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
810
811struct crypto_instance *crypto_alloc_instance(const char *name,
812					      struct crypto_alg *alg)
813{
814	struct crypto_instance *inst;
815	struct crypto_spawn *spawn;
816	int err;
817
818	inst = crypto_alloc_instance2(name, alg, 0);
819	if (IS_ERR(inst))
820		goto out;
821
822	spawn = crypto_instance_ctx(inst);
823	err = crypto_init_spawn(spawn, alg, inst,
824				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
825
826	if (err)
827		goto err_free_inst;
828
829	return inst;
830
831err_free_inst:
832	kfree(inst);
833	inst = ERR_PTR(err);
834
835out:
836	return inst;
837}
838EXPORT_SYMBOL_GPL(crypto_alloc_instance);
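
crypto_alloc_instance() does the boilerplate a template's ->alloc callback needs: it allocates the instance, builds the "template(child)" cra_name and cra_driver_name, and initialises a spawn on the child algorithm. Continuing the hypothetical "exmode" template sketched earlier, and assuming, like the classic ecb/cbc templates, that the mode wraps a plain cipher and exposes a blkcipher (with <linux/slab.h> and <linux/err.h> included), its alloc/free pair might look roughly like this. A real template would also fill in the remaining inst->alg fields (flags, block size, context size, the operation callbacks, cra_init/cra_exit) before returning.

static struct crypto_instance *ex_tmpl_alloc(struct rtattr **tb)
{
	struct crypto_instance *inst;
	struct crypto_alg *alg;
	int err;

	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_BLKCIPHER);
	if (err)
		return ERR_PTR(err);

	/* crypto_get_attr_alg() is the <crypto/algapi.h> wrapper around
	 * crypto_attr_alg2(): it resolves tb[1] through
	 * crypto_attr_alg_name() and crypto_find_alg(). */
	alg = crypto_get_attr_alg(tb, CRYPTO_ALG_TYPE_CIPHER,
				  CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(alg))
		return ERR_CAST(alg);

	inst = crypto_alloc_instance("exmode", alg);

	/* The instance's spawn now holds the child; drop our reference. */
	crypto_mod_put(alg);
	return inst;
}

static void ex_tmpl_free(struct crypto_instance *inst)
{
	crypto_drop_spawn(crypto_instance_ctx(inst));
	kfree(inst);
}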
839
840void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
841{
842	INIT_LIST_HEAD(&queue->list);
843	queue->backlog = &queue->list;
844	queue->qlen = 0;
845	queue->max_qlen = max_qlen;
846}
847EXPORT_SYMBOL_GPL(crypto_init_queue);
848
849int crypto_enqueue_request(struct crypto_queue *queue,
850			   struct crypto_async_request *request)
851{
852	int err = -EINPROGRESS;
853
854	if (unlikely(queue->qlen >= queue->max_qlen)) {
855		err = -EBUSY;
856		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
857			goto out;
858		if (queue->backlog == &queue->list)
859			queue->backlog = &request->list;
860	}
861
862	queue->qlen++;
863	list_add_tail(&request->list, &queue->list);
864
865out:
866	return err;
867}
868EXPORT_SYMBOL_GPL(crypto_enqueue_request);
869
870void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset)
871{
872	struct list_head *request;
873
874	if (unlikely(!queue->qlen))
875		return NULL;
876
877	queue->qlen--;
878
879	if (queue->backlog != &queue->list)
880		queue->backlog = queue->backlog->next;
881
882	request = queue->list.next;
883	list_del(request);
884
885	return (char *)list_entry(request, struct crypto_async_request, list) -
886	       offset;
887}
888EXPORT_SYMBOL_GPL(__crypto_dequeue_request);
889
890struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
891{
892	return __crypto_dequeue_request(queue, 0);
893}
894EXPORT_SYMBOL_GPL(crypto_dequeue_request);
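
The crypto_queue helpers above are typically used by an asynchronous driver as a small producer/consumer pair: requests are enqueued under the driver's lock, and a worker pulls them off one at a time, notifying any backlogged submitter as its request gets promoted to the active queue. A rough sketch under those assumptions; the engine structure and names are hypothetical, and crypto_get_backlog() is the <crypto/algapi.h> helper that peeks at the backlog head.

#include <crypto/algapi.h>
#include <linux/spinlock.h>

struct ex_engine {
	spinlock_t		lock;
	struct crypto_queue	queue;
};

static void ex_engine_setup(struct ex_engine *engine)
{
	spin_lock_init(&engine->lock);
	crypto_init_queue(&engine->queue, 32);	/* max_qlen of 32 */
}

static int ex_engine_submit(struct ex_engine *engine,
			    struct crypto_async_request *req)
{
	unsigned long flags;
	int err;

	spin_lock_irqsave(&engine->lock, flags);
	err = crypto_enqueue_request(&engine->queue, req);
	spin_unlock_irqrestore(&engine->lock, flags);

	/* -EINPROGRESS: queued.  -EBUSY: the queue was full; the request
	 * was kept (on the backlog) only if it set
	 * CRYPTO_TFM_REQ_MAY_BACKLOG, otherwise it was rejected. */
	return err;
}

static void ex_engine_do_one(struct ex_engine *engine)
{
	struct crypto_async_request *req, *backlog;
	unsigned long flags;

	spin_lock_irqsave(&engine->lock, flags);
	backlog = crypto_get_backlog(&engine->queue);
	req = crypto_dequeue_request(&engine->queue);
	spin_unlock_irqrestore(&engine->lock, flags);

	if (!req)
		return;

	/* Tell a backlogged submitter its request is now really queued. */
	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	/* ... process req, then report the result via
	 * req->complete(req, err) ... */
}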
895
896int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
897{
898	struct crypto_async_request *req;
899
900	list_for_each_entry(req, &queue->list, list) {
901		if (req->tfm == tfm)
902			return 1;
903	}
904
905	return 0;
906}
907EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
908
909static inline void crypto_inc_byte(u8 *a, unsigned int size)
910{
911	u8 *b = (a + size);
912	u8 c;
913
914	for (; size; size--) {
915		c = *--b + 1;
916		*b = c;
917		if (c)
918			break;
919	}
920}
921
922void crypto_inc(u8 *a, unsigned int size)
923{
924	__be32 *b = (__be32 *)(a + size);
925	u32 c;
926
927	for (; size >= 4; size -= 4) {
928		c = be32_to_cpu(*--b) + 1;
929		*b = cpu_to_be32(c);
930		if (c)
931			return;
932	}
933
934	crypto_inc_byte(a, size);
935}
936EXPORT_SYMBOL_GPL(crypto_inc);
937
938static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
939{
940	for (; size; size--)
941		*a++ ^= *b++;
942}
943
944void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
945{
946	u32 *a = (u32 *)dst;
947	u32 *b = (u32 *)src;
948
949	for (; size >= 4; size -= 4)
950		*a++ ^= *b++;
951
952	crypto_xor_byte((u8 *)a, (u8 *)b, size);
953}
954EXPORT_SYMBOL_GPL(crypto_xor);
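
crypto_inc() treats the buffer as a big-endian counter and crypto_xor() XORs it word by word, so callers pass suitably aligned, block-sized buffers. A CTR-style mode would combine them roughly as in this illustrative fragment (not code from this file):

#define EX_BLOCKSIZE	16

/* XOR one block of keystream into the data, then bump the counter. */
static void ex_ctr_step(u8 *data, const u8 *keystream, u8 *ctrblk)
{
	crypto_xor(data, keystream, EX_BLOCKSIZE);	/* data ^= keystream */
	crypto_inc(ctrblk, EX_BLOCKSIZE);		/* big-endian counter++ */
}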
955
956static int __init crypto_algapi_init(void)
957{
958	crypto_init_proc();
959	return 0;
960}
961
962static void __exit crypto_algapi_exit(void)
963{
964	crypto_exit_proc();
965}
966
967module_init(crypto_algapi_init);
968module_exit(crypto_algapi_exit);
969
970MODULE_LICENSE("GPL");
971MODULE_DESCRIPTION("Cryptographic algorithms API");
crypto/algapi.c, Linux v3.5.6
  1/*
  2 * Cryptographic API for algorithms (i.e., low-level API).
  3 *
  4 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
  5 *
  6 * This program is free software; you can redistribute it and/or modify it
  7 * under the terms of the GNU General Public License as published by the Free
  8 * Software Foundation; either version 2 of the License, or (at your option)
  9 * any later version.
 10 *
 11 */
 12
 13#include <linux/err.h>
 14#include <linux/errno.h>
 15#include <linux/init.h>
 16#include <linux/kernel.h>
 17#include <linux/list.h>
 18#include <linux/module.h>
 19#include <linux/rtnetlink.h>
 20#include <linux/slab.h>
 21#include <linux/string.h>
 22
 23#include "internal.h"
 24
 25static LIST_HEAD(crypto_template_list);
 26
 27void crypto_larval_error(const char *name, u32 type, u32 mask)
 28{
 29	struct crypto_alg *alg;
 30
 31	alg = crypto_alg_lookup(name, type, mask);
 32
 33	if (alg) {
 34		if (crypto_is_larval(alg)) {
 35			struct crypto_larval *larval = (void *)alg;
 36			complete_all(&larval->completion);
 37		}
 38		crypto_mod_put(alg);
 39	}
 40}
 41EXPORT_SYMBOL_GPL(crypto_larval_error);
 42
 43static inline int crypto_set_driver_name(struct crypto_alg *alg)
 44{
 45	static const char suffix[] = "-generic";
 46	char *driver_name = alg->cra_driver_name;
 47	int len;
 48
 49	if (*driver_name)
 50		return 0;
 51
 52	len = strlcpy(driver_name, alg->cra_name, CRYPTO_MAX_ALG_NAME);
 53	if (len + sizeof(suffix) > CRYPTO_MAX_ALG_NAME)
 54		return -ENAMETOOLONG;
 55
 56	memcpy(driver_name + len, suffix, sizeof(suffix));
 57	return 0;
 58}
 59
 60static int crypto_check_alg(struct crypto_alg *alg)
 61{
 62	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
 63		return -EINVAL;
 64
 65	if (alg->cra_blocksize > PAGE_SIZE / 8)
 66		return -EINVAL;
 67
 68	if (alg->cra_priority < 0)
 69		return -EINVAL;
 70
 71	return crypto_set_driver_name(alg);
 72}
 73
 74static void crypto_destroy_instance(struct crypto_alg *alg)
 75{
 76	struct crypto_instance *inst = (void *)alg;
 77	struct crypto_template *tmpl = inst->tmpl;
 78
 79	tmpl->free(inst);
 80	crypto_tmpl_put(tmpl);
 81}
 82
 83static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
 84					    struct list_head *stack,
 85					    struct list_head *top,
 86					    struct list_head *secondary_spawns)
 87{
 88	struct crypto_spawn *spawn, *n;
 89
 90	if (list_empty(stack))
 91		return NULL;
 92
 93	spawn = list_first_entry(stack, struct crypto_spawn, list);
 94	n = list_entry(spawn->list.next, struct crypto_spawn, list);
 95
 96	if (spawn->alg && &n->list != stack && !n->alg)
 97		n->alg = (n->list.next == stack) ? alg :
 98			 &list_entry(n->list.next, struct crypto_spawn,
 99				     list)->inst->alg;
100
101	list_move(&spawn->list, secondary_spawns);
102
103	return &n->list == stack ? top : &n->inst->alg.cra_users;
104}
105
106static void crypto_remove_spawn(struct crypto_spawn *spawn,
107				struct list_head *list)
108{
109	struct crypto_instance *inst = spawn->inst;
110	struct crypto_template *tmpl = inst->tmpl;
111
112	if (crypto_is_dead(&inst->alg))
113		return;
114
115	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
116	if (hlist_unhashed(&inst->list))
117		return;
118
119	if (!tmpl || !crypto_tmpl_get(tmpl))
120		return;
121
122	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, &inst->alg);
123	list_move(&inst->alg.cra_list, list);
124	hlist_del(&inst->list);
125	inst->alg.cra_destroy = crypto_destroy_instance;
126
127	BUG_ON(!list_empty(&inst->alg.cra_users));
128}
129
130void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
131			  struct crypto_alg *nalg)
132{
133	u32 new_type = (nalg ?: alg)->cra_flags;
134	struct crypto_spawn *spawn, *n;
135	LIST_HEAD(secondary_spawns);
136	struct list_head *spawns;
137	LIST_HEAD(stack);
138	LIST_HEAD(top);
139
140	spawns = &alg->cra_users;
141	list_for_each_entry_safe(spawn, n, spawns, list) {
142		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
143			continue;
144
145		list_move(&spawn->list, &top);
146	}
147
148	spawns = &top;
149	do {
150		while (!list_empty(spawns)) {
151			struct crypto_instance *inst;
152
153			spawn = list_first_entry(spawns, struct crypto_spawn,
154						 list);
155			inst = spawn->inst;
156
157			BUG_ON(&inst->alg == alg);
158
159			list_move(&spawn->list, &stack);
160
161			if (&inst->alg == nalg)
162				break;
163
164			spawn->alg = NULL;
165			spawns = &inst->alg.cra_users;
166		}
167	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
168					      &secondary_spawns)));
169
170	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
171		if (spawn->alg)
172			list_move(&spawn->list, &spawn->alg->cra_users);
173		else
174			crypto_remove_spawn(spawn, list);
175	}
176}
177EXPORT_SYMBOL_GPL(crypto_remove_spawns);
178
179static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
180{
181	struct crypto_alg *q;
182	struct crypto_larval *larval;
183	int ret = -EAGAIN;
184
185	if (crypto_is_dead(alg))
186		goto err;
187
188	INIT_LIST_HEAD(&alg->cra_users);
189
190	/* No cheating! */
191	alg->cra_flags &= ~CRYPTO_ALG_TESTED;
192
193	ret = -EEXIST;
194
195	atomic_set(&alg->cra_refcnt, 1);
196	list_for_each_entry(q, &crypto_alg_list, cra_list) {
197		if (q == alg)
198			goto err;
199
200		if (crypto_is_moribund(q))
201			continue;
202
203		if (crypto_is_larval(q)) {
204			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
205				goto err;
206			continue;
207		}
208
209		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
210		    !strcmp(q->cra_name, alg->cra_driver_name))
211			goto err;
212	}
213
214	larval = crypto_larval_alloc(alg->cra_name,
215				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
216	if (IS_ERR(larval))
217		goto out;
218
219	ret = -ENOENT;
220	larval->adult = crypto_mod_get(alg);
221	if (!larval->adult)
222		goto free_larval;
223
224	atomic_set(&larval->alg.cra_refcnt, 1);
225	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
226	       CRYPTO_MAX_ALG_NAME);
227	larval->alg.cra_priority = alg->cra_priority;
228
229	list_add(&alg->cra_list, &crypto_alg_list);
230	list_add(&larval->alg.cra_list, &crypto_alg_list);
231
232out:
233	return larval;
234
235free_larval:
236	kfree(larval);
237err:
238	larval = ERR_PTR(ret);
239	goto out;
240}
241
242void crypto_alg_tested(const char *name, int err)
243{
244	struct crypto_larval *test;
245	struct crypto_alg *alg;
246	struct crypto_alg *q;
247	LIST_HEAD(list);
248
249	down_write(&crypto_alg_sem);
250	list_for_each_entry(q, &crypto_alg_list, cra_list) {
251		if (crypto_is_moribund(q) || !crypto_is_larval(q))
252			continue;
253
254		test = (struct crypto_larval *)q;
255
256		if (!strcmp(q->cra_driver_name, name))
257			goto found;
258	}
259
260	printk(KERN_ERR "alg: Unexpected test result for %s: %d\n", name, err);
261	goto unlock;
262
263found:
264	q->cra_flags |= CRYPTO_ALG_DEAD;
265	alg = test->adult;
266	if (err || list_empty(&alg->cra_list))
267		goto complete;
268
269	alg->cra_flags |= CRYPTO_ALG_TESTED;
270
271	list_for_each_entry(q, &crypto_alg_list, cra_list) {
272		if (q == alg)
273			continue;
274
275		if (crypto_is_moribund(q))
276			continue;
277
278		if (crypto_is_larval(q)) {
279			struct crypto_larval *larval = (void *)q;
280
281			/*
282			 * Check to see if either our generic name or
283			 * specific name can satisfy the name requested
284			 * by the larval entry q.
285			 */
286			if (strcmp(alg->cra_name, q->cra_name) &&
287			    strcmp(alg->cra_driver_name, q->cra_name))
288				continue;
289
290			if (larval->adult)
291				continue;
292			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
293				continue;
294			if (!crypto_mod_get(alg))
295				continue;
296
297			larval->adult = alg;
298			complete_all(&larval->completion);
299			continue;
300		}
301
302		if (strcmp(alg->cra_name, q->cra_name))
303			continue;
304
305		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
306		    q->cra_priority > alg->cra_priority)
307			continue;
308
309		crypto_remove_spawns(q, &list, alg);
310	}
311
312complete:
313	complete_all(&test->completion);
314
315unlock:
316	up_write(&crypto_alg_sem);
317
318	crypto_remove_final(&list);
319}
320EXPORT_SYMBOL_GPL(crypto_alg_tested);
321
322void crypto_remove_final(struct list_head *list)
323{
324	struct crypto_alg *alg;
325	struct crypto_alg *n;
326
327	list_for_each_entry_safe(alg, n, list, cra_list) {
328		list_del_init(&alg->cra_list);
329		crypto_alg_put(alg);
330	}
331}
332EXPORT_SYMBOL_GPL(crypto_remove_final);
333
334static void crypto_wait_for_test(struct crypto_larval *larval)
335{
336	int err;
337
338	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
339	if (err != NOTIFY_STOP) {
340		if (WARN_ON(err != NOTIFY_DONE))
341			goto out;
342		crypto_alg_tested(larval->alg.cra_driver_name, 0);
343	}
344
345	err = wait_for_completion_interruptible(&larval->completion);
346	WARN_ON(err);
347
348out:
349	crypto_larval_kill(&larval->alg);
350}
351
352int crypto_register_alg(struct crypto_alg *alg)
353{
354	struct crypto_larval *larval;
355	int err;
356
357	err = crypto_check_alg(alg);
358	if (err)
359		return err;
360
361	down_write(&crypto_alg_sem);
362	larval = __crypto_register_alg(alg);
363	up_write(&crypto_alg_sem);
364
365	if (IS_ERR(larval))
366		return PTR_ERR(larval);
367
368	crypto_wait_for_test(larval);
369	return 0;
370}
371EXPORT_SYMBOL_GPL(crypto_register_alg);
372
373static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
374{
375	if (unlikely(list_empty(&alg->cra_list)))
376		return -ENOENT;
377
378	alg->cra_flags |= CRYPTO_ALG_DEAD;
379
380	crypto_notify(CRYPTO_MSG_ALG_UNREGISTER, alg);
381	list_del_init(&alg->cra_list);
382	crypto_remove_spawns(alg, list, NULL);
383
384	return 0;
385}
386
387int crypto_unregister_alg(struct crypto_alg *alg)
388{
389	int ret;
390	LIST_HEAD(list);
391
392	down_write(&crypto_alg_sem);
393	ret = crypto_remove_alg(alg, &list);
394	up_write(&crypto_alg_sem);
395
396	if (ret)
397		return ret;
398
399	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
400	if (alg->cra_destroy)
401		alg->cra_destroy(alg);
402
403	crypto_remove_final(&list);
404	return 0;
405}
406EXPORT_SYMBOL_GPL(crypto_unregister_alg);
407
408int crypto_register_algs(struct crypto_alg *algs, int count)
409{
410	int i, ret;
411
412	for (i = 0; i < count; i++) {
413		ret = crypto_register_alg(&algs[i]);
414		if (ret)
415			goto err;
416	}
417
418	return 0;
419
420err:
421	for (--i; i >= 0; --i)
422		crypto_unregister_alg(&algs[i]);
423
424	return ret;
425}
426EXPORT_SYMBOL_GPL(crypto_register_algs);
427
428int crypto_unregister_algs(struct crypto_alg *algs, int count)
429{
430	int i, ret;
431
432	for (i = 0; i < count; i++) {
433		ret = crypto_unregister_alg(&algs[i]);
434		if (ret)
435			pr_err("Failed to unregister %s %s: %d\n",
436			       algs[i].cra_driver_name, algs[i].cra_name, ret);
437	}
438
439	return 0;
440}
441EXPORT_SYMBOL_GPL(crypto_unregister_algs);
442
443int crypto_register_template(struct crypto_template *tmpl)
444{
445	struct crypto_template *q;
446	int err = -EEXIST;
447
448	down_write(&crypto_alg_sem);
449
450	list_for_each_entry(q, &crypto_template_list, list) {
451		if (q == tmpl)
452			goto out;
453	}
454
455	list_add(&tmpl->list, &crypto_template_list);
456	crypto_notify(CRYPTO_MSG_TMPL_REGISTER, tmpl);
457	err = 0;
458out:
459	up_write(&crypto_alg_sem);
460	return err;
461}
462EXPORT_SYMBOL_GPL(crypto_register_template);
463
464void crypto_unregister_template(struct crypto_template *tmpl)
465{
466	struct crypto_instance *inst;
467	struct hlist_node *p, *n;
468	struct hlist_head *list;
469	LIST_HEAD(users);
470
471	down_write(&crypto_alg_sem);
472
473	BUG_ON(list_empty(&tmpl->list));
474	list_del_init(&tmpl->list);
475
476	list = &tmpl->instances;
477	hlist_for_each_entry(inst, p, list, list) {
478		int err = crypto_remove_alg(&inst->alg, &users);
479		BUG_ON(err);
480	}
481
482	crypto_notify(CRYPTO_MSG_TMPL_UNREGISTER, tmpl);
483
484	up_write(&crypto_alg_sem);
485
486	hlist_for_each_entry_safe(inst, p, n, list, list) {
487		BUG_ON(atomic_read(&inst->alg.cra_refcnt) != 1);
488		tmpl->free(inst);
489	}
490	crypto_remove_final(&users);
491}
492EXPORT_SYMBOL_GPL(crypto_unregister_template);
493
494static struct crypto_template *__crypto_lookup_template(const char *name)
495{
496	struct crypto_template *q, *tmpl = NULL;
497
498	down_read(&crypto_alg_sem);
499	list_for_each_entry(q, &crypto_template_list, list) {
500		if (strcmp(q->name, name))
501			continue;
502		if (unlikely(!crypto_tmpl_get(q)))
503			continue;
504
505		tmpl = q;
506		break;
507	}
508	up_read(&crypto_alg_sem);
509
510	return tmpl;
511}
512
513struct crypto_template *crypto_lookup_template(const char *name)
514{
515	return try_then_request_module(__crypto_lookup_template(name), name);
516}
517EXPORT_SYMBOL_GPL(crypto_lookup_template);
518
519int crypto_register_instance(struct crypto_template *tmpl,
520			     struct crypto_instance *inst)
521{
522	struct crypto_larval *larval;
523	int err;
524
525	err = crypto_check_alg(&inst->alg);
526	if (err)
527		goto err;
528
529	inst->alg.cra_module = tmpl->module;
530	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
531
532	down_write(&crypto_alg_sem);
533
534	larval = __crypto_register_alg(&inst->alg);
535	if (IS_ERR(larval))
536		goto unlock;
537
538	hlist_add_head(&inst->list, &tmpl->instances);
539	inst->tmpl = tmpl;
540
541unlock:
542	up_write(&crypto_alg_sem);
543
544	err = PTR_ERR(larval);
545	if (IS_ERR(larval))
546		goto err;
547
548	crypto_wait_for_test(larval);
549	err = 0;
550
551err:
552	return err;
553}
554EXPORT_SYMBOL_GPL(crypto_register_instance);
555
556int crypto_unregister_instance(struct crypto_alg *alg)
557{
558	int err;
559	struct crypto_instance *inst = (void *)alg;
560	struct crypto_template *tmpl = inst->tmpl;
561	LIST_HEAD(users);
562
563	if (!(alg->cra_flags & CRYPTO_ALG_INSTANCE))
564		return -EINVAL;
565
566	BUG_ON(atomic_read(&alg->cra_refcnt) != 1);
567
568	down_write(&crypto_alg_sem);
569
570	hlist_del_init(&inst->list);
571	err = crypto_remove_alg(alg, &users);
572
573	up_write(&crypto_alg_sem);
574
575	if (err)
576		return err;
577
578	tmpl->free(inst);
579	crypto_remove_final(&users);
580
581	return 0;
582}
583EXPORT_SYMBOL_GPL(crypto_unregister_instance);
584
585int crypto_init_spawn(struct crypto_spawn *spawn, struct crypto_alg *alg,
586		      struct crypto_instance *inst, u32 mask)
587{
588	int err = -EAGAIN;
589
590	spawn->inst = inst;
591	spawn->mask = mask;
592
593	down_write(&crypto_alg_sem);
594	if (!crypto_is_moribund(alg)) {
595		list_add(&spawn->list, &alg->cra_users);
596		spawn->alg = alg;
597		err = 0;
598	}
599	up_write(&crypto_alg_sem);
600
601	return err;
602}
603EXPORT_SYMBOL_GPL(crypto_init_spawn);
604
605int crypto_init_spawn2(struct crypto_spawn *spawn, struct crypto_alg *alg,
606		       struct crypto_instance *inst,
607		       const struct crypto_type *frontend)
608{
609	int err = -EINVAL;
610
611	if ((alg->cra_flags ^ frontend->type) & frontend->maskset)
612		goto out;
613
614	spawn->frontend = frontend;
615	err = crypto_init_spawn(spawn, alg, inst, frontend->maskset);
616
617out:
618	return err;
619}
620EXPORT_SYMBOL_GPL(crypto_init_spawn2);
621
622void crypto_drop_spawn(struct crypto_spawn *spawn)
623{
624	if (!spawn->alg)
625		return;
626
627	down_write(&crypto_alg_sem);
628	list_del(&spawn->list);
629	up_write(&crypto_alg_sem);
630}
631EXPORT_SYMBOL_GPL(crypto_drop_spawn);
632
633static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
634{
635	struct crypto_alg *alg;
636	struct crypto_alg *alg2;
637
638	down_read(&crypto_alg_sem);
639	alg = spawn->alg;
640	alg2 = alg;
641	if (alg2)
642		alg2 = crypto_mod_get(alg2);
643	up_read(&crypto_alg_sem);
644
645	if (!alg2) {
646		if (alg)
647			crypto_shoot_alg(alg);
648		return ERR_PTR(-EAGAIN);
649	}
650
651	return alg;
652}
653
654struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
655				    u32 mask)
656{
657	struct crypto_alg *alg;
658	struct crypto_tfm *tfm;
659
660	alg = crypto_spawn_alg(spawn);
661	if (IS_ERR(alg))
662		return ERR_CAST(alg);
663
664	tfm = ERR_PTR(-EINVAL);
665	if (unlikely((alg->cra_flags ^ type) & mask))
666		goto out_put_alg;
667
668	tfm = __crypto_alloc_tfm(alg, type, mask);
669	if (IS_ERR(tfm))
670		goto out_put_alg;
671
672	return tfm;
673
674out_put_alg:
675	crypto_mod_put(alg);
676	return tfm;
677}
678EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
679
680void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
681{
682	struct crypto_alg *alg;
683	struct crypto_tfm *tfm;
684
685	alg = crypto_spawn_alg(spawn);
686	if (IS_ERR(alg))
687		return ERR_CAST(alg);
688
689	tfm = crypto_create_tfm(alg, spawn->frontend);
690	if (IS_ERR(tfm))
691		goto out_put_alg;
692
693	return tfm;
694
695out_put_alg:
696	crypto_mod_put(alg);
697	return tfm;
698}
699EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
700
701int crypto_register_notifier(struct notifier_block *nb)
702{
703	return blocking_notifier_chain_register(&crypto_chain, nb);
704}
705EXPORT_SYMBOL_GPL(crypto_register_notifier);
706
707int crypto_unregister_notifier(struct notifier_block *nb)
708{
709	return blocking_notifier_chain_unregister(&crypto_chain, nb);
710}
711EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
712
713struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
714{
715	struct rtattr *rta = tb[0];
716	struct crypto_attr_type *algt;
717
718	if (!rta)
719		return ERR_PTR(-ENOENT);
720	if (RTA_PAYLOAD(rta) < sizeof(*algt))
721		return ERR_PTR(-EINVAL);
722	if (rta->rta_type != CRYPTOA_TYPE)
723		return ERR_PTR(-EINVAL);
724
725	algt = RTA_DATA(rta);
726
727	return algt;
728}
729EXPORT_SYMBOL_GPL(crypto_get_attr_type);
730
731int crypto_check_attr_type(struct rtattr **tb, u32 type)
732{
733	struct crypto_attr_type *algt;
734
735	algt = crypto_get_attr_type(tb);
736	if (IS_ERR(algt))
737		return PTR_ERR(algt);
738
739	if ((algt->type ^ type) & algt->mask)
740		return -EINVAL;
741
742	return 0;
743}
744EXPORT_SYMBOL_GPL(crypto_check_attr_type);
745
746const char *crypto_attr_alg_name(struct rtattr *rta)
747{
748	struct crypto_attr_alg *alga;
749
750	if (!rta)
751		return ERR_PTR(-ENOENT);
752	if (RTA_PAYLOAD(rta) < sizeof(*alga))
753		return ERR_PTR(-EINVAL);
754	if (rta->rta_type != CRYPTOA_ALG)
755		return ERR_PTR(-EINVAL);
756
757	alga = RTA_DATA(rta);
758	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
759
760	return alga->name;
761}
762EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
763
764struct crypto_alg *crypto_attr_alg2(struct rtattr *rta,
765				    const struct crypto_type *frontend,
766				    u32 type, u32 mask)
767{
768	const char *name;
769	int err;
770
771	name = crypto_attr_alg_name(rta);
772	err = PTR_ERR(name);
773	if (IS_ERR(name))
774		return ERR_PTR(err);
775
776	return crypto_find_alg(name, frontend, type, mask);
777}
778EXPORT_SYMBOL_GPL(crypto_attr_alg2);
779
780int crypto_attr_u32(struct rtattr *rta, u32 *num)
781{
782	struct crypto_attr_u32 *nu32;
783
784	if (!rta)
785		return -ENOENT;
786	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
787		return -EINVAL;
788	if (rta->rta_type != CRYPTOA_U32)
789		return -EINVAL;
790
791	nu32 = RTA_DATA(rta);
792	*num = nu32->num;
793
794	return 0;
795}
796EXPORT_SYMBOL_GPL(crypto_attr_u32);
797
798void *crypto_alloc_instance2(const char *name, struct crypto_alg *alg,
799			     unsigned int head)
800{
801	struct crypto_instance *inst;
802	char *p;
803	int err;
804
805	p = kzalloc(head + sizeof(*inst) + sizeof(struct crypto_spawn),
806		    GFP_KERNEL);
807	if (!p)
808		return ERR_PTR(-ENOMEM);
809
810	inst = (void *)(p + head);
811
812	err = -ENAMETOOLONG;
813	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
814		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
815		goto err_free_inst;
816
817	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
818		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
819		goto err_free_inst;
820
821	return p;
822
823err_free_inst:
824	kfree(p);
825	return ERR_PTR(err);
826}
827EXPORT_SYMBOL_GPL(crypto_alloc_instance2);
828
829struct crypto_instance *crypto_alloc_instance(const char *name,
830					      struct crypto_alg *alg)
831{
832	struct crypto_instance *inst;
833	struct crypto_spawn *spawn;
834	int err;
835
836	inst = crypto_alloc_instance2(name, alg, 0);
837	if (IS_ERR(inst))
838		goto out;
839
840	spawn = crypto_instance_ctx(inst);
841	err = crypto_init_spawn(spawn, alg, inst,
842				CRYPTO_ALG_TYPE_MASK | CRYPTO_ALG_ASYNC);
843
844	if (err)
845		goto err_free_inst;
846
847	return inst;
848
849err_free_inst:
850	kfree(inst);
851	inst = ERR_PTR(err);
852
853out:
854	return inst;
855}
856EXPORT_SYMBOL_GPL(crypto_alloc_instance);
857
858void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
859{
860	INIT_LIST_HEAD(&queue->list);
861	queue->backlog = &queue->list;
862	queue->qlen = 0;
863	queue->max_qlen = max_qlen;
864}
865EXPORT_SYMBOL_GPL(crypto_init_queue);
866
867int crypto_enqueue_request(struct crypto_queue *queue,
868			   struct crypto_async_request *request)
869{
870	int err = -EINPROGRESS;
871
872	if (unlikely(queue->qlen >= queue->max_qlen)) {
873		err = -EBUSY;
874		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG))
875			goto out;
876		if (queue->backlog == &queue->list)
877			queue->backlog = &request->list;
878	}
879
880	queue->qlen++;
881	list_add_tail(&request->list, &queue->list);
882
883out:
884	return err;
885}
886EXPORT_SYMBOL_GPL(crypto_enqueue_request);
887
888void *__crypto_dequeue_request(struct crypto_queue *queue, unsigned int offset)
889{
890	struct list_head *request;
891
892	if (unlikely(!queue->qlen))
893		return NULL;
894
895	queue->qlen--;
896
897	if (queue->backlog != &queue->list)
898		queue->backlog = queue->backlog->next;
899
900	request = queue->list.next;
901	list_del(request);
902
903	return (char *)list_entry(request, struct crypto_async_request, list) -
904	       offset;
905}
906EXPORT_SYMBOL_GPL(__crypto_dequeue_request);
907
908struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
909{
910	return __crypto_dequeue_request(queue, 0);
911}
912EXPORT_SYMBOL_GPL(crypto_dequeue_request);
913
914int crypto_tfm_in_queue(struct crypto_queue *queue, struct crypto_tfm *tfm)
915{
916	struct crypto_async_request *req;
917
918	list_for_each_entry(req, &queue->list, list) {
919		if (req->tfm == tfm)
920			return 1;
921	}
922
923	return 0;
924}
925EXPORT_SYMBOL_GPL(crypto_tfm_in_queue);
926
927static inline void crypto_inc_byte(u8 *a, unsigned int size)
928{
929	u8 *b = (a + size);
930	u8 c;
931
932	for (; size; size--) {
933		c = *--b + 1;
934		*b = c;
935		if (c)
936			break;
937	}
938}
939
940void crypto_inc(u8 *a, unsigned int size)
941{
942	__be32 *b = (__be32 *)(a + size);
943	u32 c;
944
945	for (; size >= 4; size -= 4) {
946		c = be32_to_cpu(*--b) + 1;
947		*b = cpu_to_be32(c);
948		if (c)
949			return;
950	}
951
952	crypto_inc_byte(a, size);
953}
954EXPORT_SYMBOL_GPL(crypto_inc);
955
956static inline void crypto_xor_byte(u8 *a, const u8 *b, unsigned int size)
957{
958	for (; size; size--)
959		*a++ ^= *b++;
960}
961
962void crypto_xor(u8 *dst, const u8 *src, unsigned int size)
963{
964	u32 *a = (u32 *)dst;
965	u32 *b = (u32 *)src;
966
967	for (; size >= 4; size -= 4)
968		*a++ ^= *b++;
969
970	crypto_xor_byte((u8 *)a, (u8 *)b, size);
971}
972EXPORT_SYMBOL_GPL(crypto_xor);
973
974static int __init crypto_algapi_init(void)
975{
976	crypto_init_proc();
977	return 0;
978}
979
980static void __exit crypto_algapi_exit(void)
981{
982	crypto_exit_proc();
983}
984
985module_init(crypto_algapi_init);
986module_exit(crypto_algapi_exit);
987
988MODULE_LICENSE("GPL");
989MODULE_DESCRIPTION("Cryptographic algorithms API");