crypto/api.c (v6.13.7)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Scatterlist Cryptographic API.
  4 *
  5 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  6 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
  7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
  8 *
  9 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 10 * and Nettle, by Niels Möller.
 11 */
 12
 13#include <linux/err.h>
 14#include <linux/errno.h>
 15#include <linux/jump_label.h>
 16#include <linux/kernel.h>
 17#include <linux/kmod.h>
 18#include <linux/module.h>
 19#include <linux/param.h>
 20#include <linux/sched/signal.h>
 21#include <linux/slab.h>
 22#include <linux/string.h>
 23#include <linux/completion.h>
 24#include "internal.h"
 25
 26LIST_HEAD(crypto_alg_list);
 27EXPORT_SYMBOL_GPL(crypto_alg_list);
 28DECLARE_RWSEM(crypto_alg_sem);
 29EXPORT_SYMBOL_GPL(crypto_alg_sem);
 30
 31BLOCKING_NOTIFIER_HEAD(crypto_chain);
 32EXPORT_SYMBOL_GPL(crypto_chain);
 33
 34#if IS_BUILTIN(CONFIG_CRYPTO_ALGAPI) && \
 35    !IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS)
 36DEFINE_STATIC_KEY_FALSE(__crypto_boot_test_finished);
 37#endif
 38
 39static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
 40static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
 41					    u32 mask);
 42
 43struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
 44{
 45	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
 46}
 47EXPORT_SYMBOL_GPL(crypto_mod_get);
 48
 49void crypto_mod_put(struct crypto_alg *alg)
 50{
 51	struct module *module = alg->cra_module;
 52
 53	crypto_alg_put(alg);
 54	module_put(module);
 55}
 56EXPORT_SYMBOL_GPL(crypto_mod_put);
 57
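Editor's note: crypto_mod_get() pins both the algorithm's refcount and its owning module, so every successful call must be balanced by crypto_mod_put(). A minimal sketch of the pattern (not from this file; q stands for some already-registered algorithm):

	struct crypto_alg *alg = crypto_mod_get(q);
	if (alg) {
		/* alg is usable here and its module cannot be unloaded */
		crypto_mod_put(alg);	/* drops the alg ref, then the module ref */
	}
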
 58static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
 59					      u32 mask)
 60{
 61	struct crypto_alg *q, *alg = NULL;
 62	int best = -2;
 63
 64	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 65		int exact, fuzzy;
 66
 67		if (crypto_is_moribund(q))
 68			continue;
 69
 70		if ((q->cra_flags ^ type) & mask)
 71			continue;
 72
 73		exact = !strcmp(q->cra_driver_name, name);
 74		fuzzy = !strcmp(q->cra_name, name);
 75		if (!exact && !(fuzzy && q->cra_priority > best))
 76			continue;
 77
 78		if (unlikely(!crypto_mod_get(q)))
 79			continue;
 80
 81		best = q->cra_priority;
 82		if (alg)
 83			crypto_mod_put(alg);
 84		alg = q;
 85
 86		if (exact)
 87			break;
 88	}
 89
 90	return alg;
 91}
 92
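__crypto_alg_lookup() accepts either spelling of a name: an exact match on cra_driver_name wins immediately, while a match on cra_name keeps scanning for the highest cra_priority. An illustrative sketch using the public shash allocator (assumptions: both implementations are registered):

	/* "sha256" is a cra_name: returns the best-priority implementation.
	 * "sha256-generic" is a cra_driver_name: returns exactly that one. */
	struct crypto_shash *best = crypto_alloc_shash("sha256", 0, 0);
	struct crypto_shash *gen = crypto_alloc_shash("sha256-generic", 0, 0);

	if (!IS_ERR(gen))
		crypto_free_shash(gen);
	if (!IS_ERR(best))
		crypto_free_shash(best);
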
 93static void crypto_larval_destroy(struct crypto_alg *alg)
 94{
 95	struct crypto_larval *larval = (void *)alg;
 96
 97	BUG_ON(!crypto_is_larval(alg));
 98	if (!IS_ERR_OR_NULL(larval->adult))
 99		crypto_mod_put(larval->adult);
100	kfree(larval);
101}
102
103struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
104{
105	struct crypto_larval *larval;
106
107	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
108	if (!larval)
109		return ERR_PTR(-ENOMEM);
110
111	type &= ~CRYPTO_ALG_TYPE_MASK | (mask ?: CRYPTO_ALG_TYPE_MASK);
112
113	larval->mask = mask;
114	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
115	larval->alg.cra_priority = -1;
116	larval->alg.cra_destroy = crypto_larval_destroy;
117
118	strscpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
119	init_completion(&larval->completion);
120
121	return larval;
122}
123EXPORT_SYMBOL_GPL(crypto_larval_alloc);
124
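Editor's note on the expression at line 111 above: it keeps the CRYPTO_ALG_TYPE_MASK bits of type only where the caller's mask actually tests them. A worked example, assuming CRYPTO_ALG_TYPE_MASK is 0x0000000f as defined in <linux/crypto.h>:

	/* mask == 0:   type &= ~0xf | 0xf == ~0u  -> type is unchanged       */
	/* mask == 0xe: type &= ~0xf | 0xe        -> bit 0 of the type field
	 *              is cleared, since the lookup never compares that bit */
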
125static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
126					    u32 mask)
127{
128	struct crypto_alg *alg;
129	struct crypto_larval *larval;
130
131	larval = crypto_larval_alloc(name, type, mask);
132	if (IS_ERR(larval))
133		return ERR_CAST(larval);
134
135	refcount_set(&larval->alg.cra_refcnt, 2);
136
137	down_write(&crypto_alg_sem);
138	alg = __crypto_alg_lookup(name, type, mask);
139	if (!alg) {
140		alg = &larval->alg;
141		list_add(&alg->cra_list, &crypto_alg_list);
142	}
143	up_write(&crypto_alg_sem);
144
145	if (alg != &larval->alg) {
146		kfree(larval);
147		if (crypto_is_larval(alg))
148			alg = crypto_larval_wait(alg);
149	}
150
151	return alg;
152}
153
154static void crypto_larval_kill(struct crypto_larval *larval)
155{
156	bool unlinked;
157
158	down_write(&crypto_alg_sem);
159	unlinked = list_empty(&larval->alg.cra_list);
160	if (!unlinked)
161		list_del_init(&larval->alg.cra_list);
162	up_write(&crypto_alg_sem);
163
164	if (unlinked)
165		return;
166
167	complete_all(&larval->completion);
168	crypto_alg_put(&larval->alg);
169}
170
171void crypto_schedule_test(struct crypto_larval *larval)
172{
173	int err;
174
175	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
176	WARN_ON_ONCE(err != NOTIFY_STOP);
177}
178EXPORT_SYMBOL_GPL(crypto_schedule_test);
179
180static void crypto_start_test(struct crypto_larval *larval)
181{
182	if (!crypto_is_test_larval(larval))
183		return;
184
185	if (larval->test_started)
186		return;
187
188	down_write(&crypto_alg_sem);
189	if (larval->test_started) {
190		up_write(&crypto_alg_sem);
191		return;
192	}
193
194	larval->test_started = true;
195	up_write(&crypto_alg_sem);
196
197	crypto_schedule_test(larval);
198}
199
200static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
201{
202	struct crypto_larval *larval;
203	long time_left;
204
205again:
206	larval = container_of(alg, struct crypto_larval, alg);
207
208	if (!crypto_boot_test_finished())
209		crypto_start_test(larval);
210
211	time_left = wait_for_completion_killable_timeout(
212		&larval->completion, 60 * HZ);
213
214	alg = larval->adult;
215	if (time_left < 0)
216		alg = ERR_PTR(-EINTR);
217	else if (!time_left) {
218		if (crypto_is_test_larval(larval))
219			crypto_larval_kill(larval);
220		alg = ERR_PTR(-ETIMEDOUT);
221	} else if (!alg) {
222		u32 type;
223		u32 mask;
224
225		alg = &larval->alg;
226		type = alg->cra_flags & ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
227		mask = larval->mask;
228		alg = crypto_alg_lookup(alg->cra_name, type, mask) ?:
229		      ERR_PTR(-EAGAIN);
230	} else if (IS_ERR(alg))
231		;
232	else if (crypto_is_test_larval(larval) &&
233		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
234		alg = ERR_PTR(-EAGAIN);
235	else if (alg->cra_flags & CRYPTO_ALG_FIPS_INTERNAL)
236		alg = ERR_PTR(-EAGAIN);
237	else if (!crypto_mod_get(alg))
238		alg = ERR_PTR(-EAGAIN);
239	crypto_mod_put(&larval->alg);
240
241	if (!IS_ERR(alg) && crypto_is_larval(alg))
242		goto again;
243
244	return alg;
245}
246
247static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
248					    u32 mask)
249{
250	const u32 fips = CRYPTO_ALG_FIPS_INTERNAL;
251	struct crypto_alg *alg;
252	u32 test = 0;
253
254	if (!((type | mask) & CRYPTO_ALG_TESTED))
255		test |= CRYPTO_ALG_TESTED;
256
257	down_read(&crypto_alg_sem);
258	alg = __crypto_alg_lookup(name, (type | test) & ~fips,
259				  (mask | test) & ~fips);
260	if (alg) {
261		if (((type | mask) ^ fips) & fips)
262			mask |= fips;
263		mask &= fips;
264
265		if (!crypto_is_larval(alg) &&
266		    ((type ^ alg->cra_flags) & mask)) {
267			/* Algorithm is disallowed in FIPS mode. */
268			crypto_mod_put(alg);
269			alg = ERR_PTR(-ENOENT);
270		}
271	} else if (test) {
272		alg = __crypto_alg_lookup(name, type, mask);
273		if (alg && !crypto_is_larval(alg)) {
274			/* Test failed */
275			crypto_mod_put(alg);
276			alg = ERR_PTR(-ELIBBAD);
277		}
278	}
279	up_read(&crypto_alg_sem);
280
281	return alg;
282}
283
284static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
285					       u32 mask)
286{
287	struct crypto_alg *alg;
288
289	if (!name)
290		return ERR_PTR(-ENOENT);
291
292	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
293	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
294
295	alg = crypto_alg_lookup(name, type, mask);
296	if (!alg && !(mask & CRYPTO_NOLOAD)) {
297		request_module("crypto-%s", name);
298
299		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
300		      CRYPTO_ALG_NEED_FALLBACK))
301			request_module("crypto-%s-all", name);
302
303		alg = crypto_alg_lookup(name, type, mask);
304	}
305
306	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
307		alg = crypto_larval_wait(alg);
308	else if (alg)
309		;
310	else if (!(mask & CRYPTO_ALG_TESTED))
311		alg = crypto_larval_add(name, type, mask);
312	else
313		alg = ERR_PTR(-ENOENT);
314
315	return alg;
316}
317
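The request_module("crypto-%s", name) calls above resolve through module aliases that algorithm drivers declare with MODULE_ALIAS_CRYPTO(). A sketch for a hypothetical driver "foo":

	/* in the module that registers the "foo" algorithm: */
	MODULE_ALIAS_CRYPTO("foo");		/* satisfies request_module("crypto-foo") */
	MODULE_ALIAS_CRYPTO("foo-generic");	/* ...and the driver-name form */
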
318int crypto_probing_notify(unsigned long val, void *v)
319{
320	int ok;
321
322	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
323	if (ok == NOTIFY_DONE) {
324		request_module("cryptomgr");
325		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
326	}
327
328	return ok;
329}
330EXPORT_SYMBOL_GPL(crypto_probing_notify);
331
332struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
333{
334	struct crypto_alg *alg;
335	struct crypto_alg *larval;
336	int ok;
337
338	/*
339	 * If the internal flag is set for a cipher, require a caller to
340	 * invoke the cipher with the internal flag to use that cipher.
341	 * Also, if a caller wants to allocate a cipher that may or may
342	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
343	 * !(mask & CRYPTO_ALG_INTERNAL).
344	 */
345	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
346		mask |= CRYPTO_ALG_INTERNAL;
347
348	larval = crypto_larval_lookup(name, type, mask);
349	if (IS_ERR(larval) || !crypto_is_larval(larval))
350		return larval;
351
352	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);
353
354	if (ok == NOTIFY_STOP)
355		alg = crypto_larval_wait(larval);
356	else {
357		crypto_mod_put(larval);
358		alg = ERR_PTR(-ENOENT);
359	}
360	crypto_larval_kill(container_of(larval, struct crypto_larval, alg));
361	return alg;
362}
363EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
364
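Following the comment in crypto_alg_mod_lookup() above: a caller that must not see internal-only implementations simply passes neither flag, while infrastructure that explicitly wants one sets CRYPTO_ALG_INTERNAL in both type and mask. A hedged sketch (the second call succeeds only if an implementation flagged internal actually exists):

	struct crypto_shash *tfm;

	tfm = crypto_alloc_shash("sha256", 0, 0);	/* internal algs filtered out */
	tfm = crypto_alloc_shash("sha256", CRYPTO_ALG_INTERNAL,
				 CRYPTO_ALG_INTERNAL);	/* demand an internal one */
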
365static void crypto_exit_ops(struct crypto_tfm *tfm)
366{
367	const struct crypto_type *type = tfm->__crt_alg->cra_type;
368
369	if (type && tfm->exit)
370		tfm->exit(tfm);
371}
372
373static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
374{
375	const struct crypto_type *type_obj = alg->cra_type;
376	unsigned int len;
377
378	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
379	if (type_obj)
380		return len + type_obj->ctxsize(alg, type, mask);
381
382	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
383	default:
384		BUG();
385
386	case CRYPTO_ALG_TYPE_CIPHER:
387		len += crypto_cipher_ctxsize(alg);
388		break;
389
390	case CRYPTO_ALG_TYPE_COMPRESS:
391		len += crypto_compress_ctxsize(alg);
392		break;
393	}
394
395	return len;
396}
397
398void crypto_shoot_alg(struct crypto_alg *alg)
399{
400	down_write(&crypto_alg_sem);
401	alg->cra_flags |= CRYPTO_ALG_DYING;
402	up_write(&crypto_alg_sem);
403}
404EXPORT_SYMBOL_GPL(crypto_shoot_alg);
405
406struct crypto_tfm *__crypto_alloc_tfmgfp(struct crypto_alg *alg, u32 type,
407					 u32 mask, gfp_t gfp)
408{
409	struct crypto_tfm *tfm;
410	unsigned int tfm_size;
411	int err = -ENOMEM;
412
413	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
414	tfm = kzalloc(tfm_size, gfp);
415	if (tfm == NULL)
416		goto out_err;
417
418	tfm->__crt_alg = alg;
419	refcount_set(&tfm->refcnt, 1);
420
421	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
422		goto cra_init_failed;
423
424	goto out;
425
426cra_init_failed:
427	crypto_exit_ops(tfm);
428	if (err == -EAGAIN)
429		crypto_shoot_alg(alg);
430	kfree(tfm);
431out_err:
432	tfm = ERR_PTR(err);
433out:
434	return tfm;
435}
436EXPORT_SYMBOL_GPL(__crypto_alloc_tfmgfp);
437
438struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
439				      u32 mask)
440{
441	return __crypto_alloc_tfmgfp(alg, type, mask, GFP_KERNEL);
442}
443EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
444
445/*
446 *	crypto_alloc_base - Locate algorithm and allocate transform
447 *	@alg_name: Name of algorithm
448 *	@type: Type of algorithm
449 *	@mask: Mask for type comparison
450 *
451 *	This function should not be used by new algorithm types.
452 *	Please use crypto_alloc_tfm instead.
453 *
454 *	crypto_alloc_base() will first attempt to locate an already loaded
455 *	algorithm.  If that fails and the kernel supports dynamically loadable
456 *	modules, it will then attempt to load a module of the same name or
457 *	alias.  If that fails it will send a query to any loaded crypto manager
458 *	to construct an algorithm on the fly.  A refcount is grabbed on the
459 *	algorithm which is then associated with the new transform.
460 *
461 *	The returned transform is of a non-determinate type.  Most people
462 *	should use one of the more specific allocation functions such as
463 *	crypto_alloc_skcipher().
464 *
465 *	In case of error the return value is an error pointer.
466 */
467struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
468{
469	struct crypto_tfm *tfm;
470	int err;
471
472	for (;;) {
473		struct crypto_alg *alg;
474
475		alg = crypto_alg_mod_lookup(alg_name, type, mask);
476		if (IS_ERR(alg)) {
477			err = PTR_ERR(alg);
478			goto err;
479		}
480
481		tfm = __crypto_alloc_tfm(alg, type, mask);
482		if (!IS_ERR(tfm))
483			return tfm;
484
485		crypto_mod_put(alg);
486		err = PTR_ERR(tfm);
487
488err:
489		if (err != -EAGAIN)
490			break;
491		if (fatal_signal_pending(current)) {
492			err = -EINTR;
493			break;
494		}
495	}
496
497	return ERR_PTR(err);
498}
499EXPORT_SYMBOL_GPL(crypto_alloc_base);
500
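A minimal legacy-style usage sketch; as the kernel-doc above says, new code should prefer the typed allocators. crypto_free_tfm() is assumed here to be the inline wrapper around crypto_destroy_tfm() from <linux/crypto.h>, and the surrounding function context is elided:

	struct crypto_tfm *tfm;

	tfm = crypto_alloc_base("aes", CRYPTO_ALG_TYPE_CIPHER,
				CRYPTO_ALG_TYPE_MASK);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);
	/* ... use the single-block cipher ops ... */
	crypto_free_tfm(tfm);
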
501static void *crypto_alloc_tfmmem(struct crypto_alg *alg,
502				 const struct crypto_type *frontend, int node,
503				 gfp_t gfp)
504{
505	struct crypto_tfm *tfm;
506	unsigned int tfmsize;
507	unsigned int total;
508	char *mem;
509
510	tfmsize = frontend->tfmsize;
511	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
512
513	mem = kzalloc_node(total, gfp, node);
514	if (mem == NULL)
515		return ERR_PTR(-ENOMEM);
516
517	tfm = (struct crypto_tfm *)(mem + tfmsize);
518	tfm->__crt_alg = alg;
519	tfm->node = node;
520	refcount_set(&tfm->refcnt, 1);
521
522	return mem;
523}
524
525void *crypto_create_tfm_node(struct crypto_alg *alg,
526			     const struct crypto_type *frontend,
527			     int node)
528{
529	struct crypto_tfm *tfm;
530	char *mem;
531	int err;
532
533	mem = crypto_alloc_tfmmem(alg, frontend, node, GFP_KERNEL);
534	if (IS_ERR(mem))
535		goto out;
536
537	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
538
539	err = frontend->init_tfm(tfm);
540	if (err)
541		goto out_free_tfm;
542
543	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
544		goto cra_init_failed;
545
546	goto out;
547
548cra_init_failed:
549	crypto_exit_ops(tfm);
550out_free_tfm:
551	if (err == -EAGAIN)
552		crypto_shoot_alg(alg);
553	kfree(mem);
554	mem = ERR_PTR(err);
555out:
556	return mem;
557}
558EXPORT_SYMBOL_GPL(crypto_create_tfm_node);
559
560void *crypto_clone_tfm(const struct crypto_type *frontend,
561		       struct crypto_tfm *otfm)
562{
563	struct crypto_alg *alg = otfm->__crt_alg;
564	struct crypto_tfm *tfm;
565	char *mem;
566
567	mem = ERR_PTR(-ESTALE);
568	if (unlikely(!crypto_mod_get(alg)))
569		goto out;
570
571	mem = crypto_alloc_tfmmem(alg, frontend, otfm->node, GFP_ATOMIC);
572	if (IS_ERR(mem)) {
573		crypto_mod_put(alg);
574		goto out;
575	}
576
577	tfm = (struct crypto_tfm *)(mem + frontend->tfmsize);
578	tfm->crt_flags = otfm->crt_flags;
579	tfm->exit = otfm->exit;
580
581out:
582	return mem;
583}
584EXPORT_SYMBOL_GPL(crypto_clone_tfm);
585
586struct crypto_alg *crypto_find_alg(const char *alg_name,
587				   const struct crypto_type *frontend,
588				   u32 type, u32 mask)
589{
590	if (frontend) {
591		type &= frontend->maskclear;
592		mask &= frontend->maskclear;
593		type |= frontend->type;
594		mask |= frontend->maskset;
595	}
596
597	return crypto_alg_mod_lookup(alg_name, type, mask);
598}
599EXPORT_SYMBOL_GPL(crypto_find_alg);
600
601/*
602 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
603 *	@alg_name: Name of algorithm
604 *	@frontend: Frontend algorithm type
605 *	@type: Type of algorithm
606 *	@mask: Mask for type comparison
 607 *	@node: NUMA node on which the user intends to issue requests; if node
 608 *		is NUMA_NO_NODE, the caller has no locality preference.
609 *
610 *	crypto_alloc_tfm() will first attempt to locate an already loaded
611 *	algorithm.  If that fails and the kernel supports dynamically loadable
612 *	modules, it will then attempt to load a module of the same name or
613 *	alias.  If that fails it will send a query to any loaded crypto manager
614 *	to construct an algorithm on the fly.  A refcount is grabbed on the
615 *	algorithm which is then associated with the new transform.
616 *
617 *	The returned transform is of a non-determinate type.  Most people
618 *	should use one of the more specific allocation functions such as
619 *	crypto_alloc_skcipher().
620 *
621 *	In case of error the return value is an error pointer.
622 */
623
624void *crypto_alloc_tfm_node(const char *alg_name,
625		       const struct crypto_type *frontend, u32 type, u32 mask,
626		       int node)
627{
628	void *tfm;
629	int err;
630
631	for (;;) {
632		struct crypto_alg *alg;
633
634		alg = crypto_find_alg(alg_name, frontend, type, mask);
635		if (IS_ERR(alg)) {
636			err = PTR_ERR(alg);
637			goto err;
638		}
639
640		tfm = crypto_create_tfm_node(alg, frontend, node);
641		if (!IS_ERR(tfm))
642			return tfm;
643
644		crypto_mod_put(alg);
645		err = PTR_ERR(tfm);
646
647err:
648		if (err != -EAGAIN)
649			break;
650		if (fatal_signal_pending(current)) {
651			err = -EINTR;
652			break;
653		}
654	}
655
656	return ERR_PTR(err);
657}
658EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
659
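The type/mask pair encodes implementation properties as well as the algorithm type. A common idiom is to leave CRYPTO_ALG_ASYNC clear in type while setting it in mask, which restricts the lookup to synchronous implementations (sketch):

	struct crypto_skcipher *tfm;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, CRYPTO_ALG_ASYNC);
	/* only an implementation without CRYPTO_ALG_ASYNC set can match */
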
660/*
661 *	crypto_destroy_tfm - Free crypto transform
662 *	@mem: Start of tfm slab
663 *	@tfm: Transform to free
664 *
665 *	This function frees up the transform and any associated resources,
666 *	then drops the refcount on the associated algorithm.
667 */
668void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
669{
670	struct crypto_alg *alg;
671
672	if (IS_ERR_OR_NULL(mem))
673		return;
674
675	if (!refcount_dec_and_test(&tfm->refcnt))
676		return;
677	alg = tfm->__crt_alg;
678
679	if (!tfm->exit && alg->cra_exit)
680		alg->cra_exit(tfm);
681	crypto_exit_ops(tfm);
682	crypto_mod_put(alg);
683	kfree_sensitive(mem);
684}
685EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
686
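Users normally reach crypto_destroy_tfm() through the typed helpers, which pass the start of the allocation as @mem. A sketch for the shash case (assuming the usual wrapper layering):

	crypto_free_shash(tfm);	/* ends up in crypto_destroy_tfm() with the
				 * shash allocation as @mem */
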
687int crypto_has_alg(const char *name, u32 type, u32 mask)
688{
689	int ret = 0;
690	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);
691
692	if (!IS_ERR(alg)) {
693		crypto_mod_put(alg);
694		ret = 1;
695	}
696
697	return ret;
698}
699EXPORT_SYMBOL_GPL(crypto_has_alg);
700
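crypto_has_alg() probes availability (triggering module autoload along the way) and immediately drops the reference. Sketch:

	if (crypto_has_alg("gcm(aes)", 0, 0))
		pr_info("gcm(aes) is available\n");
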
701void crypto_req_done(void *data, int err)
702{
703	struct crypto_wait *wait = data;
704
705	if (err == -EINPROGRESS)
706		return;
707
708	wait->err = err;
709	complete(&wait->completion);
710}
711EXPORT_SYMBOL_GPL(crypto_req_done);
712
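crypto_req_done() is the standard completion callback for the crypto_wait helpers: crypto_wait_req() turns the asynchronous -EINPROGRESS/-EBUSY convention back into a synchronous return code by sleeping on the completion signalled here. A sketch, assuming req is an already prepared ahash request:

	DECLARE_CRYPTO_WAIT(wait);
	int err;

	ahash_request_set_callback(req,
				   CRYPTO_TFM_REQ_MAY_BACKLOG |
				   CRYPTO_TFM_REQ_MAY_SLEEP,
				   crypto_req_done, &wait);
	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
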
713MODULE_DESCRIPTION("Cryptographic core API");
714MODULE_LICENSE("GPL");
crypto/api.c (v5.9)
  1// SPDX-License-Identifier: GPL-2.0-or-later
  2/*
  3 * Scatterlist Cryptographic API.
  4 *
  5 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
  6 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
  7 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
  8 *
  9 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 10 * and Nettle, by Niels Möller.
 11 */
 12
 13#include <linux/err.h>
 14#include <linux/errno.h>
 15#include <linux/kernel.h>
 16#include <linux/kmod.h>
 17#include <linux/module.h>
 18#include <linux/param.h>
 19#include <linux/sched/signal.h>
 20#include <linux/slab.h>
 21#include <linux/string.h>
 22#include <linux/completion.h>
 23#include "internal.h"
 24
 25LIST_HEAD(crypto_alg_list);
 26EXPORT_SYMBOL_GPL(crypto_alg_list);
 27DECLARE_RWSEM(crypto_alg_sem);
 28EXPORT_SYMBOL_GPL(crypto_alg_sem);
 29
 30BLOCKING_NOTIFIER_HEAD(crypto_chain);
 31EXPORT_SYMBOL_GPL(crypto_chain);
 32
 33static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);
 34
 35struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
 36{
 37	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
 38}
 39EXPORT_SYMBOL_GPL(crypto_mod_get);
 40
 41void crypto_mod_put(struct crypto_alg *alg)
 42{
 43	struct module *module = alg->cra_module;
 44
 45	crypto_alg_put(alg);
 46	module_put(module);
 47}
 48EXPORT_SYMBOL_GPL(crypto_mod_put);
 49
 50static inline int crypto_is_test_larval(struct crypto_larval *larval)
 51{
 52	return larval->alg.cra_driver_name[0];
 53}
 54
 55static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
 56					      u32 mask)
 57{
 58	struct crypto_alg *q, *alg = NULL;
 59	int best = -2;
 60
 61	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 62		int exact, fuzzy;
 63
 64		if (crypto_is_moribund(q))
 65			continue;
 66
 67		if ((q->cra_flags ^ type) & mask)
 68			continue;
 69
 70		if (crypto_is_larval(q) &&
 71		    !crypto_is_test_larval((struct crypto_larval *)q) &&
 72		    ((struct crypto_larval *)q)->mask != mask)
 73			continue;
 74
 75		exact = !strcmp(q->cra_driver_name, name);
 76		fuzzy = !strcmp(q->cra_name, name);
 77		if (!exact && !(fuzzy && q->cra_priority > best))
 78			continue;
 79
 80		if (unlikely(!crypto_mod_get(q)))
 81			continue;
 82
 83		best = q->cra_priority;
 84		if (alg)
 85			crypto_mod_put(alg);
 86		alg = q;
 87
 88		if (exact)
 89			break;
 90	}
 91
 92	return alg;
 93}
 94
 95static void crypto_larval_destroy(struct crypto_alg *alg)
 96{
 97	struct crypto_larval *larval = (void *)alg;
 98
 99	BUG_ON(!crypto_is_larval(alg));
100	if (!IS_ERR_OR_NULL(larval->adult))
101		crypto_mod_put(larval->adult);
102	kfree(larval);
103}
104
105struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
106{
107	struct crypto_larval *larval;
108
109	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
110	if (!larval)
111		return ERR_PTR(-ENOMEM);
112
113	larval->mask = mask;
114	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
115	larval->alg.cra_priority = -1;
116	larval->alg.cra_destroy = crypto_larval_destroy;
117
118	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
119	init_completion(&larval->completion);
120
121	return larval;
122}
123EXPORT_SYMBOL_GPL(crypto_larval_alloc);
124
125static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
126					    u32 mask)
127{
128	struct crypto_alg *alg;
129	struct crypto_larval *larval;
130
131	larval = crypto_larval_alloc(name, type, mask);
132	if (IS_ERR(larval))
133		return ERR_CAST(larval);
134
135	refcount_set(&larval->alg.cra_refcnt, 2);
136
137	down_write(&crypto_alg_sem);
138	alg = __crypto_alg_lookup(name, type, mask);
139	if (!alg) {
140		alg = &larval->alg;
141		list_add(&alg->cra_list, &crypto_alg_list);
142	}
143	up_write(&crypto_alg_sem);
144
145	if (alg != &larval->alg) {
146		kfree(larval);
147		if (crypto_is_larval(alg))
148			alg = crypto_larval_wait(alg);
149	}
150
151	return alg;
152}
153
154void crypto_larval_kill(struct crypto_alg *alg)
155{
156	struct crypto_larval *larval = (void *)alg;
157
158	down_write(&crypto_alg_sem);
159	list_del(&alg->cra_list);
160	up_write(&crypto_alg_sem);
161	complete_all(&larval->completion);
162	crypto_alg_put(alg);
163}
164EXPORT_SYMBOL_GPL(crypto_larval_kill);
165
166static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
167{
168	struct crypto_larval *larval = (void *)alg;
169	long timeout;
170
171	timeout = wait_for_completion_killable_timeout(
172		&larval->completion, 60 * HZ);
173
174	alg = larval->adult;
175	if (timeout < 0)
176		alg = ERR_PTR(-EINTR);
177	else if (!timeout)
178		alg = ERR_PTR(-ETIMEDOUT);
179	else if (!alg)
180		alg = ERR_PTR(-ENOENT);
181	else if (IS_ERR(alg))
182		;
183	else if (crypto_is_test_larval(larval) &&
184		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
185		alg = ERR_PTR(-EAGAIN);
186	else if (!crypto_mod_get(alg))
187		alg = ERR_PTR(-EAGAIN);
188	crypto_mod_put(&larval->alg);
189
190	return alg;
191}
192
193static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
194					    u32 mask)
195{
196	struct crypto_alg *alg;
197	u32 test = 0;
198
199	if (!((type | mask) & CRYPTO_ALG_TESTED))
200		test |= CRYPTO_ALG_TESTED;
201
202	down_read(&crypto_alg_sem);
203	alg = __crypto_alg_lookup(name, type | test, mask | test);
204	if (!alg && test) {
205		alg = __crypto_alg_lookup(name, type, mask);
206		if (alg && !crypto_is_larval(alg)) {
207			/* Test failed */
208			crypto_mod_put(alg);
209			alg = ERR_PTR(-ELIBBAD);
210		}
211	}
212	up_read(&crypto_alg_sem);
213
214	return alg;
215}
216
217static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
218					       u32 mask)
219{
220	struct crypto_alg *alg;
221
222	if (!name)
223		return ERR_PTR(-ENOENT);
224
225	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
226	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
227
228	alg = crypto_alg_lookup(name, type, mask);
229	if (!alg && !(mask & CRYPTO_NOLOAD)) {
230		request_module("crypto-%s", name);
231
232		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
233		      CRYPTO_ALG_NEED_FALLBACK))
234			request_module("crypto-%s-all", name);
235
236		alg = crypto_alg_lookup(name, type, mask);
237	}
238
239	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
240		alg = crypto_larval_wait(alg);
241	else if (!alg)
242		alg = crypto_larval_add(name, type, mask);
243
244	return alg;
245}
246
247int crypto_probing_notify(unsigned long val, void *v)
248{
249	int ok;
250
251	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
252	if (ok == NOTIFY_DONE) {
253		request_module("cryptomgr");
254		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
255	}
256
257	return ok;
258}
259EXPORT_SYMBOL_GPL(crypto_probing_notify);
260
261struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
262{
263	struct crypto_alg *alg;
264	struct crypto_alg *larval;
265	int ok;
266
267	/*
268	 * If the internal flag is set for a cipher, require a caller to
 269	 * invoke the cipher with the internal flag to use that cipher.
270	 * Also, if a caller wants to allocate a cipher that may or may
271	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
272	 * !(mask & CRYPTO_ALG_INTERNAL).
273	 */
274	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
275		mask |= CRYPTO_ALG_INTERNAL;
276
277	larval = crypto_larval_lookup(name, type, mask);
278	if (IS_ERR(larval) || !crypto_is_larval(larval))
279		return larval;
280
281	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);
282
283	if (ok == NOTIFY_STOP)
284		alg = crypto_larval_wait(larval);
285	else {
286		crypto_mod_put(larval);
287		alg = ERR_PTR(-ENOENT);
288	}
289	crypto_larval_kill(larval);
290	return alg;
291}
292EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);
293
294static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
295{
296	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;
297
298	if (type_obj)
299		return type_obj->init(tfm, type, mask);
300	return 0;
301}
302
303static void crypto_exit_ops(struct crypto_tfm *tfm)
304{
305	const struct crypto_type *type = tfm->__crt_alg->cra_type;
306
307	if (type && tfm->exit)
308		tfm->exit(tfm);
309}
310
311static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
312{
313	const struct crypto_type *type_obj = alg->cra_type;
314	unsigned int len;
315
316	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
317	if (type_obj)
318		return len + type_obj->ctxsize(alg, type, mask);
319
320	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
321	default:
322		BUG();
323
324	case CRYPTO_ALG_TYPE_CIPHER:
325		len += crypto_cipher_ctxsize(alg);
326		break;
327
328	case CRYPTO_ALG_TYPE_COMPRESS:
329		len += crypto_compress_ctxsize(alg);
330		break;
331	}
332
333	return len;
334}
335
336void crypto_shoot_alg(struct crypto_alg *alg)
337{
338	down_write(&crypto_alg_sem);
339	alg->cra_flags |= CRYPTO_ALG_DYING;
340	up_write(&crypto_alg_sem);
341}
342EXPORT_SYMBOL_GPL(crypto_shoot_alg);
343
344struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
345				      u32 mask)
346{
347	struct crypto_tfm *tfm = NULL;
348	unsigned int tfm_size;
349	int err = -ENOMEM;
350
351	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
352	tfm = kzalloc(tfm_size, GFP_KERNEL);
353	if (tfm == NULL)
354		goto out_err;
355
356	tfm->__crt_alg = alg;
357
358	err = crypto_init_ops(tfm, type, mask);
359	if (err)
360		goto out_free_tfm;
361
362	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
363		goto cra_init_failed;
364
365	goto out;
366
367cra_init_failed:
368	crypto_exit_ops(tfm);
369out_free_tfm:
370	if (err == -EAGAIN)
371		crypto_shoot_alg(alg);
372	kfree(tfm);
373out_err:
374	tfm = ERR_PTR(err);
375out:
376	return tfm;
377}
378EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);
379
380/*
381 *	crypto_alloc_base - Locate algorithm and allocate transform
382 *	@alg_name: Name of algorithm
383 *	@type: Type of algorithm
384 *	@mask: Mask for type comparison
385 *
386 *	This function should not be used by new algorithm types.
387 *	Please use crypto_alloc_tfm instead.
388 *
389 *	crypto_alloc_base() will first attempt to locate an already loaded
390 *	algorithm.  If that fails and the kernel supports dynamically loadable
391 *	modules, it will then attempt to load a module of the same name or
392 *	alias.  If that fails it will send a query to any loaded crypto manager
393 *	to construct an algorithm on the fly.  A refcount is grabbed on the
394 *	algorithm which is then associated with the new transform.
395 *
396 *	The returned transform is of a non-determinate type.  Most people
397 *	should use one of the more specific allocation functions such as
398 *	crypto_alloc_skcipher().
399 *
400 *	In case of error the return value is an error pointer.
401 */
402struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
403{
404	struct crypto_tfm *tfm;
405	int err;
406
407	for (;;) {
408		struct crypto_alg *alg;
409
410		alg = crypto_alg_mod_lookup(alg_name, type, mask);
411		if (IS_ERR(alg)) {
412			err = PTR_ERR(alg);
413			goto err;
414		}
415
416		tfm = __crypto_alloc_tfm(alg, type, mask);
417		if (!IS_ERR(tfm))
418			return tfm;
419
420		crypto_mod_put(alg);
421		err = PTR_ERR(tfm);
422
423err:
424		if (err != -EAGAIN)
425			break;
426		if (fatal_signal_pending(current)) {
427			err = -EINTR;
428			break;
429		}
430	}
431
432	return ERR_PTR(err);
433}
434EXPORT_SYMBOL_GPL(crypto_alloc_base);
435
436void *crypto_create_tfm_node(struct crypto_alg *alg,
437			const struct crypto_type *frontend,
438			int node)
439{
440	char *mem;
441	struct crypto_tfm *tfm = NULL;
442	unsigned int tfmsize;
443	unsigned int total;
444	int err = -ENOMEM;
445
446	tfmsize = frontend->tfmsize;
447	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);
448
449	mem = kzalloc_node(total, GFP_KERNEL, node);
450	if (mem == NULL)
451		goto out_err;
452
453	tfm = (struct crypto_tfm *)(mem + tfmsize);
454	tfm->__crt_alg = alg;
455	tfm->node = node;
456
457	err = frontend->init_tfm(tfm);
458	if (err)
459		goto out_free_tfm;
460
461	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
462		goto cra_init_failed;
463
464	goto out;
465
466cra_init_failed:
467	crypto_exit_ops(tfm);
468out_free_tfm:
469	if (err == -EAGAIN)
470		crypto_shoot_alg(alg);
471	kfree(mem);
472out_err:
473	mem = ERR_PTR(err);
474out:
475	return mem;
476}
477EXPORT_SYMBOL_GPL(crypto_create_tfm_node);
478
479struct crypto_alg *crypto_find_alg(const char *alg_name,
480				   const struct crypto_type *frontend,
481				   u32 type, u32 mask)
482{
483	if (frontend) {
484		type &= frontend->maskclear;
485		mask &= frontend->maskclear;
486		type |= frontend->type;
487		mask |= frontend->maskset;
488	}
489
490	return crypto_alg_mod_lookup(alg_name, type, mask);
491}
492EXPORT_SYMBOL_GPL(crypto_find_alg);
493
494/*
495 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
496 *	@alg_name: Name of algorithm
497 *	@frontend: Frontend algorithm type
498 *	@type: Type of algorithm
499 *	@mask: Mask for type comparison
 500 *	@node: NUMA node on which the user intends to issue requests; if node
 501 *		is NUMA_NO_NODE, the caller has no locality preference.
502 *
503 *	crypto_alloc_tfm() will first attempt to locate an already loaded
504 *	algorithm.  If that fails and the kernel supports dynamically loadable
505 *	modules, it will then attempt to load a module of the same name or
506 *	alias.  If that fails it will send a query to any loaded crypto manager
507 *	to construct an algorithm on the fly.  A refcount is grabbed on the
508 *	algorithm which is then associated with the new transform.
509 *
510 *	The returned transform is of a non-determinate type.  Most people
511 *	should use one of the more specific allocation functions such as
512 *	crypto_alloc_skcipher().
513 *
514 *	In case of error the return value is an error pointer.
515 */
516
517void *crypto_alloc_tfm_node(const char *alg_name,
518		       const struct crypto_type *frontend, u32 type, u32 mask,
519		       int node)
520{
521	void *tfm;
522	int err;
523
524	for (;;) {
525		struct crypto_alg *alg;
526
527		alg = crypto_find_alg(alg_name, frontend, type, mask);
528		if (IS_ERR(alg)) {
529			err = PTR_ERR(alg);
530			goto err;
531		}
532
533		tfm = crypto_create_tfm_node(alg, frontend, node);
534		if (!IS_ERR(tfm))
535			return tfm;
536
537		crypto_mod_put(alg);
538		err = PTR_ERR(tfm);
539
540err:
541		if (err != -EAGAIN)
542			break;
543		if (fatal_signal_pending(current)) {
544			err = -EINTR;
545			break;
546		}
547	}
548
549	return ERR_PTR(err);
550}
551EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);
552
553/*
554 *	crypto_destroy_tfm - Free crypto transform
555 *	@mem: Start of tfm slab
556 *	@tfm: Transform to free
557 *
558 *	This function frees up the transform and any associated resources,
559 *	then drops the refcount on the associated algorithm.
560 */
561void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
562{
563	struct crypto_alg *alg;
564
565	if (unlikely(!mem))
566		return;
567
568	alg = tfm->__crt_alg;
569
570	if (!tfm->exit && alg->cra_exit)
571		alg->cra_exit(tfm);
572	crypto_exit_ops(tfm);
573	crypto_mod_put(alg);
574	kfree_sensitive(mem);
575}
576EXPORT_SYMBOL_GPL(crypto_destroy_tfm);
577
578int crypto_has_alg(const char *name, u32 type, u32 mask)
579{
580	int ret = 0;
581	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);
582
583	if (!IS_ERR(alg)) {
584		crypto_mod_put(alg);
585		ret = 1;
586	}
587
588	return ret;
589}
590EXPORT_SYMBOL_GPL(crypto_has_alg);
591
592void crypto_req_done(struct crypto_async_request *req, int err)
593{
594	struct crypto_wait *wait = req->data;
595
596	if (err == -EINPROGRESS)
597		return;
598
599	wait->err = err;
600	complete(&wait->completion);
601}
602EXPORT_SYMBOL_GPL(crypto_req_done);
603
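Editor's note: in v5.9 the completion callback still received the crypto_async_request itself and recovered its context from req->data; the v6.13.7 copy above passes the data pointer directly instead. The caller-side wiring is identical in both versions (sketch):

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
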
604MODULE_DESCRIPTION("Cryptographic core API");
605MODULE_LICENSE("GPL");
606MODULE_SOFTDEP("pre: cryptomgr");