crypto/api.c (v4.6)
 
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 *
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched.h>
#include <linux/slab.h>
#include <linux/string.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

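/*
 * Walk the registered-algorithm list and return the best match for @name
 * under the given type/mask constraints: an exact match on cra_driver_name
 * wins outright, otherwise the highest-priority entry whose cra_name
 * matches is chosen.  The caller must hold crypto_alg_sem.
 */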
static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (larval->adult)
		crypto_mod_put(larval->adult);
	kfree(larval);
}

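/*
 * A larval is a temporary placeholder entry that stands in for an algorithm
 * while it is being loaded, constructed or tested.  Waiters block on its
 * completion until the "adult" algorithm replaces it.
 */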
struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

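/*
 * Register a larval for @name unless another entry appeared in the
 * meantime.  The refcount starts at 2: one reference for the list, one for
 * the caller.  If the registration race is lost, the freshly allocated
 * larval is freed and the existing entry is used (waited on, if it is
 * itself a larval) instead.
 */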
static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	atomic_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

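/*
 * Wait (killably, with a 60 second timeout) for a larval to be completed,
 * then hand back the adult algorithm, or an ERR_PTR if the wait was
 * interrupted, timed out, or produced nothing usable.
 */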
static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

struct crypto_alg *crypto_alg_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	up_read(&crypto_alg_sem);

	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_lookup);

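/*
 * Look the algorithm up, and if it is not yet registered try to pull it in
 * via module autoloading ("crypto-<name>" is the module alias convention
 * used by algorithm drivers).  If it is still missing, a larval is
 * registered so that a crypto manager can construct it.
 */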
struct crypto_alg *crypto_larval_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	type &= mask;

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (alg)
		return crypto_is_larval(alg) ? crypto_larval_wait(alg) : alg;

	return crypto_larval_add(name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_larval_lookup);

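/*
 * Run @val through the crypto notifier chain; if nobody handles it, load
 * the cryptomgr module and try once more.
 */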
int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

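/*
 * Top-level lookup used by the allocation functions.  If only a larval
 * comes back, ask the probing notifier chain (normally cryptomgr) to
 * instantiate the algorithm, e.g. from a template spec such as
 * "hmac(sha256)", and wait for the result.
 */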
struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	if (!((type | mask) & CRYPTO_ALG_TESTED)) {
		type |= CRYPTO_ALG_TESTED;
		mask |= CRYPTO_ALG_TESTED;
	}

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		return crypto_init_cipher_ops(tfm);

	case CRYPTO_ALG_TYPE_COMPRESS:
		return crypto_init_compress_ops(tfm);

	default:
		break;
	}

	BUG();
	return -EINVAL;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type) {
		if (tfm->exit)
			tfm->exit(tfm);
		return;
	}

	switch (crypto_tfm_alg_type(tfm)) {
	case CRYPTO_ALG_TYPE_CIPHER:
		crypto_exit_cipher_ops(tfm);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		crypto_exit_compress_ops(tfm);
		break;

	default:
		BUG();
	}
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

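/*
 * Mark an algorithm as dying after a transform allocation failed with
 * -EAGAIN, so that subsequent lookups skip it (see crypto_is_moribund()).
 */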
void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);
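
/*
 * Example usage (a minimal sketch, not taken from this file; error
 * handling with IS_ERR()/PTR_ERR() is the normal pattern for this API):
 *
 *	struct crypto_tfm *tfm;
 *
 *	tfm = crypto_alloc_base("sha1", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	...
 *	crypto_free_tfm(tfm);
 */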

void *crypto_create_tfm(struct crypto_alg *alg,
			const struct crypto_type *frontend)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc(total, GFP_KERNEL);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm);

498
499struct crypto_alg *crypto_find_alg(const char *alg_name,
500				   const struct crypto_type *frontend,
501				   u32 type, u32 mask)
502{
503	struct crypto_alg *(*lookup)(const char *name, u32 type, u32 mask) =
504		crypto_alg_mod_lookup;
505
506	if (frontend) {
507		type &= frontend->maskclear;
508		mask &= frontend->maskclear;
509		type |= frontend->type;
510		mask |= frontend->maskset;
511
512		if (frontend->lookup)
513			lookup = frontend->lookup;
514	}
515
516	return lookup(alg_name, type, mask);
517}
518EXPORT_SYMBOL_GPL(crypto_find_alg);
519
/*
 *	crypto_alloc_tfm - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_blkcipher.
 *
 *	In case of error the return value is an error pointer.
 */
void *crypto_alloc_tfm(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm(alg, frontend);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (unlikely(!mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kzfree(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);
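
/*
 * Example (sketch): probe for an implementation before offering a
 * feature, e.g.
 *
 *	if (!crypto_has_alg("aes", 0, 0))
 *		return -ENOENT;
 */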

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
crypto/api.c (v5.14.15)
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Scatterlist Cryptographic API.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 David S. Miller (davem@redhat.com)
 * Copyright (c) 2005 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Portions derived from Cryptoapi, by Alexander Kjeldaas <astor@fast.no>
 * and Nettle, by Niels Möller.
 */

#include <linux/err.h>
#include <linux/errno.h>
#include <linux/kernel.h>
#include <linux/kmod.h>
#include <linux/module.h>
#include <linux/param.h>
#include <linux/sched/signal.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/completion.h>
#include "internal.h"

LIST_HEAD(crypto_alg_list);
EXPORT_SYMBOL_GPL(crypto_alg_list);
DECLARE_RWSEM(crypto_alg_sem);
EXPORT_SYMBOL_GPL(crypto_alg_sem);

BLOCKING_NOTIFIER_HEAD(crypto_chain);
EXPORT_SYMBOL_GPL(crypto_chain);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg);

struct crypto_alg *crypto_mod_get(struct crypto_alg *alg)
{
	return try_module_get(alg->cra_module) ? crypto_alg_get(alg) : NULL;
}
EXPORT_SYMBOL_GPL(crypto_mod_get);

void crypto_mod_put(struct crypto_alg *alg)
{
	struct module *module = alg->cra_module;

	crypto_alg_put(alg);
	module_put(module);
}
EXPORT_SYMBOL_GPL(crypto_mod_put);

static inline int crypto_is_test_larval(struct crypto_larval *larval)
{
	return larval->alg.cra_driver_name[0];
}

static struct crypto_alg *__crypto_alg_lookup(const char *name, u32 type,
					      u32 mask)
{
	struct crypto_alg *q, *alg = NULL;
	int best = -2;

	list_for_each_entry(q, &crypto_alg_list, cra_list) {
		int exact, fuzzy;

		if (crypto_is_moribund(q))
			continue;

		if ((q->cra_flags ^ type) & mask)
			continue;

		if (crypto_is_larval(q) &&
		    !crypto_is_test_larval((struct crypto_larval *)q) &&
		    ((struct crypto_larval *)q)->mask != mask)
			continue;

		exact = !strcmp(q->cra_driver_name, name);
		fuzzy = !strcmp(q->cra_name, name);
		if (!exact && !(fuzzy && q->cra_priority > best))
			continue;

		if (unlikely(!crypto_mod_get(q)))
			continue;

		best = q->cra_priority;
		if (alg)
			crypto_mod_put(alg);
		alg = q;

		if (exact)
			break;
	}

	return alg;
}

static void crypto_larval_destroy(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	BUG_ON(!crypto_is_larval(alg));
	if (!IS_ERR_OR_NULL(larval->adult))
		crypto_mod_put(larval->adult);
	kfree(larval);
}

struct crypto_larval *crypto_larval_alloc(const char *name, u32 type, u32 mask)
{
	struct crypto_larval *larval;

	larval = kzalloc(sizeof(*larval), GFP_KERNEL);
	if (!larval)
		return ERR_PTR(-ENOMEM);

	larval->mask = mask;
	larval->alg.cra_flags = CRYPTO_ALG_LARVAL | type;
	larval->alg.cra_priority = -1;
	larval->alg.cra_destroy = crypto_larval_destroy;

	strlcpy(larval->alg.cra_name, name, CRYPTO_MAX_ALG_NAME);
	init_completion(&larval->completion);

	return larval;
}
EXPORT_SYMBOL_GPL(crypto_larval_alloc);

static struct crypto_alg *crypto_larval_add(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_larval *larval;

	larval = crypto_larval_alloc(name, type, mask);
	if (IS_ERR(larval))
		return ERR_CAST(larval);

	refcount_set(&larval->alg.cra_refcnt, 2);

	down_write(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type, mask);
	if (!alg) {
		alg = &larval->alg;
		list_add(&alg->cra_list, &crypto_alg_list);
	}
	up_write(&crypto_alg_sem);

	if (alg != &larval->alg) {
		kfree(larval);
		if (crypto_is_larval(alg))
			alg = crypto_larval_wait(alg);
	}

	return alg;
}

void crypto_larval_kill(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;

	down_write(&crypto_alg_sem);
	list_del(&alg->cra_list);
	up_write(&crypto_alg_sem);
	complete_all(&larval->completion);
	crypto_alg_put(alg);
}
EXPORT_SYMBOL_GPL(crypto_larval_kill);

static struct crypto_alg *crypto_larval_wait(struct crypto_alg *alg)
{
	struct crypto_larval *larval = (void *)alg;
	long timeout;

	timeout = wait_for_completion_killable_timeout(
		&larval->completion, 60 * HZ);

	alg = larval->adult;
	if (timeout < 0)
		alg = ERR_PTR(-EINTR);
	else if (!timeout)
		alg = ERR_PTR(-ETIMEDOUT);
	else if (!alg)
		alg = ERR_PTR(-ENOENT);
	else if (IS_ERR(alg))
		;
	else if (crypto_is_test_larval(larval) &&
		 !(alg->cra_flags & CRYPTO_ALG_TESTED))
		alg = ERR_PTR(-EAGAIN);
	else if (!crypto_mod_get(alg))
		alg = ERR_PTR(-EAGAIN);
	crypto_mod_put(&larval->alg);

	return alg;
}

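/*
 * Look the algorithm up, preferring entries that have passed their
 * self-tests.  If the only match is an untested non-larval entry, the
 * self-test must have failed, so -ELIBBAD is returned.
 */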
static struct crypto_alg *crypto_alg_lookup(const char *name, u32 type,
					    u32 mask)
{
	struct crypto_alg *alg;
	u32 test = 0;

	if (!((type | mask) & CRYPTO_ALG_TESTED))
		test |= CRYPTO_ALG_TESTED;

	down_read(&crypto_alg_sem);
	alg = __crypto_alg_lookup(name, type | test, mask | test);
	if (!alg && test) {
		alg = __crypto_alg_lookup(name, type, mask);
		if (alg && !crypto_is_larval(alg)) {
			/* Test failed */
			crypto_mod_put(alg);
			alg = ERR_PTR(-ELIBBAD);
		}
	}
	up_read(&crypto_alg_sem);

	return alg;
}

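/*
 * As above, but may also register a larval and trigger module
 * autoloading; CRYPTO_NOLOAD in the mask suppresses the module requests.
 */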
static struct crypto_alg *crypto_larval_lookup(const char *name, u32 type,
					       u32 mask)
{
	struct crypto_alg *alg;

	if (!name)
		return ERR_PTR(-ENOENT);

	type &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);
	mask &= ~(CRYPTO_ALG_LARVAL | CRYPTO_ALG_DEAD);

	alg = crypto_alg_lookup(name, type, mask);
	if (!alg && !(mask & CRYPTO_NOLOAD)) {
		request_module("crypto-%s", name);

		if (!((type ^ CRYPTO_ALG_NEED_FALLBACK) & mask &
		      CRYPTO_ALG_NEED_FALLBACK))
			request_module("crypto-%s-all", name);

		alg = crypto_alg_lookup(name, type, mask);
	}

	if (!IS_ERR_OR_NULL(alg) && crypto_is_larval(alg))
		alg = crypto_larval_wait(alg);
	else if (!alg)
		alg = crypto_larval_add(name, type, mask);

	return alg;
}

int crypto_probing_notify(unsigned long val, void *v)
{
	int ok;

	ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	if (ok == NOTIFY_DONE) {
		request_module("cryptomgr");
		ok = blocking_notifier_call_chain(&crypto_chain, val, v);
	}

	return ok;
}
EXPORT_SYMBOL_GPL(crypto_probing_notify);

struct crypto_alg *crypto_alg_mod_lookup(const char *name, u32 type, u32 mask)
{
	struct crypto_alg *alg;
	struct crypto_alg *larval;
	int ok;

	/*
	 * If the internal flag is set for a cipher, require a caller to
	 * invoke the cipher with the internal flag to use that cipher.
	 * Also, if a caller wants to allocate a cipher that may or may
	 * not be an internal cipher, use type | CRYPTO_ALG_INTERNAL and
	 * !(mask & CRYPTO_ALG_INTERNAL).
	 */
	if (!((type | mask) & CRYPTO_ALG_INTERNAL))
		mask |= CRYPTO_ALG_INTERNAL;

	larval = crypto_larval_lookup(name, type, mask);
	if (IS_ERR(larval) || !crypto_is_larval(larval))
		return larval;

	ok = crypto_probing_notify(CRYPTO_MSG_ALG_REQUEST, larval);

	if (ok == NOTIFY_STOP)
		alg = crypto_larval_wait(larval);
	else {
		crypto_mod_put(larval);
		alg = ERR_PTR(-ENOENT);
	}
	crypto_larval_kill(larval);
	return alg;
}
EXPORT_SYMBOL_GPL(crypto_alg_mod_lookup);

static int crypto_init_ops(struct crypto_tfm *tfm, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = tfm->__crt_alg->cra_type;

	if (type_obj)
		return type_obj->init(tfm, type, mask);
	return 0;
}

static void crypto_exit_ops(struct crypto_tfm *tfm)
{
	const struct crypto_type *type = tfm->__crt_alg->cra_type;

	if (type && tfm->exit)
		tfm->exit(tfm);
}

static unsigned int crypto_ctxsize(struct crypto_alg *alg, u32 type, u32 mask)
{
	const struct crypto_type *type_obj = alg->cra_type;
	unsigned int len;

	len = alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1);
	if (type_obj)
		return len + type_obj->ctxsize(alg, type, mask);

	switch (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) {
	default:
		BUG();

	case CRYPTO_ALG_TYPE_CIPHER:
		len += crypto_cipher_ctxsize(alg);
		break;

	case CRYPTO_ALG_TYPE_COMPRESS:
		len += crypto_compress_ctxsize(alg);
		break;
	}

	return len;
}

void crypto_shoot_alg(struct crypto_alg *alg)
{
	down_write(&crypto_alg_sem);
	alg->cra_flags |= CRYPTO_ALG_DYING;
	up_write(&crypto_alg_sem);
}
EXPORT_SYMBOL_GPL(crypto_shoot_alg);

struct crypto_tfm *__crypto_alloc_tfm(struct crypto_alg *alg, u32 type,
				      u32 mask)
{
	struct crypto_tfm *tfm = NULL;
	unsigned int tfm_size;
	int err = -ENOMEM;

	tfm_size = sizeof(*tfm) + crypto_ctxsize(alg, type, mask);
	tfm = kzalloc(tfm_size, GFP_KERNEL);
	if (tfm == NULL)
		goto out_err;

	tfm->__crt_alg = alg;

	err = crypto_init_ops(tfm, type, mask);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(tfm);
out_err:
	tfm = ERR_PTR(err);
out:
	return tfm;
}
EXPORT_SYMBOL_GPL(__crypto_alloc_tfm);

/*
 *	crypto_alloc_base - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *
 *	This function should not be used by new algorithm types.
 *	Please use crypto_alloc_tfm instead.
 *
 *	crypto_alloc_base() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */
struct crypto_tfm *crypto_alloc_base(const char *alg_name, u32 type, u32 mask)
{
	struct crypto_tfm *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_alg_mod_lookup(alg_name, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = __crypto_alloc_tfm(alg, type, mask);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_base);

void *crypto_create_tfm_node(struct crypto_alg *alg,
			const struct crypto_type *frontend,
			int node)
{
	char *mem;
	struct crypto_tfm *tfm = NULL;
	unsigned int tfmsize;
	unsigned int total;
	int err = -ENOMEM;

	tfmsize = frontend->tfmsize;
	total = tfmsize + sizeof(*tfm) + frontend->extsize(alg);

	mem = kzalloc_node(total, GFP_KERNEL, node);
	if (mem == NULL)
		goto out_err;

	tfm = (struct crypto_tfm *)(mem + tfmsize);
	tfm->__crt_alg = alg;
	tfm->node = node;

	err = frontend->init_tfm(tfm);
	if (err)
		goto out_free_tfm;

	if (!tfm->exit && alg->cra_init && (err = alg->cra_init(tfm)))
		goto cra_init_failed;

	goto out;

cra_init_failed:
	crypto_exit_ops(tfm);
out_free_tfm:
	if (err == -EAGAIN)
		crypto_shoot_alg(alg);
	kfree(mem);
out_err:
	mem = ERR_PTR(err);
out:
	return mem;
}
EXPORT_SYMBOL_GPL(crypto_create_tfm_node);

struct crypto_alg *crypto_find_alg(const char *alg_name,
				   const struct crypto_type *frontend,
				   u32 type, u32 mask)
{
	if (frontend) {
		type &= frontend->maskclear;
		mask &= frontend->maskclear;
		type |= frontend->type;
		mask |= frontend->maskset;
	}

	return crypto_alg_mod_lookup(alg_name, type, mask);
}
EXPORT_SYMBOL_GPL(crypto_find_alg);

/*
 *	crypto_alloc_tfm_node - Locate algorithm and allocate transform
 *	@alg_name: Name of algorithm
 *	@frontend: Frontend algorithm type
 *	@type: Type of algorithm
 *	@mask: Mask for type comparison
 *	@node: NUMA node in which users desire to put requests, if node is
 *		NUMA_NO_NODE, it means users have no special requirement.
 *
 *	crypto_alloc_tfm() will first attempt to locate an already loaded
 *	algorithm.  If that fails and the kernel supports dynamically loadable
 *	modules, it will then attempt to load a module of the same name or
 *	alias.  If that fails it will send a query to any loaded crypto manager
 *	to construct an algorithm on the fly.  A refcount is grabbed on the
 *	algorithm which is then associated with the new transform.
 *
 *	The returned transform is of a non-determinate type.  Most people
 *	should use one of the more specific allocation functions such as
 *	crypto_alloc_skcipher().
 *
 *	In case of error the return value is an error pointer.
 */

void *crypto_alloc_tfm_node(const char *alg_name,
		       const struct crypto_type *frontend, u32 type, u32 mask,
		       int node)
{
	void *tfm;
	int err;

	for (;;) {
		struct crypto_alg *alg;

		alg = crypto_find_alg(alg_name, frontend, type, mask);
		if (IS_ERR(alg)) {
			err = PTR_ERR(alg);
			goto err;
		}

		tfm = crypto_create_tfm_node(alg, frontend, node);
		if (!IS_ERR(tfm))
			return tfm;

		crypto_mod_put(alg);
		err = PTR_ERR(tfm);

err:
		if (err != -EAGAIN)
			break;
		if (fatal_signal_pending(current)) {
			err = -EINTR;
			break;
		}
	}

	return ERR_PTR(err);
}
EXPORT_SYMBOL_GPL(crypto_alloc_tfm_node);

/*
 *	crypto_destroy_tfm - Free crypto transform
 *	@mem: Start of tfm slab
 *	@tfm: Transform to free
 *
 *	This function frees up the transform and any associated resources,
 *	then drops the refcount on the associated algorithm.
 */
void crypto_destroy_tfm(void *mem, struct crypto_tfm *tfm)
{
	struct crypto_alg *alg;

	if (IS_ERR_OR_NULL(mem))
		return;

	alg = tfm->__crt_alg;

	if (!tfm->exit && alg->cra_exit)
		alg->cra_exit(tfm);
	crypto_exit_ops(tfm);
	crypto_mod_put(alg);
	kfree_sensitive(mem);
}
EXPORT_SYMBOL_GPL(crypto_destroy_tfm);

int crypto_has_alg(const char *name, u32 type, u32 mask)
{
	int ret = 0;
	struct crypto_alg *alg = crypto_alg_mod_lookup(name, type, mask);

	if (!IS_ERR(alg)) {
		crypto_mod_put(alg);
		ret = 1;
	}

	return ret;
}
EXPORT_SYMBOL_GPL(crypto_has_alg);

void crypto_req_done(struct crypto_async_request *req, int err)
{
	struct crypto_wait *wait = req->data;

	if (err == -EINPROGRESS)
		return;

	wait->err = err;
	complete(&wait->completion);
}
EXPORT_SYMBOL_GPL(crypto_req_done);
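
/*
 * Typical usage (a sketch; DECLARE_CRYPTO_WAIT() and crypto_wait_req()
 * live in <linux/crypto.h>): register crypto_req_done() as the request
 * callback with a struct crypto_wait as its data, then wait for the
 * asynchronous operation to finish:
 *
 *	DECLARE_CRYPTO_WAIT(wait);
 *
 *	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 *				   crypto_req_done, &wait);
 *	err = crypto_wait_req(crypto_ahash_digest(req), &wait);
 */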

MODULE_DESCRIPTION("Cryptographic core API");
MODULE_LICENSE("GPL");
MODULE_SOFTDEP("pre: cryptomgr");