crypto/algapi.c (v5.9)
   1// SPDX-License-Identifier: GPL-2.0-or-later
   2/*
   3 * Cryptographic API for algorithms (i.e., low-level API).
   4 *
   5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
   6 */
   7
   8#include <crypto/algapi.h>
   9#include <linux/err.h>
  10#include <linux/errno.h>
  11#include <linux/fips.h>
  12#include <linux/init.h>
  13#include <linux/kernel.h>
  14#include <linux/list.h>
  15#include <linux/module.h>
  16#include <linux/rtnetlink.h>
  17#include <linux/slab.h>
  18#include <linux/string.h>
  19
  20#include "internal.h"
  21
  22static LIST_HEAD(crypto_template_list);
  23
  24static inline void crypto_check_module_sig(struct module *mod)
  25{
  26	if (fips_enabled && mod && !module_sig_ok(mod))
  27		panic("Module %s signature verification failed in FIPS mode\n",
  28		      module_name(mod));
  29}
  30
  31static int crypto_check_alg(struct crypto_alg *alg)
  32{
  33	crypto_check_module_sig(alg->cra_module);
  34
  35	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
  36		return -EINVAL;
  37
  38	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
  39		return -EINVAL;
  40
  41	/* General maximums for all algs. */
  42	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
  43		return -EINVAL;
  44
  45	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
  46		return -EINVAL;
  47
  48	/* Lower maximums for specific alg types. */
  49	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
  50			       CRYPTO_ALG_TYPE_CIPHER) {
  51		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
  52			return -EINVAL;
  53
  54		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
  55			return -EINVAL;
  56	}
  57
  58	if (alg->cra_priority < 0)
  59		return -EINVAL;
  60
  61	refcount_set(&alg->cra_refcnt, 1);
  62
  63	return 0;
  64}
  65
  66static void crypto_free_instance(struct crypto_instance *inst)
  67{
  68	inst->alg.cra_type->free(inst);
  69}
  70
  71static void crypto_destroy_instance(struct crypto_alg *alg)
  72{
  73	struct crypto_instance *inst = (void *)alg;
  74	struct crypto_template *tmpl = inst->tmpl;
  75
  76	crypto_free_instance(inst);
  77	crypto_tmpl_put(tmpl);
  78}
  79
  80/*
  81 * This function adds a spawn to the list secondary_spawns which
  82 * will be used at the end of crypto_remove_spawns to unregister
  83 * instances, unless the spawn happens to be one that is depended
  84 * on by the new algorithm (nalg in crypto_remove_spawns).
  85 *
  86 * This function is also responsible for resurrecting any algorithms
  87 * in the dependency chain of nalg by unsetting n->dead.
  88 */
  89static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
  90					    struct list_head *stack,
  91					    struct list_head *top,
  92					    struct list_head *secondary_spawns)
  93{
  94	struct crypto_spawn *spawn, *n;
  95
  96	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
  97	if (!spawn)
  98		return NULL;
  99
 100	n = list_prev_entry(spawn, list);
 101	list_move(&spawn->list, secondary_spawns);
 102
 103	if (list_is_last(&n->list, stack))
 104		return top;
 105
 106	n = list_next_entry(n, list);
 107	if (!spawn->dead)
 108		n->dead = false;
 109
 110	return &n->inst->alg.cra_users;
 111}
 112
 113static void crypto_remove_instance(struct crypto_instance *inst,
 114				   struct list_head *list)
 115{
 116	struct crypto_template *tmpl = inst->tmpl;
 117
 118	if (crypto_is_dead(&inst->alg))
 119		return;
 120
 121	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
 122
 123	if (!tmpl || !crypto_tmpl_get(tmpl))
 124		return;
 125
 126	list_move(&inst->alg.cra_list, list);
 127	hlist_del(&inst->list);
 128	inst->alg.cra_destroy = crypto_destroy_instance;
 129
 130	BUG_ON(!list_empty(&inst->alg.cra_users));
 131}
 132
 133/*
 134 * Given an algorithm alg, remove all algorithms that depend on it
 135 * through spawns.  If nalg is not null, then exempt any algorithms
 136 * that are depended on by nalg.  This is useful when nalg itself
 137 * depends on alg.
 138 */
 139void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
 140			  struct crypto_alg *nalg)
 141{
 142	u32 new_type = (nalg ?: alg)->cra_flags;
 143	struct crypto_spawn *spawn, *n;
 144	LIST_HEAD(secondary_spawns);
 145	struct list_head *spawns;
 146	LIST_HEAD(stack);
 147	LIST_HEAD(top);
 148
 149	spawns = &alg->cra_users;
 150	list_for_each_entry_safe(spawn, n, spawns, list) {
 151		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
 152			continue;
 153
 154		list_move(&spawn->list, &top);
 155	}
 156
 157	/*
 158	 * Perform a depth-first walk starting from alg through
 159	 * the cra_users tree.  The list stack records the path
 160	 * from alg to the current spawn.
 161	 */
 162	spawns = &top;
 163	do {
 164		while (!list_empty(spawns)) {
 165			struct crypto_instance *inst;
 166
 167			spawn = list_first_entry(spawns, struct crypto_spawn,
 168						 list);
 169			inst = spawn->inst;
 170
 171			list_move(&spawn->list, &stack);
 172			spawn->dead = !spawn->registered || &inst->alg != nalg;
 173
 174			if (!spawn->registered)
 175				break;
 176
 177			BUG_ON(&inst->alg == alg);
 178
 179			if (&inst->alg == nalg)
 180				break;
 181
 182			spawns = &inst->alg.cra_users;
 183
 184			/*
 185			 * Even if spawn->registered is true, the
 186			 * instance itself may still be unregistered.
 187			 * This is because it may have failed during
 188			 * registration.  Therefore we still need to
 189			 * make the following test.
 190			 *
 191			 * We may encounter an unregistered instance here, since
 192			 * an instance's spawns are set up prior to the instance
 193			 * being registered.  An unregistered instance will have
 194			 * NULL ->cra_users.next, since ->cra_users isn't
 195			 * properly initialized until registration.  But an
 196			 * unregistered instance cannot have any users, so treat
 197			 * it the same as ->cra_users being empty.
 198			 */
 199			if (spawns->next == NULL)
 200				break;
 201		}
 202	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
 203					      &secondary_spawns)));
 204
 205	/*
 206	 * Remove all instances that are marked as dead.  Also
 207	 * complete the resurrection of the others by moving them
 208	 * back to the cra_users list.
 209	 */
 210	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
 211		if (!spawn->dead)
 212			list_move(&spawn->list, &spawn->alg->cra_users);
 213		else if (spawn->registered)
 214			crypto_remove_instance(spawn->inst, list);
 215	}
 216}
 217EXPORT_SYMBOL_GPL(crypto_remove_spawns);
 218
 219static struct crypto_larval *__crypto_register_alg(struct crypto_alg *alg)
 220{
 221	struct crypto_alg *q;
 222	struct crypto_larval *larval;
 223	int ret = -EAGAIN;
 224
 225	if (crypto_is_dead(alg))
 226		goto err;
 227
 228	INIT_LIST_HEAD(&alg->cra_users);
 229
 230	/* No cheating! */
 231	alg->cra_flags &= ~CRYPTO_ALG_TESTED;
 232
 233	ret = -EEXIST;
 234
 235	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 236		if (q == alg)
 237			goto err;
 238
 239		if (crypto_is_moribund(q))
 240			continue;
 241
 242		if (crypto_is_larval(q)) {
 243			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
 244				goto err;
 245			continue;
 246		}
 247
 248		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
 249		    !strcmp(q->cra_name, alg->cra_driver_name))
 250			goto err;
 251	}
 252
 253	larval = crypto_larval_alloc(alg->cra_name,
 254				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
 255	if (IS_ERR(larval))
 256		goto out;
 257
 258	ret = -ENOENT;
 259	larval->adult = crypto_mod_get(alg);
 260	if (!larval->adult)
 261		goto free_larval;
 262
 263	refcount_set(&larval->alg.cra_refcnt, 1);
 264	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
 265	       CRYPTO_MAX_ALG_NAME);
 266	larval->alg.cra_priority = alg->cra_priority;
 267
 268	list_add(&alg->cra_list, &crypto_alg_list);
 269	list_add(&larval->alg.cra_list, &crypto_alg_list);
 270
 271	crypto_stats_init(alg);
 272
 273out:
 274	return larval;
 275
 276free_larval:
 277	kfree(larval);
 278err:
 279	larval = ERR_PTR(ret);
 280	goto out;
 281}
 282
 283void crypto_alg_tested(const char *name, int err)
 284{
 285	struct crypto_larval *test;
 286	struct crypto_alg *alg;
 287	struct crypto_alg *q;
 288	LIST_HEAD(list);
 289	bool best;
 290
 291	down_write(&crypto_alg_sem);
 292	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 293		if (crypto_is_moribund(q) || !crypto_is_larval(q))
 294			continue;
 295
 296		test = (struct crypto_larval *)q;
 297
 298		if (!strcmp(q->cra_driver_name, name))
 299			goto found;
 300	}
 301
 302	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
 303	goto unlock;
 304
 305found:
 306	q->cra_flags |= CRYPTO_ALG_DEAD;
 307	alg = test->adult;
 308	if (err || list_empty(&alg->cra_list))
 309		goto complete;
 310
 311	alg->cra_flags |= CRYPTO_ALG_TESTED;
 312
 313	/* Only satisfy larval waiters if we are the best. */
 314	best = true;
 315	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 316		if (crypto_is_moribund(q) || !crypto_is_larval(q))
 317			continue;
 318
 319		if (strcmp(alg->cra_name, q->cra_name))
 320			continue;
 321
 322		if (q->cra_priority > alg->cra_priority) {
 323			best = false;
 324			break;
 325		}
 326	}
 327
 328	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 329		if (q == alg)
 330			continue;
 331
 332		if (crypto_is_moribund(q))
 333			continue;
 334
 335		if (crypto_is_larval(q)) {
 336			struct crypto_larval *larval = (void *)q;
 337
 338			/*
 339			 * Check to see if either our generic name or
 340			 * specific name can satisfy the name requested
 341			 * by the larval entry q.
 342			 */
 343			if (strcmp(alg->cra_name, q->cra_name) &&
 344			    strcmp(alg->cra_driver_name, q->cra_name))
 345				continue;
 346
 347			if (larval->adult)
 348				continue;
 349			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
 350				continue;
 351
 352			if (best && crypto_mod_get(alg))
 353				larval->adult = alg;
 354			else
 355				larval->adult = ERR_PTR(-EAGAIN);
 356
 357			continue;
 358		}
 359
 360		if (strcmp(alg->cra_name, q->cra_name))
 361			continue;
 362
 363		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
 364		    q->cra_priority > alg->cra_priority)
 365			continue;
 366
 367		crypto_remove_spawns(q, &list, alg);
 368	}
 369
 370complete:
 371	complete_all(&test->completion);
 372
 373unlock:
 374	up_write(&crypto_alg_sem);
 375
 376	crypto_remove_final(&list);
 377}
 378EXPORT_SYMBOL_GPL(crypto_alg_tested);
 379
 380void crypto_remove_final(struct list_head *list)
 381{
 382	struct crypto_alg *alg;
 383	struct crypto_alg *n;
 384
 385	list_for_each_entry_safe(alg, n, list, cra_list) {
 386		list_del_init(&alg->cra_list);
 387		crypto_alg_put(alg);
 388	}
 389}
 390EXPORT_SYMBOL_GPL(crypto_remove_final);
 391
 392static void crypto_wait_for_test(struct crypto_larval *larval)
 393{
 394	int err;
 395
 396	err = crypto_probing_notify(CRYPTO_MSG_ALG_REGISTER, larval->adult);
 397	if (err != NOTIFY_STOP) {
 398		if (WARN_ON(err != NOTIFY_DONE))
 399			goto out;
 400		crypto_alg_tested(larval->alg.cra_driver_name, 0);
 401	}
 402
 403	err = wait_for_completion_killable(&larval->completion);
 404	WARN_ON(err);
 405	if (!err)
 406		crypto_notify(CRYPTO_MSG_ALG_LOADED, larval);
 407
 408out:
 409	crypto_larval_kill(&larval->alg);
 410}
 411
 412int crypto_register_alg(struct crypto_alg *alg)
 413{
 414	struct crypto_larval *larval;
 415	int err;
 416
 417	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
 418	err = crypto_check_alg(alg);
 419	if (err)
 420		return err;
 421
 422	down_write(&crypto_alg_sem);
 423	larval = __crypto_register_alg(alg);
 424	up_write(&crypto_alg_sem);
 425
 426	if (IS_ERR(larval))
 427		return PTR_ERR(larval);
 428
 429	crypto_wait_for_test(larval);
 430	return 0;
 431}
 432EXPORT_SYMBOL_GPL(crypto_register_alg);
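
Example (editorial sketch, not part of the kernel source): a minimal
driver-side use of the registration path above. The names are
hypothetical, and a real algorithm must also fill in its cra_u/cra_type
operations before the self-tests can pass.

#include <linux/crypto.h>
#include <linux/module.h>

static struct crypto_alg example_alg = {
	.cra_name        = "example",          /* generic name */
	.cra_driver_name = "example-generic",  /* unique driver name */
	.cra_priority    = 100,                /* must be >= 0 */
	.cra_flags       = CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize   = 16,  /* <= MAX_CIPHER_BLOCKSIZE for bare ciphers */
	.cra_module      = THIS_MODULE,
	/* .cra_u.cipher = { ... } would carry the actual implementation */
};

static int __init example_mod_init(void)
{
	/* Blocks in crypto_wait_for_test() until the self-test finishes. */
	return crypto_register_alg(&example_alg);
}

static void __exit example_mod_exit(void)
{
	crypto_unregister_alg(&example_alg);
}

module_init(example_mod_init);
module_exit(example_mod_exit);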
 433
 434static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
 435{
 436	if (unlikely(list_empty(&alg->cra_list)))
 437		return -ENOENT;
 438
 439	alg->cra_flags |= CRYPTO_ALG_DEAD;
 440
 441	list_del_init(&alg->cra_list);
 442	crypto_remove_spawns(alg, list, NULL);
 443
 444	return 0;
 445}
 446
 447void crypto_unregister_alg(struct crypto_alg *alg)
 448{
 449	int ret;
 450	LIST_HEAD(list);
 451
 452	down_write(&crypto_alg_sem);
 453	ret = crypto_remove_alg(alg, &list);
 454	up_write(&crypto_alg_sem);
 455
 456	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
 457		return;
 458
 459	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
 460	if (alg->cra_destroy)
 461		alg->cra_destroy(alg);
 462
 463	crypto_remove_final(&list);
 464}
 465EXPORT_SYMBOL_GPL(crypto_unregister_alg);
 466
 467int crypto_register_algs(struct crypto_alg *algs, int count)
 468{
 469	int i, ret;
 470
 471	for (i = 0; i < count; i++) {
 472		ret = crypto_register_alg(&algs[i]);
 473		if (ret)
 474			goto err;
 475	}
 476
 477	return 0;
 478
 479err:
 480	for (--i; i >= 0; --i)
 481		crypto_unregister_alg(&algs[i]);
 482
 483	return ret;
 484}
 485EXPORT_SYMBOL_GPL(crypto_register_algs);
 486
 487void crypto_unregister_algs(struct crypto_alg *algs, int count)
 488{
 489	int i;
 490
 491	for (i = 0; i < count; i++)
 492		crypto_unregister_alg(&algs[i]);
 493}
 494EXPORT_SYMBOL_GPL(crypto_unregister_algs);
 495
 496int crypto_register_template(struct crypto_template *tmpl)
 497{
 498	struct crypto_template *q;
 499	int err = -EEXIST;
 500
 501	down_write(&crypto_alg_sem);
 502
 503	crypto_check_module_sig(tmpl->module);
 504
 505	list_for_each_entry(q, &crypto_template_list, list) {
 506		if (q == tmpl)
 507			goto out;
 508	}
 509
 510	list_add(&tmpl->list, &crypto_template_list);
 511	err = 0;
 512out:
 513	up_write(&crypto_alg_sem);
 514	return err;
 515}
 516EXPORT_SYMBOL_GPL(crypto_register_template);
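
Example (editorial sketch, hypothetical names): the template side of the
registration above. A real create() callback parses tb and registers an
instance; here it is stubbed out.

static int example_tmpl_create(struct crypto_template *tmpl,
			       struct rtattr **tb)
{
	return -ENOSYS;  /* instance construction elided */
}

static struct crypto_template example_tmpl = {
	.name   = "example",
	.create = example_tmpl_create,
	.module = THIS_MODULE,
};

/* registered from module init via crypto_register_template(&example_tmpl) */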
 517
 518int crypto_register_templates(struct crypto_template *tmpls, int count)
 519{
 520	int i, err;
 521
 522	for (i = 0; i < count; i++) {
 523		err = crypto_register_template(&tmpls[i]);
 524		if (err)
 525			goto out;
 526	}
 527	return 0;
 528
 529out:
 530	for (--i; i >= 0; --i)
 531		crypto_unregister_template(&tmpls[i]);
 532	return err;
 533}
 534EXPORT_SYMBOL_GPL(crypto_register_templates);
 535
 536void crypto_unregister_template(struct crypto_template *tmpl)
 537{
 538	struct crypto_instance *inst;
 539	struct hlist_node *n;
 540	struct hlist_head *list;
 541	LIST_HEAD(users);
 542
 543	down_write(&crypto_alg_sem);
 544
 545	BUG_ON(list_empty(&tmpl->list));
 546	list_del_init(&tmpl->list);
 547
 548	list = &tmpl->instances;
 549	hlist_for_each_entry(inst, list, list) {
 550		int err = crypto_remove_alg(&inst->alg, &users);
 551
 552		BUG_ON(err);
 553	}
 554
 555	up_write(&crypto_alg_sem);
 556
 557	hlist_for_each_entry_safe(inst, n, list, list) {
 558		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
 559		crypto_free_instance(inst);
 560	}
 561	crypto_remove_final(&users);
 562}
 563EXPORT_SYMBOL_GPL(crypto_unregister_template);
 564
 565void crypto_unregister_templates(struct crypto_template *tmpls, int count)
 566{
 567	int i;
 568
 569	for (i = count - 1; i >= 0; --i)
 570		crypto_unregister_template(&tmpls[i]);
 571}
 572EXPORT_SYMBOL_GPL(crypto_unregister_templates);
 573
 574static struct crypto_template *__crypto_lookup_template(const char *name)
 575{
 576	struct crypto_template *q, *tmpl = NULL;
 577
 578	down_read(&crypto_alg_sem);
 579	list_for_each_entry(q, &crypto_template_list, list) {
 580		if (strcmp(q->name, name))
 581			continue;
 582		if (unlikely(!crypto_tmpl_get(q)))
 583			continue;
 584
 585		tmpl = q;
 586		break;
 587	}
 588	up_read(&crypto_alg_sem);
 589
 590	return tmpl;
 591}
 592
 593struct crypto_template *crypto_lookup_template(const char *name)
 594{
 595	return try_then_request_module(__crypto_lookup_template(name),
 596				       "crypto-%s", name);
 597}
 598EXPORT_SYMBOL_GPL(crypto_lookup_template);
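
Because crypto_lookup_template() falls back to
request_module("crypto-%s", name), a template module normally declares a
matching alias so it can be loaded on demand, e.g.:

/* in the module that implements the "example" template */
MODULE_ALIAS_CRYPTO("example");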
 599
 600int crypto_register_instance(struct crypto_template *tmpl,
 601			     struct crypto_instance *inst)
 602{
 603	struct crypto_larval *larval;
 604	struct crypto_spawn *spawn;
 605	int err;
 606
 607	err = crypto_check_alg(&inst->alg);
 608	if (err)
 609		return err;
 610
 611	inst->alg.cra_module = tmpl->module;
 612	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
 613
 614	down_write(&crypto_alg_sem);
 615
 616	larval = ERR_PTR(-EAGAIN);
 617	for (spawn = inst->spawns; spawn;) {
 618		struct crypto_spawn *next;
 619
 620		if (spawn->dead)
 621			goto unlock;
 622
 623		next = spawn->next;
 624		spawn->inst = inst;
 625		spawn->registered = true;
 626
 627		crypto_mod_put(spawn->alg);
 628
 629		spawn = next;
 630	}
 631
 632	larval = __crypto_register_alg(&inst->alg);
 633	if (IS_ERR(larval))
 634		goto unlock;
 635
 636	hlist_add_head(&inst->list, &tmpl->instances);
 637	inst->tmpl = tmpl;
 638
 639unlock:
 640	up_write(&crypto_alg_sem);
 641
 642	err = PTR_ERR(larval);
 643	if (IS_ERR(larval))
 644		goto err;
 645
 646	crypto_wait_for_test(larval);
 647	err = 0;
 648
 649err:
 650	return err;
 651}
 652EXPORT_SYMBOL_GPL(crypto_register_instance);
 653
 654void crypto_unregister_instance(struct crypto_instance *inst)
 655{
 656	LIST_HEAD(list);
 657
 658	down_write(&crypto_alg_sem);
 659
 660	crypto_remove_spawns(&inst->alg, &list, NULL);
 661	crypto_remove_instance(inst, &list);
 662
 663	up_write(&crypto_alg_sem);
 664
 665	crypto_remove_final(&list);
 666}
 667EXPORT_SYMBOL_GPL(crypto_unregister_instance);
 668
 669int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
 670		      const char *name, u32 type, u32 mask)
 671{
 672	struct crypto_alg *alg;
 673	int err = -EAGAIN;
 674
 675	if (WARN_ON_ONCE(inst == NULL))
 676		return -EINVAL;
 677
 678	/* Allow the result of crypto_attr_alg_name() to be passed directly */
 679	if (IS_ERR(name))
 680		return PTR_ERR(name);
 681
 682	alg = crypto_find_alg(name, spawn->frontend, type, mask);
 683	if (IS_ERR(alg))
 684		return PTR_ERR(alg);
 685
 686	down_write(&crypto_alg_sem);
 687	if (!crypto_is_moribund(alg)) {
 688		list_add(&spawn->list, &alg->cra_users);
 689		spawn->alg = alg;
 690		spawn->mask = mask;
 691		spawn->next = inst->spawns;
 692		inst->spawns = spawn;
 693		inst->alg.cra_flags |=
 694			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
 695		err = 0;
 696	}
 697	up_write(&crypto_alg_sem);
 698	if (err)
 699		crypto_mod_put(alg);
 700	return err;
 701}
 702EXPORT_SYMBOL_GPL(crypto_grab_spawn);
 703
 704void crypto_drop_spawn(struct crypto_spawn *spawn)
 705{
 706	if (!spawn->alg) /* not yet initialized? */
 707		return;
 708
 709	down_write(&crypto_alg_sem);
 710	if (!spawn->dead)
 711		list_del(&spawn->list);
 712	up_write(&crypto_alg_sem);
 713
 714	if (!spawn->registered)
 715		crypto_mod_put(spawn->alg);
 716}
 717EXPORT_SYMBOL_GPL(crypto_drop_spawn);
 718
 719static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
 720{
 721	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
 722	struct crypto_alg *target;
 723	bool shoot = false;
 724
 725	down_read(&crypto_alg_sem);
 726	if (!spawn->dead) {
 727		alg = spawn->alg;
 728		if (!crypto_mod_get(alg)) {
 729			target = crypto_alg_get(alg);
 730			shoot = true;
 731			alg = ERR_PTR(-EAGAIN);
 732		}
 733	}
 734	up_read(&crypto_alg_sem);
 735
 736	if (shoot) {
 737		crypto_shoot_alg(target);
 738		crypto_alg_put(target);
 739	}
 740
 741	return alg;
 742}
 743
 744struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 745				    u32 mask)
 746{
 747	struct crypto_alg *alg;
 748	struct crypto_tfm *tfm;
 749
 750	alg = crypto_spawn_alg(spawn);
 751	if (IS_ERR(alg))
 752		return ERR_CAST(alg);
 753
 754	tfm = ERR_PTR(-EINVAL);
 755	if (unlikely((alg->cra_flags ^ type) & mask))
 756		goto out_put_alg;
 757
 758	tfm = __crypto_alloc_tfm(alg, type, mask);
 759	if (IS_ERR(tfm))
 760		goto out_put_alg;
 761
 762	return tfm;
 763
 764out_put_alg:
 765	crypto_mod_put(alg);
 766	return tfm;
 767}
 768EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
 769
 770void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
 771{
 772	struct crypto_alg *alg;
 773	struct crypto_tfm *tfm;
 774
 775	alg = crypto_spawn_alg(spawn);
 776	if (IS_ERR(alg))
 777		return ERR_CAST(alg);
 778
 779	tfm = crypto_create_tfm(alg, spawn->frontend);
 780	if (IS_ERR(tfm))
 781		goto out_put_alg;
 782
 783	return tfm;
 784
 785out_put_alg:
 786	crypto_mod_put(alg);
 787	return tfm;
 788}
 789EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
 790
 791int crypto_register_notifier(struct notifier_block *nb)
 792{
 793	return blocking_notifier_chain_register(&crypto_chain, nb);
 794}
 795EXPORT_SYMBOL_GPL(crypto_register_notifier);
 796
 797int crypto_unregister_notifier(struct notifier_block *nb)
 798{
 799	return blocking_notifier_chain_unregister(&crypto_chain, nb);
 800}
 801EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
 802
 803struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
 804{
 805	struct rtattr *rta = tb[0];
 806	struct crypto_attr_type *algt;
 807
 808	if (!rta)
 809		return ERR_PTR(-ENOENT);
 810	if (RTA_PAYLOAD(rta) < sizeof(*algt))
 811		return ERR_PTR(-EINVAL);
 812	if (rta->rta_type != CRYPTOA_TYPE)
 813		return ERR_PTR(-EINVAL);
 814
 815	algt = RTA_DATA(rta);
 816
 817	return algt;
 818}
 819EXPORT_SYMBOL_GPL(crypto_get_attr_type);
 820
 821/**
 822 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 823 * @tb: the template parameters
 824 * @type: the algorithm type the template would be instantiated as
 825 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 826 *	      to restrict the flags of any inner algorithms
 827 *
 828 * Validate that the algorithm type the user requested is compatible with the
 829 * one the template would actually be instantiated as.  E.g., if the user is
 830 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 831 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 832 *
 833 * Also compute the mask to use to restrict the flags of any inner algorithms.
 834 *
 835 * Return: 0 on success; -errno on failure
 836 */
 837int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
 838{
 839	struct crypto_attr_type *algt;
 840
 841	algt = crypto_get_attr_type(tb);
 842	if (IS_ERR(algt))
 843		return PTR_ERR(algt);
 844
 845	if ((algt->type ^ type) & algt->mask)
 846		return -EINVAL;
 847
 848	*mask_ret = crypto_algt_inherited_mask(algt);
 849	return 0;
 850}
 851EXPORT_SYMBOL_GPL(crypto_check_attr_type);
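
Example (editorial sketch, hypothetical names): typical use at the top of
a template's create() callback, assuming the template instantiates an
skcipher.

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	u32 mask;
	int err;

	/* Rejects e.g. crypto_alloc_shash("example(...)") requests. */
	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
	if (err)
		return err;

	/* mask is then handed to crypto_grab_*() for inner algorithms. */
	return -ENOSYS;  /* instance construction elided */
}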
 852
 853const char *crypto_attr_alg_name(struct rtattr *rta)
 854{
 855	struct crypto_attr_alg *alga;
 856
 857	if (!rta)
 858		return ERR_PTR(-ENOENT);
 859	if (RTA_PAYLOAD(rta) < sizeof(*alga))
 860		return ERR_PTR(-EINVAL);
 861	if (rta->rta_type != CRYPTOA_ALG)
 862		return ERR_PTR(-EINVAL);
 863
 864	alga = RTA_DATA(rta);
 865	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
 866
 867	return alga->name;
 868}
 869EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
 870
 871int crypto_attr_u32(struct rtattr *rta, u32 *num)
 872{
 873	struct crypto_attr_u32 *nu32;
 874
 875	if (!rta)
 876		return -ENOENT;
 877	if (RTA_PAYLOAD(rta) < sizeof(*nu32))
 878		return -EINVAL;
 879	if (rta->rta_type != CRYPTOA_U32)
 880		return -EINVAL;
 881
 882	nu32 = RTA_DATA(rta);
 883	*num = nu32->num;
 884
 885	return 0;
 886}
 887EXPORT_SYMBOL_GPL(crypto_attr_u32);
 888
 889int crypto_inst_setname(struct crypto_instance *inst, const char *name,
 890			struct crypto_alg *alg)
 891{
 892	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
 893		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
 894		return -ENAMETOOLONG;
 895
 896	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
 897		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
 898		return -ENAMETOOLONG;
 899
 900	return 0;
 901}
 902EXPORT_SYMBOL_GPL(crypto_inst_setname);
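
A worked example of the naming scheme (editorial note):

/* crypto_inst_setname(inst, "hmac", alg) with alg->cra_name = "sha256"
 * and alg->cra_driver_name = "sha256-generic" sets
 *	inst->alg.cra_name        = "hmac(sha256)"
 *	inst->alg.cra_driver_name = "hmac(sha256-generic)"
 * and returns -ENAMETOOLONG if either string would overflow
 * CRYPTO_MAX_ALG_NAME.
 */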
 903
 904void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
 905{
 906	INIT_LIST_HEAD(&queue->list);
 907	queue->backlog = &queue->list;
 908	queue->qlen = 0;
 909	queue->max_qlen = max_qlen;
 910}
 911EXPORT_SYMBOL_GPL(crypto_init_queue);
 912
 913int crypto_enqueue_request(struct crypto_queue *queue,
 914			   struct crypto_async_request *request)
 915{
 916	int err = -EINPROGRESS;
 917
 918	if (unlikely(queue->qlen >= queue->max_qlen)) {
 919		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
 920			err = -ENOSPC;
 921			goto out;
 922		}
 923		err = -EBUSY;
 924		if (queue->backlog == &queue->list)
 925			queue->backlog = &request->list;
 926	}
 927
 928	queue->qlen++;
 929	list_add_tail(&request->list, &queue->list);
 930
 931out:
 932	return err;
 933}
 934EXPORT_SYMBOL_GPL(crypto_enqueue_request);
 935
 936void crypto_enqueue_request_head(struct crypto_queue *queue,
 937				 struct crypto_async_request *request)
 938{
 939	queue->qlen++;
 940	list_add(&request->list, &queue->list);
 941}
 942EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
 943
 944struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
 945{
 946	struct list_head *request;
 947
 948	if (unlikely(!queue->qlen))
 949		return NULL;
 950
 951	queue->qlen--;
 952
 953	if (queue->backlog != &queue->list)
 954		queue->backlog = queue->backlog->next;
 955
 956	request = queue->list.next;
 957	list_del(request);
 958
 959	return list_entry(request, struct crypto_async_request, list);
 960}
 961EXPORT_SYMBOL_GPL(crypto_dequeue_request);
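
Example (editorial sketch, hypothetical names): the usual driver pattern
around these queue helpers. Real drivers serialize all of these calls
with a spinlock.

static struct crypto_queue example_queue;

static void example_queue_setup(void)
{
	crypto_init_queue(&example_queue, 32);	/* max_qlen = 32 */
}

static int example_submit(struct crypto_async_request *req)
{
	/*
	 * -EINPROGRESS: queued normally.
	 * -EBUSY:       queue full, request parked on the backlog (the
	 *               request set CRYPTO_TFM_REQ_MAY_BACKLOG).
	 * -ENOSPC:      queue full and backlogging not allowed.
	 */
	return crypto_enqueue_request(&example_queue, req);
}

static void example_process_all(void)
{
	struct crypto_async_request *req;

	while ((req = crypto_dequeue_request(&example_queue)) != NULL)
		;	/* process req, then invoke its completion callback */
}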
 962
 963static inline void crypto_inc_byte(u8 *a, unsigned int size)
 964{
 965	u8 *b = (a + size);
 966	u8 c;
 967
 968	for (; size; size--) {
 969		c = *--b + 1;
 970		*b = c;
 971		if (c)
 972			break;
 973	}
 974}
 975
 976void crypto_inc(u8 *a, unsigned int size)
 977{
 978	__be32 *b = (__be32 *)(a + size);
 979	u32 c;
 980
 981	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
 982	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
 983		for (; size >= 4; size -= 4) {
 984			c = be32_to_cpu(*--b) + 1;
 985			*b = cpu_to_be32(c);
 986			if (likely(c))
 987				return;
 988		}
 989
 990	crypto_inc_byte(a, size);
 991}
 992EXPORT_SYMBOL_GPL(crypto_inc);
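
crypto_inc() treats the buffer as a single big-endian integer, which is
what CTR-style modes need for their counter block. A small illustration:

u8 ctr[16] = { [14] = 0x01, [15] = 0xff };	/* ... 01 ff */

crypto_inc(ctr, sizeof(ctr));			/* ... 02 00 */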
 993
 994void __crypto_xor(u8 *dst, const u8 *src1, const u8 *src2, unsigned int len)
 995{
 996	int relalign = 0;
 997
 998	if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) {
 999		int size = sizeof(unsigned long);
1000		int d = (((unsigned long)dst ^ (unsigned long)src1) |
1001			 ((unsigned long)dst ^ (unsigned long)src2)) &
1002			(size - 1);
1003
1004		relalign = d ? 1 << __ffs(d) : size;
1005
1006		/*
1007		 * If we care about alignment, process as many bytes as
1008		 * needed to advance dst and src to values whose alignments
1009		 * equal their relative alignment. This will allow us to
1010		 * process the remainder of the input using optimal strides.
1011		 */
1012		while (((unsigned long)dst & (relalign - 1)) && len > 0) {
1013			*dst++ = *src1++ ^ *src2++;
1014			len--;
1015		}
1016	}
1017
1018	while (IS_ENABLED(CONFIG_64BIT) && len >= 8 && !(relalign & 7)) {
1019		*(u64 *)dst = *(u64 *)src1 ^ *(u64 *)src2;
1020		dst += 8;
1021		src1 += 8;
1022		src2 += 8;
1023		len -= 8;
1024	}
1025
1026	while (len >= 4 && !(relalign & 3)) {
1027		*(u32 *)dst = *(u32 *)src1 ^ *(u32 *)src2;
1028		dst += 4;
1029		src1 += 4;
1030		src2 += 4;
1031		len -= 4;
1032	}
1033
1034	while (len >= 2 && !(relalign & 1)) {
1035		*(u16 *)dst = *(u16 *)src1 ^ *(u16 *)src2;
1036		dst += 2;
1037		src1 += 2;
1038		src2 += 2;
1039		len -= 2;
1040	}
1041
1042	while (len--)
1043		*dst++ = *src1++ ^ *src2++;
1044}
1045EXPORT_SYMBOL_GPL(__crypto_xor);
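
Callers normally reach this helper through the crypto_xor() and
crypto_xor_cpy() wrappers in <crypto/algapi.h>; a sketch of the common
in-place use:

u8 buf[16], keystream[16];

/* buf[i] ^= keystream[i] for all 16 bytes */
crypto_xor(buf, keystream, sizeof(buf));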
1046
1047unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1048{
1049	return alg->cra_ctxsize +
1050	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1051}
1052EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1053
1054int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1055			u32 type, u32 mask)
1056{
1057	int ret = 0;
1058	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1059
1060	if (!IS_ERR(alg)) {
1061		crypto_mod_put(alg);
1062		ret = 1;
1063	}
1064
1065	return ret;
1066}
1067EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1068
1069#ifdef CONFIG_CRYPTO_STATS
1070void crypto_stats_init(struct crypto_alg *alg)
1071{
1072	memset(&alg->stats, 0, sizeof(alg->stats));
1073}
1074EXPORT_SYMBOL_GPL(crypto_stats_init);
1075
1076void crypto_stats_get(struct crypto_alg *alg)
1077{
1078	crypto_alg_get(alg);
1079}
1080EXPORT_SYMBOL_GPL(crypto_stats_get);
1081
1082void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1083			       int ret)
1084{
1085	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1086		atomic64_inc(&alg->stats.aead.err_cnt);
1087	} else {
1088		atomic64_inc(&alg->stats.aead.encrypt_cnt);
1089		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1090	}
1091	crypto_alg_put(alg);
1092}
1093EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1094
1095void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1096			       int ret)
1097{
1098	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1099		atomic64_inc(&alg->stats.aead.err_cnt);
1100	} else {
1101		atomic64_inc(&alg->stats.aead.decrypt_cnt);
1102		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1103	}
1104	crypto_alg_put(alg);
1105}
1106EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1107
1108void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1109				   struct crypto_alg *alg)
1110{
1111	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1112		atomic64_inc(&alg->stats.akcipher.err_cnt);
1113	} else {
1114		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1115		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1116	}
1117	crypto_alg_put(alg);
1118}
1119EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1120
1121void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1122				   struct crypto_alg *alg)
1123{
1124	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1125		atomic64_inc(&alg->stats.akcipher.err_cnt);
1126	} else {
1127		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1128		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1129	}
1130	crypto_alg_put(alg);
1131}
1132EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1133
1134void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1135{
1136	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1137		atomic64_inc(&alg->stats.akcipher.err_cnt);
1138	else
1139		atomic64_inc(&alg->stats.akcipher.sign_cnt);
1140	crypto_alg_put(alg);
1141}
1142EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1143
1144void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1145{
1146	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1147		atomic64_inc(&alg->stats.akcipher.err_cnt);
1148	else
1149		atomic64_inc(&alg->stats.akcipher.verify_cnt);
1150	crypto_alg_put(alg);
1151}
1152EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1153
1154void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1155{
1156	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1157		atomic64_inc(&alg->stats.compress.err_cnt);
1158	} else {
1159		atomic64_inc(&alg->stats.compress.compress_cnt);
1160		atomic64_add(slen, &alg->stats.compress.compress_tlen);
1161	}
1162	crypto_alg_put(alg);
1163}
1164EXPORT_SYMBOL_GPL(crypto_stats_compress);
1165
1166void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1167{
1168	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1169		atomic64_inc(&alg->stats.compress.err_cnt);
1170	} else {
1171		atomic64_inc(&alg->stats.compress.decompress_cnt);
1172		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1173	}
1174	crypto_alg_put(alg);
1175}
1176EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1177
1178void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1179			       struct crypto_alg *alg)
1180{
1181	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1182		atomic64_inc(&alg->stats.hash.err_cnt);
1183	else
1184		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1185	crypto_alg_put(alg);
1186}
1187EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1188
1189void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1190			      struct crypto_alg *alg)
1191{
1192	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1193		atomic64_inc(&alg->stats.hash.err_cnt);
1194	} else {
1195		atomic64_inc(&alg->stats.hash.hash_cnt);
1196		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1197	}
1198	crypto_alg_put(alg);
1199}
1200EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1201
1202void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1203{
1204	if (ret)
1205		atomic64_inc(&alg->stats.kpp.err_cnt);
1206	else
1207		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1208	crypto_alg_put(alg);
1209}
1210EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1211
1212void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1213{
1214	if (ret)
1215		atomic64_inc(&alg->stats.kpp.err_cnt);
1216	else
1217		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1218	crypto_alg_put(alg);
1219}
1220EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1221
1222void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1223{
1224	if (ret)
1225		atomic64_inc(&alg->stats.kpp.err_cnt);
1226	else
1227		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1228	crypto_alg_put(alg);
1229}
1230EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1231
1232void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1233{
1234	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1235		atomic64_inc(&alg->stats.rng.err_cnt);
1236	else
1237		atomic64_inc(&alg->stats.rng.seed_cnt);
1238	crypto_alg_put(alg);
1239}
1240EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1241
1242void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1243			       int ret)
1244{
1245	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1246		atomic64_inc(&alg->stats.rng.err_cnt);
1247	} else {
1248		atomic64_inc(&alg->stats.rng.generate_cnt);
1249		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1250	}
1251	crypto_alg_put(alg);
1252}
1253EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1254
1255void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1256				   struct crypto_alg *alg)
1257{
1258	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1259		atomic64_inc(&alg->stats.cipher.err_cnt);
1260	} else {
1261		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1262		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1263	}
1264	crypto_alg_put(alg);
1265}
1266EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1267
1268void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1269				   struct crypto_alg *alg)
1270{
1271	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1272		atomic64_inc(&alg->stats.cipher.err_cnt);
1273	} else {
1274		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1275		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1276	}
1277	crypto_alg_put(alg);
1278}
1279EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1280#endif
1281
1282static int __init crypto_algapi_init(void)
1283{
1284	crypto_init_proc();
1285	return 0;
1286}
1287
1288static void __exit crypto_algapi_exit(void)
1289{
1290	crypto_exit_proc();
1291}
1292
1293module_init(crypto_algapi_init);
1294module_exit(crypto_algapi_exit);
1295
1296MODULE_LICENSE("GPL");
1297MODULE_DESCRIPTION("Cryptographic algorithms API");
crypto/algapi.c (v6.2)
   1// SPDX-License-Identifier: GPL-2.0-or-later
   2/*
   3 * Cryptographic API for algorithms (i.e., low-level API).
   4 *
   5 * Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
   6 */
   7
   8#include <crypto/algapi.h>
   9#include <crypto/internal/simd.h>
  10#include <linux/err.h>
  11#include <linux/errno.h>
  12#include <linux/fips.h>
  13#include <linux/init.h>
  14#include <linux/kernel.h>
  15#include <linux/list.h>
  16#include <linux/module.h>
  17#include <linux/rtnetlink.h>
  18#include <linux/slab.h>
  19#include <linux/string.h>
  20
  21#include "internal.h"
  22
  23static LIST_HEAD(crypto_template_list);
  24
  25#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
  26DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
  27EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
  28#endif
  29
  30static inline void crypto_check_module_sig(struct module *mod)
  31{
  32	if (fips_enabled && mod && !module_sig_ok(mod))
  33		panic("Module %s signature verification failed in FIPS mode\n",
  34		      module_name(mod));
  35}
  36
  37static int crypto_check_alg(struct crypto_alg *alg)
  38{
  39	crypto_check_module_sig(alg->cra_module);
  40
  41	if (!alg->cra_name[0] || !alg->cra_driver_name[0])
  42		return -EINVAL;
  43
  44	if (alg->cra_alignmask & (alg->cra_alignmask + 1))
  45		return -EINVAL;
  46
  47	/* General maximums for all algs. */
  48	if (alg->cra_alignmask > MAX_ALGAPI_ALIGNMASK)
  49		return -EINVAL;
  50
  51	if (alg->cra_blocksize > MAX_ALGAPI_BLOCKSIZE)
  52		return -EINVAL;
  53
  54	/* Lower maximums for specific alg types. */
  55	if (!alg->cra_type && (alg->cra_flags & CRYPTO_ALG_TYPE_MASK) ==
  56			       CRYPTO_ALG_TYPE_CIPHER) {
  57		if (alg->cra_alignmask > MAX_CIPHER_ALIGNMASK)
  58			return -EINVAL;
  59
  60		if (alg->cra_blocksize > MAX_CIPHER_BLOCKSIZE)
  61			return -EINVAL;
  62	}
  63
  64	if (alg->cra_priority < 0)
  65		return -EINVAL;
  66
  67	refcount_set(&alg->cra_refcnt, 1);
  68
  69	return 0;
  70}
  71
  72static void crypto_free_instance(struct crypto_instance *inst)
  73{
  74	inst->alg.cra_type->free(inst);
  75}
  76
  77static void crypto_destroy_instance(struct crypto_alg *alg)
  78{
  79	struct crypto_instance *inst = (void *)alg;
  80	struct crypto_template *tmpl = inst->tmpl;
  81
  82	crypto_free_instance(inst);
  83	crypto_tmpl_put(tmpl);
  84}
  85
  86/*
  87 * This function adds a spawn to the list secondary_spawns which
  88 * will be used at the end of crypto_remove_spawns to unregister
  89 * instances, unless the spawn happens to be one that is depended
  90 * on by the new algorithm (nalg in crypto_remove_spawns).
  91 *
  92 * This function is also responsible for resurrecting any algorithms
  93 * in the dependency chain of nalg by unsetting n->dead.
  94 */
  95static struct list_head *crypto_more_spawns(struct crypto_alg *alg,
  96					    struct list_head *stack,
  97					    struct list_head *top,
  98					    struct list_head *secondary_spawns)
  99{
 100	struct crypto_spawn *spawn, *n;
 101
 102	spawn = list_first_entry_or_null(stack, struct crypto_spawn, list);
 103	if (!spawn)
 104		return NULL;
 105
 106	n = list_prev_entry(spawn, list);
 107	list_move(&spawn->list, secondary_spawns);
 108
 109	if (list_is_last(&n->list, stack))
 110		return top;
 111
 112	n = list_next_entry(n, list);
 113	if (!spawn->dead)
 114		n->dead = false;
 115
 116	return &n->inst->alg.cra_users;
 117}
 118
 119static void crypto_remove_instance(struct crypto_instance *inst,
 120				   struct list_head *list)
 121{
 122	struct crypto_template *tmpl = inst->tmpl;
 123
 124	if (crypto_is_dead(&inst->alg))
 125		return;
 126
 127	inst->alg.cra_flags |= CRYPTO_ALG_DEAD;
 128
 129	if (!tmpl || !crypto_tmpl_get(tmpl))
 130		return;
 131
 132	list_move(&inst->alg.cra_list, list);
 133	hlist_del(&inst->list);
 134	inst->alg.cra_destroy = crypto_destroy_instance;
 135
 136	BUG_ON(!list_empty(&inst->alg.cra_users));
 137}
 138
 139/*
 140 * Given an algorithm alg, remove all algorithms that depend on it
 141 * through spawns.  If nalg is not null, then exempt any algorithms
 142 * that are depended on by nalg.  This is useful when nalg itself
 143 * depends on alg.
 144 */
 145void crypto_remove_spawns(struct crypto_alg *alg, struct list_head *list,
 146			  struct crypto_alg *nalg)
 147{
 148	u32 new_type = (nalg ?: alg)->cra_flags;
 149	struct crypto_spawn *spawn, *n;
 150	LIST_HEAD(secondary_spawns);
 151	struct list_head *spawns;
 152	LIST_HEAD(stack);
 153	LIST_HEAD(top);
 154
 155	spawns = &alg->cra_users;
 156	list_for_each_entry_safe(spawn, n, spawns, list) {
 157		if ((spawn->alg->cra_flags ^ new_type) & spawn->mask)
 158			continue;
 159
 160		list_move(&spawn->list, &top);
 161	}
 162
 163	/*
 164	 * Perform a depth-first walk starting from alg through
 165	 * the cra_users tree.  The list stack records the path
 166	 * from alg to the current spawn.
 167	 */
 168	spawns = &top;
 169	do {
 170		while (!list_empty(spawns)) {
 171			struct crypto_instance *inst;
 172
 173			spawn = list_first_entry(spawns, struct crypto_spawn,
 174						 list);
 175			inst = spawn->inst;
 176
 177			list_move(&spawn->list, &stack);
 178			spawn->dead = !spawn->registered || &inst->alg != nalg;
 179
 180			if (!spawn->registered)
 181				break;
 182
 183			BUG_ON(&inst->alg == alg);
 184
 185			if (&inst->alg == nalg)
 186				break;
 187
 188			spawns = &inst->alg.cra_users;
 189
 190			/*
 191			 * Even if spawn->registered is true, the
 192			 * instance itself may still be unregistered.
 193			 * This is because it may have failed during
 194			 * registration.  Therefore we still need to
 195			 * make the following test.
 196			 *
 197			 * We may encounter an unregistered instance here, since
 198			 * an instance's spawns are set up prior to the instance
 199			 * being registered.  An unregistered instance will have
 200			 * NULL ->cra_users.next, since ->cra_users isn't
 201			 * properly initialized until registration.  But an
 202			 * unregistered instance cannot have any users, so treat
 203			 * it the same as ->cra_users being empty.
 204			 */
 205			if (spawns->next == NULL)
 206				break;
 207		}
 208	} while ((spawns = crypto_more_spawns(alg, &stack, &top,
 209					      &secondary_spawns)));
 210
 211	/*
 212	 * Remove all instances that are marked as dead.  Also
 213	 * complete the resurrection of the others by moving them
 214	 * back to the cra_users list.
 215	 */
 216	list_for_each_entry_safe(spawn, n, &secondary_spawns, list) {
 217		if (!spawn->dead)
 218			list_move(&spawn->list, &spawn->alg->cra_users);
 219		else if (spawn->registered)
 220			crypto_remove_instance(spawn->inst, list);
 221	}
 222}
 223EXPORT_SYMBOL_GPL(crypto_remove_spawns);
 224
 225static void crypto_alg_finish_registration(struct crypto_alg *alg,
 226					   bool fulfill_requests,
 227					   struct list_head *algs_to_put)
 228{
 229	struct crypto_alg *q;
 230
 231	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 232		if (q == alg)
 233			continue;
 234
 235		if (crypto_is_moribund(q))
 236			continue;
 237
 238		if (crypto_is_larval(q)) {
 239			struct crypto_larval *larval = (void *)q;
 240
 241			/*
 242			 * Check to see if either our generic name or
 243			 * specific name can satisfy the name requested
 244			 * by the larval entry q.
 245			 */
 246			if (strcmp(alg->cra_name, q->cra_name) &&
 247			    strcmp(alg->cra_driver_name, q->cra_name))
 248				continue;
 249
 250			if (larval->adult)
 251				continue;
 252			if ((q->cra_flags ^ alg->cra_flags) & larval->mask)
 253				continue;
 254
 255			if (fulfill_requests && crypto_mod_get(alg))
 256				larval->adult = alg;
 257			else
 258				larval->adult = ERR_PTR(-EAGAIN);
 259
 260			continue;
 261		}
 262
 263		if (strcmp(alg->cra_name, q->cra_name))
 264			continue;
 265
 266		if (strcmp(alg->cra_driver_name, q->cra_driver_name) &&
 267		    q->cra_priority > alg->cra_priority)
 268			continue;
 269
 270		crypto_remove_spawns(q, algs_to_put, alg);
 271	}
 272
 273	crypto_notify(CRYPTO_MSG_ALG_LOADED, alg);
 274}
 275
 276static struct crypto_larval *crypto_alloc_test_larval(struct crypto_alg *alg)
 277{
 278	struct crypto_larval *larval;
 279
 280	if (!IS_ENABLED(CONFIG_CRYPTO_MANAGER) ||
 281	    IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS) ||
 282	    (alg->cra_flags & CRYPTO_ALG_INTERNAL))
 283		return NULL; /* No self-test needed */
 284
 285	larval = crypto_larval_alloc(alg->cra_name,
 286				     alg->cra_flags | CRYPTO_ALG_TESTED, 0);
 287	if (IS_ERR(larval))
 288		return larval;
 289
 290	larval->adult = crypto_mod_get(alg);
 291	if (!larval->adult) {
 292		kfree(larval);
 293		return ERR_PTR(-ENOENT);
 294	}
 295
 296	refcount_set(&larval->alg.cra_refcnt, 1);
 297	memcpy(larval->alg.cra_driver_name, alg->cra_driver_name,
 298	       CRYPTO_MAX_ALG_NAME);
 299	larval->alg.cra_priority = alg->cra_priority;
 300
 301	return larval;
 302}
 303
 304static struct crypto_larval *
 305__crypto_register_alg(struct crypto_alg *alg, struct list_head *algs_to_put)
 306{
 307	struct crypto_alg *q;
 308	struct crypto_larval *larval;
 309	int ret = -EAGAIN;
 310
 311	if (crypto_is_dead(alg))
 312		goto err;
 313
 314	INIT_LIST_HEAD(&alg->cra_users);
 315
 316	ret = -EEXIST;
 317
 318	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 319		if (q == alg)
 320			goto err;
 321
 322		if (crypto_is_moribund(q))
 323			continue;
 324
 325		if (crypto_is_larval(q)) {
 326			if (!strcmp(alg->cra_driver_name, q->cra_driver_name))
 327				goto err;
 328			continue;
 329		}
 330
 331		if (!strcmp(q->cra_driver_name, alg->cra_name) ||
 332		    !strcmp(q->cra_name, alg->cra_driver_name))
 333			goto err;
 334	}
 335
 336	larval = crypto_alloc_test_larval(alg);
 337	if (IS_ERR(larval))
 338		goto out;
 339
 340	list_add(&alg->cra_list, &crypto_alg_list);
 341
 342	crypto_stats_init(alg);
 343
 344	if (larval) {
 345		/* No cheating! */
 346		alg->cra_flags &= ~CRYPTO_ALG_TESTED;
 347
 348		list_add(&larval->alg.cra_list, &crypto_alg_list);
 349	} else {
 350		alg->cra_flags |= CRYPTO_ALG_TESTED;
 351		crypto_alg_finish_registration(alg, true, algs_to_put);
 352	}
 353
 354out:
 355	return larval;
 356
 357err:
 358	larval = ERR_PTR(ret);
 359	goto out;
 360}
 361
 362void crypto_alg_tested(const char *name, int err)
 363{
 364	struct crypto_larval *test;
 365	struct crypto_alg *alg;
 366	struct crypto_alg *q;
 367	LIST_HEAD(list);
 368	bool best;
 369
 370	down_write(&crypto_alg_sem);
 371	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 372		if (crypto_is_moribund(q) || !crypto_is_larval(q))
 373			continue;
 374
 375		test = (struct crypto_larval *)q;
 376
 377		if (!strcmp(q->cra_driver_name, name))
 378			goto found;
 379	}
 380
 381	pr_err("alg: Unexpected test result for %s: %d\n", name, err);
 382	goto unlock;
 383
 384found:
 385	q->cra_flags |= CRYPTO_ALG_DEAD;
 386	alg = test->adult;
 387
 388	if (list_empty(&alg->cra_list))
 389		goto complete;
 390
 391	if (err == -ECANCELED)
 392		alg->cra_flags |= CRYPTO_ALG_FIPS_INTERNAL;
 393	else if (err)
 394		goto complete;
 395	else
 396		alg->cra_flags &= ~CRYPTO_ALG_FIPS_INTERNAL;
 397
 398	alg->cra_flags |= CRYPTO_ALG_TESTED;
 399
 400	/*
 401	 * If a higher-priority implementation of the same algorithm is
 402	 * currently being tested, then don't fulfill request larvals.
 403	 */
 404	best = true;
 405	list_for_each_entry(q, &crypto_alg_list, cra_list) {
 406		if (crypto_is_moribund(q) || !crypto_is_larval(q))
 407			continue;
 408
 409		if (strcmp(alg->cra_name, q->cra_name))
 410			continue;
 411
 412		if (q->cra_priority > alg->cra_priority) {
 413			best = false;
 414			break;
 415		}
 416	}
 417
 418	crypto_alg_finish_registration(alg, best, &list);
 419
 420complete:
 421	complete_all(&test->completion);
 422
 423unlock:
 424	up_write(&crypto_alg_sem);
 425
 426	crypto_remove_final(&list);
 427}
 428EXPORT_SYMBOL_GPL(crypto_alg_tested);
 429
 430void crypto_remove_final(struct list_head *list)
 431{
 432	struct crypto_alg *alg;
 433	struct crypto_alg *n;
 434
 435	list_for_each_entry_safe(alg, n, list, cra_list) {
 436		list_del_init(&alg->cra_list);
 437		crypto_alg_put(alg);
 438	}
 439}
 440EXPORT_SYMBOL_GPL(crypto_remove_final);
 441
 442int crypto_register_alg(struct crypto_alg *alg)
 443{
 444	struct crypto_larval *larval;
 445	LIST_HEAD(algs_to_put);
 446	bool test_started = false;
 447	int err;
 448
 449	alg->cra_flags &= ~CRYPTO_ALG_DEAD;
 450	err = crypto_check_alg(alg);
 451	if (err)
 452		return err;
 453
 454	down_write(&crypto_alg_sem);
 455	larval = __crypto_register_alg(alg, &algs_to_put);
 456	if (!IS_ERR_OR_NULL(larval)) {
 457		test_started = crypto_boot_test_finished();
 458		larval->test_started = test_started;
 459	}
 460	up_write(&crypto_alg_sem);
 461
 462	if (IS_ERR(larval))
 463		return PTR_ERR(larval);
 464	if (test_started)
 465		crypto_wait_for_test(larval);
 466	crypto_remove_final(&algs_to_put);
 467	return 0;
 468}
 469EXPORT_SYMBOL_GPL(crypto_register_alg);
 470
 471static int crypto_remove_alg(struct crypto_alg *alg, struct list_head *list)
 472{
 473	if (unlikely(list_empty(&alg->cra_list)))
 474		return -ENOENT;
 475
 476	alg->cra_flags |= CRYPTO_ALG_DEAD;
 477
 478	list_del_init(&alg->cra_list);
 479	crypto_remove_spawns(alg, list, NULL);
 480
 481	return 0;
 482}
 483
 484void crypto_unregister_alg(struct crypto_alg *alg)
 485{
 486	int ret;
 487	LIST_HEAD(list);
 488
 489	down_write(&crypto_alg_sem);
 490	ret = crypto_remove_alg(alg, &list);
 491	up_write(&crypto_alg_sem);
 492
 493	if (WARN(ret, "Algorithm %s is not registered", alg->cra_driver_name))
 494		return;
 495
 496	BUG_ON(refcount_read(&alg->cra_refcnt) != 1);
 497	if (alg->cra_destroy)
 498		alg->cra_destroy(alg);
 499
 500	crypto_remove_final(&list);
 501}
 502EXPORT_SYMBOL_GPL(crypto_unregister_alg);
 503
 504int crypto_register_algs(struct crypto_alg *algs, int count)
 505{
 506	int i, ret;
 507
 508	for (i = 0; i < count; i++) {
 509		ret = crypto_register_alg(&algs[i]);
 510		if (ret)
 511			goto err;
 512	}
 513
 514	return 0;
 515
 516err:
 517	for (--i; i >= 0; --i)
 518		crypto_unregister_alg(&algs[i]);
 519
 520	return ret;
 521}
 522EXPORT_SYMBOL_GPL(crypto_register_algs);
 523
 524void crypto_unregister_algs(struct crypto_alg *algs, int count)
 525{
 526	int i;
 527
 528	for (i = 0; i < count; i++)
 529		crypto_unregister_alg(&algs[i]);
 530}
 531EXPORT_SYMBOL_GPL(crypto_unregister_algs);
 532
 533int crypto_register_template(struct crypto_template *tmpl)
 534{
 535	struct crypto_template *q;
 536	int err = -EEXIST;
 537
 538	down_write(&crypto_alg_sem);
 539
 540	crypto_check_module_sig(tmpl->module);
 541
 542	list_for_each_entry(q, &crypto_template_list, list) {
 543		if (q == tmpl)
 544			goto out;
 545	}
 546
 547	list_add(&tmpl->list, &crypto_template_list);
 548	err = 0;
 549out:
 550	up_write(&crypto_alg_sem);
 551	return err;
 552}
 553EXPORT_SYMBOL_GPL(crypto_register_template);
 554
 555int crypto_register_templates(struct crypto_template *tmpls, int count)
 556{
 557	int i, err;
 558
 559	for (i = 0; i < count; i++) {
 560		err = crypto_register_template(&tmpls[i]);
 561		if (err)
 562			goto out;
 563	}
 564	return 0;
 565
 566out:
 567	for (--i; i >= 0; --i)
 568		crypto_unregister_template(&tmpls[i]);
 569	return err;
 570}
 571EXPORT_SYMBOL_GPL(crypto_register_templates);
 572
 573void crypto_unregister_template(struct crypto_template *tmpl)
 574{
 575	struct crypto_instance *inst;
 576	struct hlist_node *n;
 577	struct hlist_head *list;
 578	LIST_HEAD(users);
 579
 580	down_write(&crypto_alg_sem);
 581
 582	BUG_ON(list_empty(&tmpl->list));
 583	list_del_init(&tmpl->list);
 584
 585	list = &tmpl->instances;
 586	hlist_for_each_entry(inst, list, list) {
 587		int err = crypto_remove_alg(&inst->alg, &users);
 588
 589		BUG_ON(err);
 590	}
 591
 592	up_write(&crypto_alg_sem);
 593
 594	hlist_for_each_entry_safe(inst, n, list, list) {
 595		BUG_ON(refcount_read(&inst->alg.cra_refcnt) != 1);
 596		crypto_free_instance(inst);
 597	}
 598	crypto_remove_final(&users);
 599}
 600EXPORT_SYMBOL_GPL(crypto_unregister_template);
 601
 602void crypto_unregister_templates(struct crypto_template *tmpls, int count)
 603{
 604	int i;
 605
 606	for (i = count - 1; i >= 0; --i)
 607		crypto_unregister_template(&tmpls[i]);
 608}
 609EXPORT_SYMBOL_GPL(crypto_unregister_templates);
 610
 611static struct crypto_template *__crypto_lookup_template(const char *name)
 612{
 613	struct crypto_template *q, *tmpl = NULL;
 614
 615	down_read(&crypto_alg_sem);
 616	list_for_each_entry(q, &crypto_template_list, list) {
 617		if (strcmp(q->name, name))
 618			continue;
 619		if (unlikely(!crypto_tmpl_get(q)))
 620			continue;
 621
 622		tmpl = q;
 623		break;
 624	}
 625	up_read(&crypto_alg_sem);
 626
 627	return tmpl;
 628}
 629
 630struct crypto_template *crypto_lookup_template(const char *name)
 631{
 632	return try_then_request_module(__crypto_lookup_template(name),
 633				       "crypto-%s", name);
 634}
 635EXPORT_SYMBOL_GPL(crypto_lookup_template);
 636
 637int crypto_register_instance(struct crypto_template *tmpl,
 638			     struct crypto_instance *inst)
 639{
 640	struct crypto_larval *larval;
 641	struct crypto_spawn *spawn;
 642	u32 fips_internal = 0;
 643	LIST_HEAD(algs_to_put);
 644	int err;
 645
 646	err = crypto_check_alg(&inst->alg);
 647	if (err)
 648		return err;
 649
 650	inst->alg.cra_module = tmpl->module;
 651	inst->alg.cra_flags |= CRYPTO_ALG_INSTANCE;
 652
 653	down_write(&crypto_alg_sem);
 654
 655	larval = ERR_PTR(-EAGAIN);
 656	for (spawn = inst->spawns; spawn;) {
 657		struct crypto_spawn *next;
 658
 659		if (spawn->dead)
 660			goto unlock;
 661
 662		next = spawn->next;
 663		spawn->inst = inst;
 664		spawn->registered = true;
 665
 666		fips_internal |= spawn->alg->cra_flags;
 667
 668		crypto_mod_put(spawn->alg);
 669
 670		spawn = next;
 671	}
 672
 673	inst->alg.cra_flags |= (fips_internal & CRYPTO_ALG_FIPS_INTERNAL);
 674
 675	larval = __crypto_register_alg(&inst->alg, &algs_to_put);
 676	if (IS_ERR(larval))
 677		goto unlock;
 678	else if (larval)
 679		larval->test_started = true;
 680
 681	hlist_add_head(&inst->list, &tmpl->instances);
 682	inst->tmpl = tmpl;
 683
 684unlock:
 685	up_write(&crypto_alg_sem);
 686
 687	if (IS_ERR(larval))
 688		return PTR_ERR(larval);
 689	if (larval)
 690		crypto_wait_for_test(larval);
 691	crypto_remove_final(&algs_to_put);
 692	return 0;
 693}
 694EXPORT_SYMBOL_GPL(crypto_register_instance);
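
Example (editorial sketch, hypothetical names): the tail of a template's
create() callback feeding crypto_register_instance() above. Spawn
grabbing and cra_* setup are elided, and inst->alg.cra_type is assumed to
have been set during that setup.

static int example_create(struct crypto_template *tmpl, struct rtattr **tb)
{
	struct crypto_instance *inst;
	int err;

	inst = kzalloc(sizeof(*inst), GFP_KERNEL);
	if (!inst)
		return -ENOMEM;

	/* ... crypto_grab_spawn(), crypto_inst_setname(), cra_* fields ... */

	err = crypto_register_instance(tmpl, inst);
	if (err)
		inst->alg.cra_type->free(inst);	/* as crypto_free_instance() */
	return err;
}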
 695
 696void crypto_unregister_instance(struct crypto_instance *inst)
 697{
 698	LIST_HEAD(list);
 699
 700	down_write(&crypto_alg_sem);
 701
 702	crypto_remove_spawns(&inst->alg, &list, NULL);
 703	crypto_remove_instance(inst, &list);
 704
 705	up_write(&crypto_alg_sem);
 706
 707	crypto_remove_final(&list);
 708}
 709EXPORT_SYMBOL_GPL(crypto_unregister_instance);
 710
 711int crypto_grab_spawn(struct crypto_spawn *spawn, struct crypto_instance *inst,
 712		      const char *name, u32 type, u32 mask)
 713{
 714	struct crypto_alg *alg;
 715	int err = -EAGAIN;
 716
 717	if (WARN_ON_ONCE(inst == NULL))
 718		return -EINVAL;
 719
 720	/* Allow the result of crypto_attr_alg_name() to be passed directly */
 721	if (IS_ERR(name))
 722		return PTR_ERR(name);
 723
 724	alg = crypto_find_alg(name, spawn->frontend,
 725			      type | CRYPTO_ALG_FIPS_INTERNAL, mask);
 726	if (IS_ERR(alg))
 727		return PTR_ERR(alg);
 728
 729	down_write(&crypto_alg_sem);
 730	if (!crypto_is_moribund(alg)) {
 731		list_add(&spawn->list, &alg->cra_users);
 732		spawn->alg = alg;
 733		spawn->mask = mask;
 734		spawn->next = inst->spawns;
 735		inst->spawns = spawn;
 736		inst->alg.cra_flags |=
 737			(alg->cra_flags & CRYPTO_ALG_INHERITED_FLAGS);
 738		err = 0;
 739	}
 740	up_write(&crypto_alg_sem);
 741	if (err)
 742		crypto_mod_put(alg);
 743	return err;
 744}
 745EXPORT_SYMBOL_GPL(crypto_grab_spawn);
 746
 747void crypto_drop_spawn(struct crypto_spawn *spawn)
 748{
 749	if (!spawn->alg) /* not yet initialized? */
 750		return;
 751
 752	down_write(&crypto_alg_sem);
 753	if (!spawn->dead)
 754		list_del(&spawn->list);
 755	up_write(&crypto_alg_sem);
 756
 757	if (!spawn->registered)
 758		crypto_mod_put(spawn->alg);
 759}
 760EXPORT_SYMBOL_GPL(crypto_drop_spawn);
 761
 762static struct crypto_alg *crypto_spawn_alg(struct crypto_spawn *spawn)
 763{
 764	struct crypto_alg *alg = ERR_PTR(-EAGAIN);
 765	struct crypto_alg *target;
 766	bool shoot = false;
 767
 768	down_read(&crypto_alg_sem);
 769	if (!spawn->dead) {
 770		alg = spawn->alg;
 771		if (!crypto_mod_get(alg)) {
 772			target = crypto_alg_get(alg);
 773			shoot = true;
 774			alg = ERR_PTR(-EAGAIN);
 775		}
 776	}
 777	up_read(&crypto_alg_sem);
 778
 779	if (shoot) {
 780		crypto_shoot_alg(target);
 781		crypto_alg_put(target);
 782	}
 783
 784	return alg;
 785}
 786
 787struct crypto_tfm *crypto_spawn_tfm(struct crypto_spawn *spawn, u32 type,
 788				    u32 mask)
 789{
 790	struct crypto_alg *alg;
 791	struct crypto_tfm *tfm;
 792
 793	alg = crypto_spawn_alg(spawn);
 794	if (IS_ERR(alg))
 795		return ERR_CAST(alg);
 796
 797	tfm = ERR_PTR(-EINVAL);
 798	if (unlikely((alg->cra_flags ^ type) & mask))
 799		goto out_put_alg;
 800
 801	tfm = __crypto_alloc_tfm(alg, type, mask);
 802	if (IS_ERR(tfm))
 803		goto out_put_alg;
 804
 805	return tfm;
 806
 807out_put_alg:
 808	crypto_mod_put(alg);
 809	return tfm;
 810}
 811EXPORT_SYMBOL_GPL(crypto_spawn_tfm);
 812
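/*
 * Usage sketch (hypothetical context "ctx"): instantiating the inner
 * transform from an instance's ->cra_init(), here asking for exactly a
 * single-block cipher.  The (type, mask) pair is matched against the
 * algorithm's cra_flags before the tfm is allocated.
 *
 *	struct crypto_tfm *child;
 *
 *	child = crypto_spawn_tfm(spawn, CRYPTO_ALG_TYPE_CIPHER,
 *				 CRYPTO_ALG_TYPE_MASK);
 *	if (IS_ERR(child))
 *		return PTR_ERR(child);
 *	ctx->child = child;
 */
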
 813void *crypto_spawn_tfm2(struct crypto_spawn *spawn)
 814{
 815	struct crypto_alg *alg;
 816	struct crypto_tfm *tfm;
 817
 818	alg = crypto_spawn_alg(spawn);
 819	if (IS_ERR(alg))
 820		return ERR_CAST(alg);
 821
 822	tfm = crypto_create_tfm(alg, spawn->frontend);
 823	if (IS_ERR(tfm))
 824		goto out_put_alg;
 825
 826	return tfm;
 827
 828out_put_alg:
 829	crypto_mod_put(alg);
 830	return tfm;
 831}
 832EXPORT_SYMBOL_GPL(crypto_spawn_tfm2);
 833
 834int crypto_register_notifier(struct notifier_block *nb)
 835{
 836	return blocking_notifier_chain_register(&crypto_chain, nb);
 837}
 838EXPORT_SYMBOL_GPL(crypto_register_notifier);
 839
 840int crypto_unregister_notifier(struct notifier_block *nb)
 841{
 842	return blocking_notifier_chain_unregister(&crypto_chain, nb);
 843}
 844EXPORT_SYMBOL_GPL(crypto_unregister_notifier);
 845
 846struct crypto_attr_type *crypto_get_attr_type(struct rtattr **tb)
 847{
 848	struct rtattr *rta = tb[0];
 849	struct crypto_attr_type *algt;
 850
 851	if (!rta)
 852		return ERR_PTR(-ENOENT);
 853	if (RTA_PAYLOAD(rta) < sizeof(*algt))
 854		return ERR_PTR(-EINVAL);
 855	if (rta->rta_type != CRYPTOA_TYPE)
 856		return ERR_PTR(-EINVAL);
 857
 858	algt = RTA_DATA(rta);
 859
 860	return algt;
 861}
 862EXPORT_SYMBOL_GPL(crypto_get_attr_type);
 863
 864/**
 865 * crypto_check_attr_type() - check algorithm type and compute inherited mask
 866 * @tb: the template parameters
 867 * @type: the algorithm type the template would be instantiated as
 868 * @mask_ret: (output) the mask that should be passed to crypto_grab_*()
 869 *	      to restrict the flags of any inner algorithms
 870 *
 871 * Validate that the algorithm type the user requested is compatible with the
 872 * one the template would actually be instantiated as.  E.g., if the user is
 873 * doing crypto_alloc_shash("cbc(aes)", ...), this would return an error because
 874 * the "cbc" template creates an "skcipher" algorithm, not an "shash" algorithm.
 875 *
 876 * Also compute the mask to use to restrict the flags of any inner algorithms.
 877 *
 878 * Return: 0 on success; -errno on failure
 879 */
 880int crypto_check_attr_type(struct rtattr **tb, u32 type, u32 *mask_ret)
 881{
 882	struct crypto_attr_type *algt;
 883
 884	algt = crypto_get_attr_type(tb);
 885	if (IS_ERR(algt))
 886		return PTR_ERR(algt);
 887
 888	if ((algt->type ^ type) & algt->mask)
 889		return -EINVAL;
 890
 891	*mask_ret = crypto_algt_inherited_mask(algt);
 892	return 0;
 893}
 894EXPORT_SYMBOL_GPL(crypto_check_attr_type);
 895
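/*
 * Usage sketch: the start of a hypothetical template ->create()
 * callback for a template that instantiates as an skcipher.  The
 * returned mask is later passed to crypto_grab_*() so that the inner
 * algorithm's inherited flags are restricted correctly.
 *
 *	u32 mask;
 *	int err;
 *
 *	err = crypto_check_attr_type(tb, CRYPTO_ALG_TYPE_SKCIPHER, &mask);
 *	if (err)
 *		return err;
 */
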
 896const char *crypto_attr_alg_name(struct rtattr *rta)
 897{
 898	struct crypto_attr_alg *alga;
 899
 900	if (!rta)
 901		return ERR_PTR(-ENOENT);
 902	if (RTA_PAYLOAD(rta) < sizeof(*alga))
 903		return ERR_PTR(-EINVAL);
 904	if (rta->rta_type != CRYPTOA_ALG)
 905		return ERR_PTR(-EINVAL);
 906
 907	alga = RTA_DATA(rta);
 908	alga->name[CRYPTO_MAX_ALG_NAME - 1] = 0;
 909
 910	return alga->name;
 911}
 912EXPORT_SYMBOL_GPL(crypto_attr_alg_name);
 913
 914int crypto_inst_setname(struct crypto_instance *inst, const char *name,
 915			struct crypto_alg *alg)
 916{
 917	if (snprintf(inst->alg.cra_name, CRYPTO_MAX_ALG_NAME, "%s(%s)", name,
 918		     alg->cra_name) >= CRYPTO_MAX_ALG_NAME)
 919		return -ENAMETOOLONG;
 920
 921	if (snprintf(inst->alg.cra_driver_name, CRYPTO_MAX_ALG_NAME, "%s(%s)",
 922		     name, alg->cra_driver_name) >= CRYPTO_MAX_ALG_NAME)
 923		return -ENAMETOOLONG;
 924
 925	return 0;
 926}
 927EXPORT_SYMBOL_GPL(crypto_inst_setname);
 928
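/*
 * Worked example: for a template named "cbc" wrapping "aes" (driver
 * "aes-generic"), the call below sets cra_name to "cbc(aes)" and
 * cra_driver_name to "cbc(aes-generic)", or fails with -ENAMETOOLONG
 * if either composed name would exceed CRYPTO_MAX_ALG_NAME.
 *
 *	err = crypto_inst_setname(inst, tmpl->name, alg);
 */
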
 929void crypto_init_queue(struct crypto_queue *queue, unsigned int max_qlen)
 930{
 931	INIT_LIST_HEAD(&queue->list);
 932	queue->backlog = &queue->list;
 933	queue->qlen = 0;
 934	queue->max_qlen = max_qlen;
 935}
 936EXPORT_SYMBOL_GPL(crypto_init_queue);
 937
 938int crypto_enqueue_request(struct crypto_queue *queue,
 939			   struct crypto_async_request *request)
 940{
 941	int err = -EINPROGRESS;
 942
 943	if (unlikely(queue->qlen >= queue->max_qlen)) {
 944		if (!(request->flags & CRYPTO_TFM_REQ_MAY_BACKLOG)) {
 945			err = -ENOSPC;
 946			goto out;
 947		}
 948		err = -EBUSY;
 949		if (queue->backlog == &queue->list)
 950			queue->backlog = &request->list;
 951	}
 952
 953	queue->qlen++;
 954	list_add_tail(&request->list, &queue->list);
 955
 956out:
 957	return err;
 958}
 959EXPORT_SYMBOL_GPL(crypto_enqueue_request);
 960
 961void crypto_enqueue_request_head(struct crypto_queue *queue,
 962				 struct crypto_async_request *request)
 963{
 964	queue->qlen++;
 965	list_add(&request->list, &queue->list);
 966}
 967EXPORT_SYMBOL_GPL(crypto_enqueue_request_head);
 968
 969struct crypto_async_request *crypto_dequeue_request(struct crypto_queue *queue)
 970{
 971	struct list_head *request;
 972
 973	if (unlikely(!queue->qlen))
 974		return NULL;
 975
 976	queue->qlen--;
 977
 978	if (queue->backlog != &queue->list)
 979		queue->backlog = queue->backlog->next;
 980
 981	request = queue->list.next;
 982	list_del(request);
 983
 984	return list_entry(request, struct crypto_async_request, list);
 985}
 986EXPORT_SYMBOL_GPL(crypto_dequeue_request);
 987
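/*
 * Usage sketch (hypothetical driver "dev", locking simplified):
 * crypto_enqueue_request() returns -EINPROGRESS when the request is
 * accepted, -EBUSY when a full queue accepts it only as backlog
 * (CRYPTO_TFM_REQ_MAY_BACKLOG set), and -ENOSPC when a full queue
 * rejects it.  A backlogged request must be signalled with -EINPROGRESS
 * once it is actually picked up, so the backlog pointer is read before
 * dequeueing advances it.
 *
 *	spin_lock_bh(&dev->lock);
 *	err = crypto_enqueue_request(&dev->queue, req);
 *	spin_unlock_bh(&dev->lock);
 *
 *	spin_lock_bh(&dev->lock);
 *	backlog = crypto_get_backlog(&dev->queue);
 *	req = crypto_dequeue_request(&dev->queue);
 *	spin_unlock_bh(&dev->lock);
 *	if (backlog)
 *		backlog->complete(backlog, -EINPROGRESS);
 */
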
 988static inline void crypto_inc_byte(u8 *a, unsigned int size)
 989{
 990	u8 *b = (a + size);
 991	u8 c;
 992
 993	for (; size; size--) {
 994		c = *--b + 1;
 995		*b = c;
 996		if (c)
 997			break;
 998	}
 999}
1000
1001void crypto_inc(u8 *a, unsigned int size)
1002{
1003	__be32 *b = (__be32 *)(a + size);
1004	u32 c;
1005
1006	if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) ||
1007	    IS_ALIGNED((unsigned long)b, __alignof__(*b)))
1008		for (; size >= 4; size -= 4) {
1009			c = be32_to_cpu(*--b) + 1;
1010			*b = cpu_to_be32(c);
1011			if (likely(c))
1012				return;
1013		}
1014
1015	crypto_inc_byte(a, size);
1016}
1017EXPORT_SYMBOL_GPL(crypto_inc);
1018
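/*
 * Worked example: the buffer is treated as one big-endian integer, as
 * for a CTR-mode counter block.  Incrementing the 4-byte counter
 * { 0x00, 0x00, 0x00, 0xff } yields { 0x00, 0x00, 0x01, 0x00 }, and an
 * all-0xff buffer wraps to all zeroes.  The word loop above consumes
 * trailing 32-bit chunks; crypto_inc_byte() finishes any leading bytes
 * that do not form a full word when the carry propagates that far.
 *
 *	u8 ctr[4] = { 0x00, 0x00, 0x00, 0xff };
 *
 *	crypto_inc(ctr, sizeof(ctr));
 */
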
1019unsigned int crypto_alg_extsize(struct crypto_alg *alg)
1020{
1021	return alg->cra_ctxsize +
1022	       (alg->cra_alignmask & ~(crypto_tfm_ctx_alignment() - 1));
1023}
1024EXPORT_SYMBOL_GPL(crypto_alg_extsize);
1025
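/*
 * Worked example: with cra_ctxsize = 64, cra_alignmask = 15 and a base
 * context alignment of 8 (a typical crypto_tfm_ctx_alignment() value),
 * this returns 64 + (15 & ~7) = 72, i.e. 8 spare bytes so an 8-aligned
 * context pointer can always be rounded up to a 16-byte boundary.
 */
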
1026int crypto_type_has_alg(const char *name, const struct crypto_type *frontend,
1027			u32 type, u32 mask)
1028{
1029	int ret = 0;
1030	struct crypto_alg *alg = crypto_find_alg(name, frontend, type, mask);
1031
1032	if (!IS_ERR(alg)) {
1033		crypto_mod_put(alg);
1034		ret = 1;
1035	}
1036
1037	return ret;
1038}
1039EXPORT_SYMBOL_GPL(crypto_type_has_alg);
1040
1041#ifdef CONFIG_CRYPTO_STATS
1042void crypto_stats_init(struct crypto_alg *alg)
1043{
1044	memset(&alg->stats, 0, sizeof(alg->stats));
1045}
1046EXPORT_SYMBOL_GPL(crypto_stats_init);
1047
1048void crypto_stats_get(struct crypto_alg *alg)
1049{
1050	crypto_alg_get(alg);
1051}
1052EXPORT_SYMBOL_GPL(crypto_stats_get);
1053
1054void crypto_stats_aead_encrypt(unsigned int cryptlen, struct crypto_alg *alg,
1055			       int ret)
1056{
1057	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1058		atomic64_inc(&alg->stats.aead.err_cnt);
1059	} else {
1060		atomic64_inc(&alg->stats.aead.encrypt_cnt);
1061		atomic64_add(cryptlen, &alg->stats.aead.encrypt_tlen);
1062	}
1063	crypto_alg_put(alg);
1064}
1065EXPORT_SYMBOL_GPL(crypto_stats_aead_encrypt);
1066
1067void crypto_stats_aead_decrypt(unsigned int cryptlen, struct crypto_alg *alg,
1068			       int ret)
1069{
1070	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1071		atomic64_inc(&alg->stats.aead.err_cnt);
1072	} else {
1073		atomic64_inc(&alg->stats.aead.decrypt_cnt);
1074		atomic64_add(cryptlen, &alg->stats.aead.decrypt_tlen);
1075	}
1076	crypto_alg_put(alg);
1077}
1078EXPORT_SYMBOL_GPL(crypto_stats_aead_decrypt);
1079
1080void crypto_stats_akcipher_encrypt(unsigned int src_len, int ret,
1081				   struct crypto_alg *alg)
1082{
1083	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1084		atomic64_inc(&alg->stats.akcipher.err_cnt);
1085	} else {
1086		atomic64_inc(&alg->stats.akcipher.encrypt_cnt);
1087		atomic64_add(src_len, &alg->stats.akcipher.encrypt_tlen);
1088	}
1089	crypto_alg_put(alg);
1090}
1091EXPORT_SYMBOL_GPL(crypto_stats_akcipher_encrypt);
1092
1093void crypto_stats_akcipher_decrypt(unsigned int src_len, int ret,
1094				   struct crypto_alg *alg)
1095{
1096	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1097		atomic64_inc(&alg->stats.akcipher.err_cnt);
1098	} else {
1099		atomic64_inc(&alg->stats.akcipher.decrypt_cnt);
1100		atomic64_add(src_len, &alg->stats.akcipher.decrypt_tlen);
1101	}
1102	crypto_alg_put(alg);
1103}
1104EXPORT_SYMBOL_GPL(crypto_stats_akcipher_decrypt);
1105
1106void crypto_stats_akcipher_sign(int ret, struct crypto_alg *alg)
1107{
1108	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1109		atomic64_inc(&alg->stats.akcipher.err_cnt);
1110	else
1111		atomic64_inc(&alg->stats.akcipher.sign_cnt);
1112	crypto_alg_put(alg);
1113}
1114EXPORT_SYMBOL_GPL(crypto_stats_akcipher_sign);
1115
1116void crypto_stats_akcipher_verify(int ret, struct crypto_alg *alg)
1117{
1118	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1119		atomic64_inc(&alg->stats.akcipher.err_cnt);
1120	else
1121		atomic64_inc(&alg->stats.akcipher.verify_cnt);
1122	crypto_alg_put(alg);
1123}
1124EXPORT_SYMBOL_GPL(crypto_stats_akcipher_verify);
1125
1126void crypto_stats_compress(unsigned int slen, int ret, struct crypto_alg *alg)
1127{
1128	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1129		atomic64_inc(&alg->stats.compress.err_cnt);
1130	} else {
1131		atomic64_inc(&alg->stats.compress.compress_cnt);
1132		atomic64_add(slen, &alg->stats.compress.compress_tlen);
1133	}
1134	crypto_alg_put(alg);
1135}
1136EXPORT_SYMBOL_GPL(crypto_stats_compress);
1137
1138void crypto_stats_decompress(unsigned int slen, int ret, struct crypto_alg *alg)
1139{
1140	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1141		atomic64_inc(&alg->stats.compress.err_cnt);
1142	} else {
1143		atomic64_inc(&alg->stats.compress.decompress_cnt);
1144		atomic64_add(slen, &alg->stats.compress.decompress_tlen);
1145	}
1146	crypto_alg_put(alg);
1147}
1148EXPORT_SYMBOL_GPL(crypto_stats_decompress);
1149
1150void crypto_stats_ahash_update(unsigned int nbytes, int ret,
1151			       struct crypto_alg *alg)
1152{
1153	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1154		atomic64_inc(&alg->stats.hash.err_cnt);
1155	else
1156		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1157	crypto_alg_put(alg);
1158}
1159EXPORT_SYMBOL_GPL(crypto_stats_ahash_update);
1160
1161void crypto_stats_ahash_final(unsigned int nbytes, int ret,
1162			      struct crypto_alg *alg)
1163{
1164	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1165		atomic64_inc(&alg->stats.hash.err_cnt);
1166	} else {
1167		atomic64_inc(&alg->stats.hash.hash_cnt);
1168		atomic64_add(nbytes, &alg->stats.hash.hash_tlen);
1169	}
1170	crypto_alg_put(alg);
1171}
1172EXPORT_SYMBOL_GPL(crypto_stats_ahash_final);
1173
1174void crypto_stats_kpp_set_secret(struct crypto_alg *alg, int ret)
1175{
1176	if (ret)
1177		atomic64_inc(&alg->stats.kpp.err_cnt);
1178	else
1179		atomic64_inc(&alg->stats.kpp.setsecret_cnt);
1180	crypto_alg_put(alg);
1181}
1182EXPORT_SYMBOL_GPL(crypto_stats_kpp_set_secret);
1183
1184void crypto_stats_kpp_generate_public_key(struct crypto_alg *alg, int ret)
1185{
1186	if (ret)
1187		atomic64_inc(&alg->stats.kpp.err_cnt);
1188	else
1189		atomic64_inc(&alg->stats.kpp.generate_public_key_cnt);
1190	crypto_alg_put(alg);
1191}
1192EXPORT_SYMBOL_GPL(crypto_stats_kpp_generate_public_key);
1193
1194void crypto_stats_kpp_compute_shared_secret(struct crypto_alg *alg, int ret)
1195{
1196	if (ret)
1197		atomic64_inc(&alg->stats.kpp.err_cnt);
1198	else
1199		atomic64_inc(&alg->stats.kpp.compute_shared_secret_cnt);
1200	crypto_alg_put(alg);
1201}
1202EXPORT_SYMBOL_GPL(crypto_stats_kpp_compute_shared_secret);
1203
1204void crypto_stats_rng_seed(struct crypto_alg *alg, int ret)
1205{
1206	if (ret && ret != -EINPROGRESS && ret != -EBUSY)
1207		atomic64_inc(&alg->stats.rng.err_cnt);
1208	else
1209		atomic64_inc(&alg->stats.rng.seed_cnt);
1210	crypto_alg_put(alg);
1211}
1212EXPORT_SYMBOL_GPL(crypto_stats_rng_seed);
1213
1214void crypto_stats_rng_generate(struct crypto_alg *alg, unsigned int dlen,
1215			       int ret)
1216{
1217	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1218		atomic64_inc(&alg->stats.rng.err_cnt);
1219	} else {
1220		atomic64_inc(&alg->stats.rng.generate_cnt);
1221		atomic64_add(dlen, &alg->stats.rng.generate_tlen);
1222	}
1223	crypto_alg_put(alg);
1224}
1225EXPORT_SYMBOL_GPL(crypto_stats_rng_generate);
1226
1227void crypto_stats_skcipher_encrypt(unsigned int cryptlen, int ret,
1228				   struct crypto_alg *alg)
1229{
1230	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1231		atomic64_inc(&alg->stats.cipher.err_cnt);
1232	} else {
1233		atomic64_inc(&alg->stats.cipher.encrypt_cnt);
1234		atomic64_add(cryptlen, &alg->stats.cipher.encrypt_tlen);
1235	}
1236	crypto_alg_put(alg);
1237}
1238EXPORT_SYMBOL_GPL(crypto_stats_skcipher_encrypt);
1239
1240void crypto_stats_skcipher_decrypt(unsigned int cryptlen, int ret,
1241				   struct crypto_alg *alg)
1242{
1243	if (ret && ret != -EINPROGRESS && ret != -EBUSY) {
1244		atomic64_inc(&alg->stats.cipher.err_cnt);
1245	} else {
1246		atomic64_inc(&alg->stats.cipher.decrypt_cnt);
1247		atomic64_add(cryptlen, &alg->stats.cipher.decrypt_tlen);
1248	}
1249	crypto_alg_put(alg);
1250}
1251EXPORT_SYMBOL_GPL(crypto_stats_skcipher_decrypt);
1252#endif
1253
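/*
 * Usage sketch (modelled on the crypto_aead_encrypt() wrapper in
 * include/crypto/aead.h): crypto_stats_get() takes the algorithm
 * reference that the per-operation helper then drops via
 * crypto_alg_put(), so the two calls must always be paired around the
 * operation.
 *
 *	crypto_stats_get(alg);
 *	ret = crypto_aead_alg(aead)->encrypt(req);
 *	crypto_stats_aead_encrypt(cryptlen, alg, ret);
 */

/*
 * Run the self-tests of any algorithms that registered before the
 * crypto manager was ready.  Each pass claims one untested test larval
 * under the semaphore, then drops the lock and waits for that test to
 * finish, until no untested larvals remain.
 */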
1254static void __init crypto_start_tests(void)
1255{
1256	if (IS_ENABLED(CONFIG_CRYPTO_MANAGER_DISABLE_TESTS))
1257		return;
1258
1259	for (;;) {
1260		struct crypto_larval *larval = NULL;
1261		struct crypto_alg *q;
1262
1263		down_write(&crypto_alg_sem);
1264
1265		list_for_each_entry(q, &crypto_alg_list, cra_list) {
1266			struct crypto_larval *l;
1267
1268			if (!crypto_is_larval(q))
1269				continue;
1270
1271			l = (void *)q;
1272
1273			if (!crypto_is_test_larval(l))
1274				continue;
1275
1276			if (l->test_started)
1277				continue;
1278
1279			l->test_started = true;
1280			larval = l;
1281			break;
1282		}
1283
1284		up_write(&crypto_alg_sem);
1285
1286		if (!larval)
1287			break;
1288
1289		crypto_wait_for_test(larval);
1290	}
1291
1292	set_crypto_boot_test_finished();
1293}
1294
1295static int __init crypto_algapi_init(void)
1296{
1297	crypto_init_proc();
1298	crypto_start_tests();
1299	return 0;
1300}
1301
1302static void __exit crypto_algapi_exit(void)
1303{
1304	crypto_exit_proc();
1305}
1306
1307/*
1308 * We run this at late_initcall so that all the built-in algorithms
1309 * have had a chance to register themselves first.
1310 */
1311late_initcall(crypto_algapi_init);
1312module_exit(crypto_algapi_exit);
1313
1314MODULE_LICENSE("GPL");
1315MODULE_DESCRIPTION("Cryptographic algorithms API");
1316MODULE_SOFTDEP("pre: cryptomgr");