   1/*
   2 * Algorithm testing framework and tests.
   3 *
   4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
   5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
   6 * Copyright (c) 2007 Nokia Siemens Networks
   7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
   8 *
   9 * Updated RFC4106 AES-GCM testing.
  10 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  11 *             Adrian Hoban <adrian.hoban@intel.com>
  12 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
  13 *             Tadeusz Struk (tadeusz.struk@intel.com)
  14 *    Copyright (c) 2010, Intel Corporation.
  15 *
  16 * This program is free software; you can redistribute it and/or modify it
  17 * under the terms of the GNU General Public License as published by the Free
  18 * Software Foundation; either version 2 of the License, or (at your option)
  19 * any later version.
  20 *
  21 */
  22
  23#include <crypto/aead.h>
  24#include <crypto/hash.h>
  25#include <crypto/skcipher.h>
  26#include <linux/err.h>
  27#include <linux/fips.h>
  28#include <linux/module.h>
  29#include <linux/scatterlist.h>
  30#include <linux/slab.h>
  31#include <linux/string.h>
  32#include <crypto/rng.h>
  33#include <crypto/drbg.h>
  34#include <crypto/akcipher.h>
  35
  36#include "internal.h"
  37
  38#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  39
  40/* a perfect nop */
  41int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  42{
  43	return 0;
  44}
  45
  46#else
  47
  48#include "testmgr.h"
  49
  50/*
  51 * Need slab memory for testing (size in number of pages).
  52 */
  53#define XBUFSIZE	8
  54
  55/*
  56 * Indexes into the xbuf to simulate cross-page access.
  57 */
  58#define IDX1		32
  59#define IDX2		32400
  60#define IDX3		1
  61#define IDX4		8193
  62#define IDX5		22222
  63#define IDX6		17101
  64#define IDX7		27333
  65#define IDX8		3000
  66
   67/*
   68 * Used by test_cipher()
   69 */
  70#define ENCRYPT 1
  71#define DECRYPT 0
  72
  73struct tcrypt_result {
  74	struct completion completion;
  75	int err;
  76};
  77
  78struct aead_test_suite {
  79	struct {
  80		struct aead_testvec *vecs;
  81		unsigned int count;
  82	} enc, dec;
  83};
  84
  85struct cipher_test_suite {
  86	struct {
  87		struct cipher_testvec *vecs;
  88		unsigned int count;
  89	} enc, dec;
  90};
  91
  92struct comp_test_suite {
  93	struct {
  94		struct comp_testvec *vecs;
  95		unsigned int count;
  96	} comp, decomp;
  97};
  98
  99struct hash_test_suite {
 100	struct hash_testvec *vecs;
 101	unsigned int count;
 102};
 103
 104struct cprng_test_suite {
 105	struct cprng_testvec *vecs;
 106	unsigned int count;
 107};
 108
 109struct drbg_test_suite {
 110	struct drbg_testvec *vecs;
 111	unsigned int count;
 112};
 113
 114struct akcipher_test_suite {
 115	struct akcipher_testvec *vecs;
 116	unsigned int count;
 117};
 118
 119struct alg_test_desc {
 120	const char *alg;
 121	int (*test)(const struct alg_test_desc *desc, const char *driver,
 122		    u32 type, u32 mask);
 123	int fips_allowed;	/* set if alg is allowed in fips mode */
 124
 125	union {
 126		struct aead_test_suite aead;
 127		struct cipher_test_suite cipher;
 128		struct comp_test_suite comp;
 129		struct hash_test_suite hash;
 130		struct cprng_test_suite cprng;
 131		struct drbg_test_suite drbg;
 132		struct akcipher_test_suite akcipher;
 133	} suite;
 134};
 135
 136static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 137
 138static void hexdump(unsigned char *buf, unsigned int len)
 139{
 140	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
 141			16, 1,
 142			buf, len, false);
 143}
 144
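     /*
      * Completion callback for asynchronous crypto requests: ignore the
      * intermediate -EINPROGRESS notification and record the final status
      * before waking up the waiter.
      */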
 145static void tcrypt_complete(struct crypto_async_request *req, int err)
 146{
 147	struct tcrypt_result *res = req->data;
 148
 149	if (err == -EINPROGRESS)
 150		return;
 151
 152	res->err = err;
 153	complete(&res->completion);
 154}
 155
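     /*
      * Allocate the XBUFSIZE scratch pages used by the tests; on failure,
      * release any pages that were already allocated.
      */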
 156static int testmgr_alloc_buf(char *buf[XBUFSIZE])
 157{
 158	int i;
 159
 160	for (i = 0; i < XBUFSIZE; i++) {
 161		buf[i] = (void *)__get_free_page(GFP_KERNEL);
 162		if (!buf[i])
 163			goto err_free_buf;
 164	}
 165
 166	return 0;
 167
 168err_free_buf:
 169	while (i-- > 0)
 170		free_page((unsigned long)buf[i]);
 171
 172	return -ENOMEM;
 173}
 174
 175static void testmgr_free_buf(char *buf[XBUFSIZE])
 176{
 177	int i;
 178
 179	for (i = 0; i < XBUFSIZE; i++)
 180		free_page((unsigned long)buf[i]);
 181}
 182
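     /*
      * If the request was queued asynchronously (-EINPROGRESS or -EBUSY),
      * wait for it to finish and return the status reported by the
      * completion callback; otherwise return the synchronous result.
      */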
 183static int wait_async_op(struct tcrypt_result *tr, int ret)
 184{
 185	if (ret == -EINPROGRESS || ret == -EBUSY) {
 186		wait_for_completion(&tr->completion);
 187		reinit_completion(&tr->completion);
 188		ret = tr->err;
 189	}
 190	return ret;
 191}
 192
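     /*
      * Exercise the hash export()/import() path: save the current hash
      * state, replace the request with a freshly allocated one, import the
      * saved state and hash the next chunk of the test vector.
      */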
 193static int ahash_partial_update(struct ahash_request **preq,
 194	struct crypto_ahash *tfm, struct hash_testvec *template,
 195	void *hash_buff, int k, int temp, struct scatterlist *sg,
 196	const char *algo, char *result, struct tcrypt_result *tresult)
 197{
 198	char *state;
 199	struct ahash_request *req;
 200	int statesize, ret = -EINVAL;
 201
 202	req = *preq;
 203	statesize = crypto_ahash_statesize(
 204			crypto_ahash_reqtfm(req));
 205	state = kmalloc(statesize, GFP_KERNEL);
 206	if (!state) {
  207		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
 208		goto out_nostate;
 209	}
 210	ret = crypto_ahash_export(req, state);
 211	if (ret) {
  212		pr_err("alg: hash: Failed to export() for %s\n", algo);
 213		goto out;
 214	}
 215	ahash_request_free(req);
 216	req = ahash_request_alloc(tfm, GFP_KERNEL);
 217	if (!req) {
 218		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
 219		goto out_noreq;
 220	}
 221	ahash_request_set_callback(req,
 222		CRYPTO_TFM_REQ_MAY_BACKLOG,
 223		tcrypt_complete, tresult);
 224
 225	memcpy(hash_buff, template->plaintext + temp,
 226		template->tap[k]);
 227	sg_init_one(&sg[0], hash_buff, template->tap[k]);
 228	ahash_request_set_crypt(req, sg, result, template->tap[k]);
 229	ret = crypto_ahash_import(req, state);
 230	if (ret) {
 231		pr_err("alg: hash: Failed to import() for %s\n", algo);
 232		goto out;
 233	}
 234	ret = wait_async_op(tresult, crypto_ahash_update(req));
 235	if (ret)
 236		goto out;
 237	*preq = req;
 238	ret = 0;
 239	goto out_noreq;
 240out:
 241	ahash_request_free(req);
 242out_noreq:
 243	kfree(state);
 244out_nostate:
 245	return ret;
 246}
 247
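     /*
      * Core hash test: hash each vector with digest() or with the
      * init/update/final sequence, then repeat with the input scattered
      * across pages and with partial updates via export()/import().
      */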
 248static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 249		       unsigned int tcount, bool use_digest,
 250		       const int align_offset)
 251{
 252	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 253	unsigned int i, j, k, temp;
 254	struct scatterlist sg[8];
 255	char *result;
 256	char *key;
 257	struct ahash_request *req;
 258	struct tcrypt_result tresult;
 259	void *hash_buff;
 260	char *xbuf[XBUFSIZE];
 261	int ret = -ENOMEM;
 262
 263	result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
 264	if (!result)
 265		return ret;
 266	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
 267	if (!key)
 268		goto out_nobuf;
 269	if (testmgr_alloc_buf(xbuf))
 270		goto out_nobuf;
 271
 272	init_completion(&tresult.completion);
 273
 274	req = ahash_request_alloc(tfm, GFP_KERNEL);
 275	if (!req) {
 276		printk(KERN_ERR "alg: hash: Failed to allocate request for "
 277		       "%s\n", algo);
 278		goto out_noreq;
 279	}
 280	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 281				   tcrypt_complete, &tresult);
 282
 283	j = 0;
 284	for (i = 0; i < tcount; i++) {
 285		if (template[i].np)
 286			continue;
 287
 288		ret = -EINVAL;
 289		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
 290			goto out;
 291
 292		j++;
 293		memset(result, 0, MAX_DIGEST_SIZE);
 294
 295		hash_buff = xbuf[0];
 296		hash_buff += align_offset;
 297
 298		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 299		sg_init_one(&sg[0], hash_buff, template[i].psize);
 300
 301		if (template[i].ksize) {
 302			crypto_ahash_clear_flags(tfm, ~0);
 303			if (template[i].ksize > MAX_KEYLEN) {
 304				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 305				       j, algo, template[i].ksize, MAX_KEYLEN);
 306				ret = -EINVAL;
 307				goto out;
 308			}
 309			memcpy(key, template[i].key, template[i].ksize);
 310			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 311			if (ret) {
 312				printk(KERN_ERR "alg: hash: setkey failed on "
 313				       "test %d for %s: ret=%d\n", j, algo,
 314				       -ret);
 315				goto out;
 316			}
 317		}
 318
 319		ahash_request_set_crypt(req, sg, result, template[i].psize);
 320		if (use_digest) {
 321			ret = wait_async_op(&tresult, crypto_ahash_digest(req));
 322			if (ret) {
 323				pr_err("alg: hash: digest failed on test %d "
 324				       "for %s: ret=%d\n", j, algo, -ret);
 325				goto out;
 326			}
 327		} else {
 328			ret = wait_async_op(&tresult, crypto_ahash_init(req));
 329			if (ret) {
  330				pr_err("alg: hash: init failed on test %d "
 331				       "for %s: ret=%d\n", j, algo, -ret);
 332				goto out;
 333			}
 334			ret = wait_async_op(&tresult, crypto_ahash_update(req));
 335			if (ret) {
  336				pr_err("alg: hash: update failed on test %d "
 337				       "for %s: ret=%d\n", j, algo, -ret);
 338				goto out;
 339			}
 340			ret = wait_async_op(&tresult, crypto_ahash_final(req));
 341			if (ret) {
  342				pr_err("alg: hash: final failed on test %d "
 343				       "for %s: ret=%d\n", j, algo, -ret);
 344				goto out;
 345			}
 346		}
 347
 348		if (memcmp(result, template[i].digest,
 349			   crypto_ahash_digestsize(tfm))) {
 350			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
 351			       j, algo);
 352			hexdump(result, crypto_ahash_digestsize(tfm));
 353			ret = -EINVAL;
 354			goto out;
 355		}
 356	}
 357
 358	j = 0;
 359	for (i = 0; i < tcount; i++) {
 360		/* alignment tests are only done with continuous buffers */
 361		if (align_offset != 0)
 362			break;
 363
 364		if (!template[i].np)
 365			continue;
 366
 367		j++;
 368		memset(result, 0, MAX_DIGEST_SIZE);
 369
 370		temp = 0;
 371		sg_init_table(sg, template[i].np);
 372		ret = -EINVAL;
 373		for (k = 0; k < template[i].np; k++) {
 374			if (WARN_ON(offset_in_page(IDX[k]) +
 375				    template[i].tap[k] > PAGE_SIZE))
 376				goto out;
 377			sg_set_buf(&sg[k],
 378				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
 379					  offset_in_page(IDX[k]),
 380					  template[i].plaintext + temp,
 381					  template[i].tap[k]),
 382				   template[i].tap[k]);
 383			temp += template[i].tap[k];
 384		}
 385
 386		if (template[i].ksize) {
 387			if (template[i].ksize > MAX_KEYLEN) {
 388				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 389				       j, algo, template[i].ksize, MAX_KEYLEN);
 390				ret = -EINVAL;
 391				goto out;
 392			}
 393			crypto_ahash_clear_flags(tfm, ~0);
 394			memcpy(key, template[i].key, template[i].ksize);
 395			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 396
 397			if (ret) {
 398				printk(KERN_ERR "alg: hash: setkey "
 399				       "failed on chunking test %d "
 400				       "for %s: ret=%d\n", j, algo, -ret);
 401				goto out;
 402			}
 403		}
 404
 405		ahash_request_set_crypt(req, sg, result, template[i].psize);
 406		ret = crypto_ahash_digest(req);
 407		switch (ret) {
 408		case 0:
 409			break;
 410		case -EINPROGRESS:
 411		case -EBUSY:
 412			wait_for_completion(&tresult.completion);
 413			reinit_completion(&tresult.completion);
 414			ret = tresult.err;
 415			if (!ret)
 416				break;
 417			/* fall through */
 418		default:
 419			printk(KERN_ERR "alg: hash: digest failed "
 420			       "on chunking test %d for %s: "
 421			       "ret=%d\n", j, algo, -ret);
 422			goto out;
 423		}
 424
 425		if (memcmp(result, template[i].digest,
 426			   crypto_ahash_digestsize(tfm))) {
 427			printk(KERN_ERR "alg: hash: Chunking test %d "
 428			       "failed for %s\n", j, algo);
 429			hexdump(result, crypto_ahash_digestsize(tfm));
 430			ret = -EINVAL;
 431			goto out;
 432		}
 433	}
 434
 435	/* partial update exercise */
 436	j = 0;
 437	for (i = 0; i < tcount; i++) {
 438		/* alignment tests are only done with continuous buffers */
 439		if (align_offset != 0)
 440			break;
 441
 442		if (template[i].np < 2)
 443			continue;
 444
 445		j++;
 446		memset(result, 0, MAX_DIGEST_SIZE);
 447
 448		ret = -EINVAL;
 449		hash_buff = xbuf[0];
 450		memcpy(hash_buff, template[i].plaintext,
 451			template[i].tap[0]);
 452		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
 453
 454		if (template[i].ksize) {
 455			crypto_ahash_clear_flags(tfm, ~0);
 456			if (template[i].ksize > MAX_KEYLEN) {
 457				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 458					j, algo, template[i].ksize, MAX_KEYLEN);
 459				ret = -EINVAL;
 460				goto out;
 461			}
 462			memcpy(key, template[i].key, template[i].ksize);
 463			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 464			if (ret) {
 465				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
 466					j, algo, -ret);
 467				goto out;
 468			}
 469		}
 470
 471		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
 472		ret = wait_async_op(&tresult, crypto_ahash_init(req));
 473		if (ret) {
  474			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
 475				j, algo, -ret);
 476			goto out;
 477		}
 478		ret = wait_async_op(&tresult, crypto_ahash_update(req));
 479		if (ret) {
  480			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
 481				j, algo, -ret);
 482			goto out;
 483		}
 484
 485		temp = template[i].tap[0];
 486		for (k = 1; k < template[i].np; k++) {
 487			ret = ahash_partial_update(&req, tfm, &template[i],
 488				hash_buff, k, temp, &sg[0], algo, result,
 489				&tresult);
 490			if (ret) {
 491				pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
 492					j, algo, -ret);
 493				goto out_noreq;
 494			}
 495			temp += template[i].tap[k];
 496		}
 497		ret = wait_async_op(&tresult, crypto_ahash_final(req));
 498		if (ret) {
  499			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
 500				j, algo, -ret);
 501			goto out;
 502		}
 503		if (memcmp(result, template[i].digest,
 504			   crypto_ahash_digestsize(tfm))) {
 505			pr_err("alg: hash: Partial Test %d failed for %s\n",
 506			       j, algo);
 507			hexdump(result, crypto_ahash_digestsize(tfm));
 508			ret = -EINVAL;
 509			goto out;
 510		}
 511	}
 512
 513	ret = 0;
 514
 515out:
 516	ahash_request_free(req);
 517out_noreq:
 518	testmgr_free_buf(xbuf);
 519out_nobuf:
 520	kfree(key);
 521	kfree(result);
 522	return ret;
 523}
 524
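     /*
      * Run __test_hash() with an aligned buffer, a one-byte misaligned
      * buffer and, if the algorithm has an alignment mask, an offset of
      * alignmask + 1.
      */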
 525static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 526		     unsigned int tcount, bool use_digest)
 527{
 528	unsigned int alignmask;
 529	int ret;
 530
 531	ret = __test_hash(tfm, template, tcount, use_digest, 0);
 532	if (ret)
 533		return ret;
 534
 535	/* test unaligned buffers, check with one byte offset */
 536	ret = __test_hash(tfm, template, tcount, use_digest, 1);
 537	if (ret)
 538		return ret;
 539
 540	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 541	if (alignmask) {
 542		/* Check if alignment mask for tfm is correctly set. */
 543		ret = __test_hash(tfm, template, tcount, use_digest,
 544				  alignmask + 1);
 545		if (ret)
 546			return ret;
 547	}
 548
 549	return 0;
 550}
 551
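     /*
      * Core AEAD test: encrypt or decrypt each vector, optionally into a
      * separate destination buffer, checking the output and any expected
      * verification failure; then repeat with the associated data and
      * payload split across page-crossing scatterlists.
      */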
 552static int __test_aead(struct crypto_aead *tfm, int enc,
 553		       struct aead_testvec *template, unsigned int tcount,
 554		       const bool diff_dst, const int align_offset)
 555{
 556	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
 557	unsigned int i, j, k, n, temp;
 558	int ret = -ENOMEM;
 559	char *q;
 560	char *key;
 561	struct aead_request *req;
 562	struct scatterlist *sg;
 563	struct scatterlist *sgout;
 564	const char *e, *d;
 565	struct tcrypt_result result;
 566	unsigned int authsize, iv_len;
 567	void *input;
 568	void *output;
 569	void *assoc;
 570	char *iv;
 571	char *xbuf[XBUFSIZE];
 572	char *xoutbuf[XBUFSIZE];
 573	char *axbuf[XBUFSIZE];
 574
 575	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
 576	if (!iv)
 577		return ret;
 578	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
 579	if (!key)
 580		goto out_noxbuf;
 581	if (testmgr_alloc_buf(xbuf))
 582		goto out_noxbuf;
 583	if (testmgr_alloc_buf(axbuf))
 584		goto out_noaxbuf;
 585	if (diff_dst && testmgr_alloc_buf(xoutbuf))
 586		goto out_nooutbuf;
 587
 588	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
 589	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
 590	if (!sg)
 591		goto out_nosg;
 592	sgout = &sg[16];
 593
 594	if (diff_dst)
 595		d = "-ddst";
 596	else
 597		d = "";
 598
 599	if (enc == ENCRYPT)
 600		e = "encryption";
 601	else
 602		e = "decryption";
 603
 604	init_completion(&result.completion);
 605
 606	req = aead_request_alloc(tfm, GFP_KERNEL);
 607	if (!req) {
 608		pr_err("alg: aead%s: Failed to allocate request for %s\n",
 609		       d, algo);
 610		goto out;
 611	}
 612
 613	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 614				  tcrypt_complete, &result);
 615
 616	iv_len = crypto_aead_ivsize(tfm);
 617
 618	for (i = 0, j = 0; i < tcount; i++) {
 619		if (template[i].np)
 620			continue;
 621
 622		j++;
 623
  624		/* some templates have no input data but they will
  625		 * still touch the input buffer
  626		 */
 627		input = xbuf[0];
 628		input += align_offset;
 629		assoc = axbuf[0];
 630
 631		ret = -EINVAL;
 632		if (WARN_ON(align_offset + template[i].ilen >
 633			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
 634			goto out;
 635
 636		memcpy(input, template[i].input, template[i].ilen);
 637		memcpy(assoc, template[i].assoc, template[i].alen);
 638		if (template[i].iv)
 639			memcpy(iv, template[i].iv, iv_len);
 640		else
 641			memset(iv, 0, iv_len);
 642
 643		crypto_aead_clear_flags(tfm, ~0);
 644		if (template[i].wk)
 645			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 646
 647		if (template[i].klen > MAX_KEYLEN) {
 648			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
 649			       d, j, algo, template[i].klen,
 650			       MAX_KEYLEN);
 651			ret = -EINVAL;
 652			goto out;
 653		}
 654		memcpy(key, template[i].key, template[i].klen);
 655
 656		ret = crypto_aead_setkey(tfm, key, template[i].klen);
 657		if (!ret == template[i].fail) {
 658			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
 659			       d, j, algo, crypto_aead_get_flags(tfm));
 660			goto out;
 661		} else if (ret)
 662			continue;
 663
 664		authsize = abs(template[i].rlen - template[i].ilen);
 665		ret = crypto_aead_setauthsize(tfm, authsize);
 666		if (ret) {
 667			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
 668			       d, authsize, j, algo);
 669			goto out;
 670		}
 671
 672		k = !!template[i].alen;
 673		sg_init_table(sg, k + 1);
 674		sg_set_buf(&sg[0], assoc, template[i].alen);
 675		sg_set_buf(&sg[k], input,
 676			   template[i].ilen + (enc ? authsize : 0));
 677		output = input;
 678
 679		if (diff_dst) {
 680			sg_init_table(sgout, k + 1);
 681			sg_set_buf(&sgout[0], assoc, template[i].alen);
 682
 683			output = xoutbuf[0];
 684			output += align_offset;
 685			sg_set_buf(&sgout[k], output,
 686				   template[i].rlen + (enc ? 0 : authsize));
 687		}
 688
 689		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 690				       template[i].ilen, iv);
 691
 692		aead_request_set_ad(req, template[i].alen);
 693
 694		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
 695
 696		switch (ret) {
 697		case 0:
 698			if (template[i].novrfy) {
 699				/* verification was supposed to fail */
 700				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
 701				       d, e, j, algo);
 702				/* so really, we got a bad message */
 703				ret = -EBADMSG;
 704				goto out;
 705			}
 706			break;
 707		case -EINPROGRESS:
 708		case -EBUSY:
 709			wait_for_completion(&result.completion);
 710			reinit_completion(&result.completion);
 711			ret = result.err;
 712			if (!ret)
 713				break;
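     			/* fall through */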
 714		case -EBADMSG:
 715			if (template[i].novrfy)
 716				/* verification failure was expected */
 717				continue;
 718			/* fall through */
 719		default:
 720			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
 721			       d, e, j, algo, -ret);
 722			goto out;
 723		}
 724
 725		q = output;
 726		if (memcmp(q, template[i].result, template[i].rlen)) {
 727			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
 728			       d, j, e, algo);
 729			hexdump(q, template[i].rlen);
 730			ret = -EINVAL;
 731			goto out;
 732		}
 733	}
 734
 735	for (i = 0, j = 0; i < tcount; i++) {
 736		/* alignment tests are only done with continuous buffers */
 737		if (align_offset != 0)
 738			break;
 739
 740		if (!template[i].np)
 741			continue;
 742
 743		j++;
 744
 745		if (template[i].iv)
 746			memcpy(iv, template[i].iv, iv_len);
 747		else
 748			memset(iv, 0, MAX_IVLEN);
 749
 750		crypto_aead_clear_flags(tfm, ~0);
 751		if (template[i].wk)
 752			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 753		if (template[i].klen > MAX_KEYLEN) {
 754			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
 755			       d, j, algo, template[i].klen, MAX_KEYLEN);
 756			ret = -EINVAL;
 757			goto out;
 758		}
 759		memcpy(key, template[i].key, template[i].klen);
 760
 761		ret = crypto_aead_setkey(tfm, key, template[i].klen);
 762		if (!ret == template[i].fail) {
 763			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
 764			       d, j, algo, crypto_aead_get_flags(tfm));
 765			goto out;
 766		} else if (ret)
 767			continue;
 768
 769		authsize = abs(template[i].rlen - template[i].ilen);
 770
 771		ret = -EINVAL;
 772		sg_init_table(sg, template[i].anp + template[i].np);
 773		if (diff_dst)
 774			sg_init_table(sgout, template[i].anp + template[i].np);
 775
 776		ret = -EINVAL;
 777		for (k = 0, temp = 0; k < template[i].anp; k++) {
 778			if (WARN_ON(offset_in_page(IDX[k]) +
 779				    template[i].atap[k] > PAGE_SIZE))
 780				goto out;
 781			sg_set_buf(&sg[k],
 782				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
 783					  offset_in_page(IDX[k]),
 784					  template[i].assoc + temp,
 785					  template[i].atap[k]),
 786				   template[i].atap[k]);
 787			if (diff_dst)
 788				sg_set_buf(&sgout[k],
 789					   axbuf[IDX[k] >> PAGE_SHIFT] +
 790					   offset_in_page(IDX[k]),
 791					   template[i].atap[k]);
 792			temp += template[i].atap[k];
 793		}
 794
 795		for (k = 0, temp = 0; k < template[i].np; k++) {
 796			if (WARN_ON(offset_in_page(IDX[k]) +
 797				    template[i].tap[k] > PAGE_SIZE))
 798				goto out;
 799
 800			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
 801			memcpy(q, template[i].input + temp, template[i].tap[k]);
 802			sg_set_buf(&sg[template[i].anp + k],
 803				   q, template[i].tap[k]);
 804
 805			if (diff_dst) {
 806				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 807				    offset_in_page(IDX[k]);
 808
 809				memset(q, 0, template[i].tap[k]);
 810
 811				sg_set_buf(&sgout[template[i].anp + k],
 812					   q, template[i].tap[k]);
 813			}
 814
 815			n = template[i].tap[k];
 816			if (k == template[i].np - 1 && enc)
 817				n += authsize;
 818			if (offset_in_page(q) + n < PAGE_SIZE)
 819				q[n] = 0;
 820
 821			temp += template[i].tap[k];
 822		}
 823
 824		ret = crypto_aead_setauthsize(tfm, authsize);
 825		if (ret) {
 826			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
 827			       d, authsize, j, algo);
 828			goto out;
 829		}
 830
 831		if (enc) {
 832			if (WARN_ON(sg[template[i].anp + k - 1].offset +
 833				    sg[template[i].anp + k - 1].length +
 834				    authsize > PAGE_SIZE)) {
 835				ret = -EINVAL;
 836				goto out;
 837			}
 838
 839			if (diff_dst)
 840				sgout[template[i].anp + k - 1].length +=
 841					authsize;
 842			sg[template[i].anp + k - 1].length += authsize;
 843		}
 844
 845		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 846				       template[i].ilen,
 847				       iv);
 848
 849		aead_request_set_ad(req, template[i].alen);
 850
 851		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
 852
 853		switch (ret) {
 854		case 0:
 855			if (template[i].novrfy) {
 856				/* verification was supposed to fail */
 857				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
 858				       d, e, j, algo);
 859				/* so really, we got a bad message */
 860				ret = -EBADMSG;
 861				goto out;
 862			}
 863			break;
 864		case -EINPROGRESS:
 865		case -EBUSY:
 866			wait_for_completion(&result.completion);
 867			reinit_completion(&result.completion);
 868			ret = result.err;
 869			if (!ret)
 870				break;
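     			/* fall through */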
 871		case -EBADMSG:
 872			if (template[i].novrfy)
 873				/* verification failure was expected */
 874				continue;
 875			/* fall through */
 876		default:
 877			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
 878			       d, e, j, algo, -ret);
 879			goto out;
 880		}
 881
 882		ret = -EINVAL;
 883		for (k = 0, temp = 0; k < template[i].np; k++) {
 884			if (diff_dst)
 885				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 886				    offset_in_page(IDX[k]);
 887			else
 888				q = xbuf[IDX[k] >> PAGE_SHIFT] +
 889				    offset_in_page(IDX[k]);
 890
 891			n = template[i].tap[k];
 892			if (k == template[i].np - 1)
 893				n += enc ? authsize : -authsize;
 894
 895			if (memcmp(q, template[i].result + temp, n)) {
 896				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
 897				       d, j, e, k, algo);
 898				hexdump(q, n);
 899				goto out;
 900			}
 901
 902			q += n;
 903			if (k == template[i].np - 1 && !enc) {
 904				if (!diff_dst &&
 905					memcmp(q, template[i].input +
 906					      temp + n, authsize))
 907					n = authsize;
 908				else
 909					n = 0;
 910			} else {
 911				for (n = 0; offset_in_page(q + n) && q[n]; n++)
 912					;
 913			}
 914			if (n) {
 915				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
 916				       d, j, e, k, algo, n);
 917				hexdump(q, n);
 918				goto out;
 919			}
 920
 921			temp += template[i].tap[k];
 922		}
 923	}
 924
 925	ret = 0;
 926
 927out:
 928	aead_request_free(req);
 929	kfree(sg);
 930out_nosg:
 931	if (diff_dst)
 932		testmgr_free_buf(xoutbuf);
 933out_nooutbuf:
 934	testmgr_free_buf(axbuf);
 935out_noaxbuf:
 936	testmgr_free_buf(xbuf);
 937out_noxbuf:
 938	kfree(key);
 939	kfree(iv);
 940	return ret;
 941}
 942
 943static int test_aead(struct crypto_aead *tfm, int enc,
 944		     struct aead_testvec *template, unsigned int tcount)
 945{
 946	unsigned int alignmask;
 947	int ret;
 948
 949	/* test 'dst == src' case */
 950	ret = __test_aead(tfm, enc, template, tcount, false, 0);
 951	if (ret)
 952		return ret;
 953
 954	/* test 'dst != src' case */
 955	ret = __test_aead(tfm, enc, template, tcount, true, 0);
 956	if (ret)
 957		return ret;
 958
 959	/* test unaligned buffers, check with one byte offset */
 960	ret = __test_aead(tfm, enc, template, tcount, true, 1);
 961	if (ret)
 962		return ret;
 963
 964	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 965	if (alignmask) {
 966		/* Check if alignment mask for tfm is correctly set. */
 967		ret = __test_aead(tfm, enc, template, tcount, true,
 968				  alignmask + 1);
 969		if (ret)
 970			return ret;
 971	}
 972
 973	return 0;
 974}
 975
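     /*
      * Test a single-block cipher: run each vector one block at a time
      * through crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one() and
      * compare with the expected result.
      */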
 976static int test_cipher(struct crypto_cipher *tfm, int enc,
 977		       struct cipher_testvec *template, unsigned int tcount)
 978{
 979	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
 980	unsigned int i, j, k;
 981	char *q;
 982	const char *e;
 983	void *data;
 984	char *xbuf[XBUFSIZE];
 985	int ret = -ENOMEM;
 986
 987	if (testmgr_alloc_buf(xbuf))
 988		goto out_nobuf;
 989
 990	if (enc == ENCRYPT)
  991		e = "encryption";
 992	else
 993		e = "decryption";
 994
 995	j = 0;
 996	for (i = 0; i < tcount; i++) {
 997		if (template[i].np)
 998			continue;
 999
1000		j++;
1001
1002		ret = -EINVAL;
1003		if (WARN_ON(template[i].ilen > PAGE_SIZE))
1004			goto out;
1005
1006		data = xbuf[0];
1007		memcpy(data, template[i].input, template[i].ilen);
1008
1009		crypto_cipher_clear_flags(tfm, ~0);
1010		if (template[i].wk)
1011			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1012
1013		ret = crypto_cipher_setkey(tfm, template[i].key,
1014					   template[i].klen);
1015		if (!ret == template[i].fail) {
1016			printk(KERN_ERR "alg: cipher: setkey failed "
1017			       "on test %d for %s: flags=%x\n", j,
1018			       algo, crypto_cipher_get_flags(tfm));
1019			goto out;
1020		} else if (ret)
1021			continue;
1022
1023		for (k = 0; k < template[i].ilen;
1024		     k += crypto_cipher_blocksize(tfm)) {
1025			if (enc)
1026				crypto_cipher_encrypt_one(tfm, data + k,
1027							  data + k);
1028			else
1029				crypto_cipher_decrypt_one(tfm, data + k,
1030							  data + k);
1031		}
1032
1033		q = data;
1034		if (memcmp(q, template[i].result, template[i].rlen)) {
1035			printk(KERN_ERR "alg: cipher: Test %d failed "
1036			       "on %s for %s\n", j, e, algo);
1037			hexdump(q, template[i].rlen);
1038			ret = -EINVAL;
1039			goto out;
1040		}
1041	}
1042
1043	ret = 0;
1044
1045out:
1046	testmgr_free_buf(xbuf);
1047out_nobuf:
1048	return ret;
1049}
1050
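     /*
      * Core skcipher test: encrypt or decrypt each vector with a contiguous
      * buffer (checking the output IV when one is expected), then repeat
      * with the data split across page-crossing scatterlists.
      */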
1051static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1052			   struct cipher_testvec *template, unsigned int tcount,
1053			   const bool diff_dst, const int align_offset)
1054{
1055	const char *algo =
1056		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1057	unsigned int i, j, k, n, temp;
1058	char *q;
1059	struct skcipher_request *req;
1060	struct scatterlist sg[8];
1061	struct scatterlist sgout[8];
1062	const char *e, *d;
1063	struct tcrypt_result result;
1064	void *data;
1065	char iv[MAX_IVLEN];
1066	char *xbuf[XBUFSIZE];
1067	char *xoutbuf[XBUFSIZE];
1068	int ret = -ENOMEM;
1069	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1070
1071	if (testmgr_alloc_buf(xbuf))
1072		goto out_nobuf;
1073
1074	if (diff_dst && testmgr_alloc_buf(xoutbuf))
1075		goto out_nooutbuf;
1076
1077	if (diff_dst)
1078		d = "-ddst";
1079	else
1080		d = "";
1081
1082	if (enc == ENCRYPT)
 1083		e = "encryption";
1084	else
1085		e = "decryption";
1086
1087	init_completion(&result.completion);
1088
1089	req = skcipher_request_alloc(tfm, GFP_KERNEL);
1090	if (!req) {
1091		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1092		       d, algo);
1093		goto out;
1094	}
1095
1096	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1097				      tcrypt_complete, &result);
1098
1099	j = 0;
1100	for (i = 0; i < tcount; i++) {
1101		if (template[i].np && !template[i].also_non_np)
1102			continue;
1103
1104		if (template[i].iv)
1105			memcpy(iv, template[i].iv, ivsize);
1106		else
1107			memset(iv, 0, MAX_IVLEN);
1108
1109		j++;
1110		ret = -EINVAL;
1111		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1112			goto out;
1113
1114		data = xbuf[0];
1115		data += align_offset;
1116		memcpy(data, template[i].input, template[i].ilen);
1117
1118		crypto_skcipher_clear_flags(tfm, ~0);
1119		if (template[i].wk)
1120			crypto_skcipher_set_flags(tfm,
1121						  CRYPTO_TFM_REQ_WEAK_KEY);
1122
1123		ret = crypto_skcipher_setkey(tfm, template[i].key,
1124					     template[i].klen);
1125		if (!ret == template[i].fail) {
1126			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1127			       d, j, algo, crypto_skcipher_get_flags(tfm));
1128			goto out;
1129		} else if (ret)
1130			continue;
1131
1132		sg_init_one(&sg[0], data, template[i].ilen);
1133		if (diff_dst) {
1134			data = xoutbuf[0];
1135			data += align_offset;
1136			sg_init_one(&sgout[0], data, template[i].ilen);
1137		}
1138
1139		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1140					   template[i].ilen, iv);
1141		ret = enc ? crypto_skcipher_encrypt(req) :
1142			    crypto_skcipher_decrypt(req);
1143
1144		switch (ret) {
1145		case 0:
1146			break;
1147		case -EINPROGRESS:
1148		case -EBUSY:
1149			wait_for_completion(&result.completion);
1150			reinit_completion(&result.completion);
1151			ret = result.err;
1152			if (!ret)
1153				break;
1154			/* fall through */
1155		default:
1156			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1157			       d, e, j, algo, -ret);
1158			goto out;
1159		}
1160
1161		q = data;
1162		if (memcmp(q, template[i].result, template[i].rlen)) {
1163			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1164			       d, j, e, algo);
1165			hexdump(q, template[i].rlen);
1166			ret = -EINVAL;
1167			goto out;
1168		}
1169
1170		if (template[i].iv_out &&
1171		    memcmp(iv, template[i].iv_out,
1172			   crypto_skcipher_ivsize(tfm))) {
1173			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1174			       d, j, e, algo);
1175			hexdump(iv, crypto_skcipher_ivsize(tfm));
1176			ret = -EINVAL;
1177			goto out;
1178		}
1179	}
1180
1181	j = 0;
1182	for (i = 0; i < tcount; i++) {
1183		/* alignment tests are only done with continuous buffers */
1184		if (align_offset != 0)
1185			break;
1186
1187		if (!template[i].np)
1188			continue;
1189
1190		if (template[i].iv)
1191			memcpy(iv, template[i].iv, ivsize);
1192		else
1193			memset(iv, 0, MAX_IVLEN);
1194
1195		j++;
1196		crypto_skcipher_clear_flags(tfm, ~0);
1197		if (template[i].wk)
1198			crypto_skcipher_set_flags(tfm,
1199						  CRYPTO_TFM_REQ_WEAK_KEY);
1200
1201		ret = crypto_skcipher_setkey(tfm, template[i].key,
1202					     template[i].klen);
1203		if (!ret == template[i].fail) {
1204			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1205			       d, j, algo, crypto_skcipher_get_flags(tfm));
1206			goto out;
1207		} else if (ret)
1208			continue;
1209
1210		temp = 0;
1211		ret = -EINVAL;
1212		sg_init_table(sg, template[i].np);
1213		if (diff_dst)
1214			sg_init_table(sgout, template[i].np);
1215		for (k = 0; k < template[i].np; k++) {
1216			if (WARN_ON(offset_in_page(IDX[k]) +
1217				    template[i].tap[k] > PAGE_SIZE))
1218				goto out;
1219
1220			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1221
1222			memcpy(q, template[i].input + temp, template[i].tap[k]);
1223
1224			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1225				q[template[i].tap[k]] = 0;
1226
1227			sg_set_buf(&sg[k], q, template[i].tap[k]);
1228			if (diff_dst) {
1229				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1230				    offset_in_page(IDX[k]);
1231
1232				sg_set_buf(&sgout[k], q, template[i].tap[k]);
1233
1234				memset(q, 0, template[i].tap[k]);
1235				if (offset_in_page(q) +
1236				    template[i].tap[k] < PAGE_SIZE)
1237					q[template[i].tap[k]] = 0;
1238			}
1239
1240			temp += template[i].tap[k];
1241		}
1242
1243		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1244					   template[i].ilen, iv);
1245
1246		ret = enc ? crypto_skcipher_encrypt(req) :
1247			    crypto_skcipher_decrypt(req);
1248
1249		switch (ret) {
1250		case 0:
1251			break;
1252		case -EINPROGRESS:
1253		case -EBUSY:
1254			wait_for_completion(&result.completion);
1255			reinit_completion(&result.completion);
1256			ret = result.err;
1257			if (!ret)
1258				break;
1259			/* fall through */
1260		default:
1261			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1262			       d, e, j, algo, -ret);
1263			goto out;
1264		}
1265
1266		temp = 0;
1267		ret = -EINVAL;
1268		for (k = 0; k < template[i].np; k++) {
1269			if (diff_dst)
1270				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1271				    offset_in_page(IDX[k]);
1272			else
1273				q = xbuf[IDX[k] >> PAGE_SHIFT] +
1274				    offset_in_page(IDX[k]);
1275
1276			if (memcmp(q, template[i].result + temp,
1277				   template[i].tap[k])) {
1278				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1279				       d, j, e, k, algo);
1280				hexdump(q, template[i].tap[k]);
1281				goto out;
1282			}
1283
1284			q += template[i].tap[k];
1285			for (n = 0; offset_in_page(q + n) && q[n]; n++)
1286				;
1287			if (n) {
1288				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1289				       d, j, e, k, algo, n);
1290				hexdump(q, n);
1291				goto out;
1292			}
1293			temp += template[i].tap[k];
1294		}
1295	}
1296
1297	ret = 0;
1298
1299out:
1300	skcipher_request_free(req);
1301	if (diff_dst)
1302		testmgr_free_buf(xoutbuf);
1303out_nooutbuf:
1304	testmgr_free_buf(xbuf);
1305out_nobuf:
1306	return ret;
1307}
1308
1309static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1310			 struct cipher_testvec *template, unsigned int tcount)
1311{
1312	unsigned int alignmask;
1313	int ret;
1314
1315	/* test 'dst == src' case */
1316	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1317	if (ret)
1318		return ret;
1319
1320	/* test 'dst != src' case */
1321	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1322	if (ret)
1323		return ret;
1324
1325	/* test unaligned buffers, check with one byte offset */
1326	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1327	if (ret)
1328		return ret;
1329
1330	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1331	if (alignmask) {
1332		/* Check if alignment mask for tfm is correctly set. */
1333		ret = __test_skcipher(tfm, enc, template, tcount, true,
1334				      alignmask + 1);
1335		if (ret)
1336			return ret;
1337	}
1338
1339	return 0;
1340}
1341
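     /*
      * Compress and decompress the respective test vectors and compare both
      * the output length and the output data with the expected values.
      */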
1342static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1343		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
1344{
1345	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1346	unsigned int i;
1347	char result[COMP_BUF_SIZE];
1348	int ret;
1349
1350	for (i = 0; i < ctcount; i++) {
1351		int ilen;
1352		unsigned int dlen = COMP_BUF_SIZE;
1353
1354		memset(result, 0, sizeof (result));
1355
1356		ilen = ctemplate[i].inlen;
1357		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1358		                           ilen, result, &dlen);
1359		if (ret) {
1360			printk(KERN_ERR "alg: comp: compression failed "
1361			       "on test %d for %s: ret=%d\n", i + 1, algo,
1362			       -ret);
1363			goto out;
1364		}
1365
1366		if (dlen != ctemplate[i].outlen) {
1367			printk(KERN_ERR "alg: comp: Compression test %d "
1368			       "failed for %s: output len = %d\n", i + 1, algo,
1369			       dlen);
1370			ret = -EINVAL;
1371			goto out;
1372		}
1373
1374		if (memcmp(result, ctemplate[i].output, dlen)) {
1375			printk(KERN_ERR "alg: comp: Compression test %d "
1376			       "failed for %s\n", i + 1, algo);
1377			hexdump(result, dlen);
1378			ret = -EINVAL;
1379			goto out;
1380		}
1381	}
1382
1383	for (i = 0; i < dtcount; i++) {
1384		int ilen;
1385		unsigned int dlen = COMP_BUF_SIZE;
1386
1387		memset(result, 0, sizeof (result));
1388
1389		ilen = dtemplate[i].inlen;
1390		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1391		                             ilen, result, &dlen);
1392		if (ret) {
1393			printk(KERN_ERR "alg: comp: decompression failed "
1394			       "on test %d for %s: ret=%d\n", i + 1, algo,
1395			       -ret);
1396			goto out;
1397		}
1398
1399		if (dlen != dtemplate[i].outlen) {
1400			printk(KERN_ERR "alg: comp: Decompression test %d "
1401			       "failed for %s: output len = %d\n", i + 1, algo,
1402			       dlen);
1403			ret = -EINVAL;
1404			goto out;
1405		}
1406
1407		if (memcmp(result, dtemplate[i].output, dlen)) {
1408			printk(KERN_ERR "alg: comp: Decompression test %d "
1409			       "failed for %s\n", i + 1, algo);
1410			hexdump(result, dlen);
1411			ret = -EINVAL;
1412			goto out;
1413		}
1414	}
1415
1416	ret = 0;
1417
1418out:
1419	return ret;
1420}
1421
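     /*
      * Seed the CPRNG with the concatenation of V, the key and DT from each
      * vector, then generate the requested number of blocks and compare the
      * final block with the expected output.
      */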
1422static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1423		      unsigned int tcount)
1424{
1425	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1426	int err = 0, i, j, seedsize;
1427	u8 *seed;
1428	char result[32];
1429
1430	seedsize = crypto_rng_seedsize(tfm);
1431
1432	seed = kmalloc(seedsize, GFP_KERNEL);
1433	if (!seed) {
1434		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1435		       "for %s\n", algo);
1436		return -ENOMEM;
1437	}
1438
1439	for (i = 0; i < tcount; i++) {
1440		memset(result, 0, 32);
1441
1442		memcpy(seed, template[i].v, template[i].vlen);
1443		memcpy(seed + template[i].vlen, template[i].key,
1444		       template[i].klen);
1445		memcpy(seed + template[i].vlen + template[i].klen,
1446		       template[i].dt, template[i].dtlen);
1447
1448		err = crypto_rng_reset(tfm, seed, seedsize);
1449		if (err) {
1450			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1451			       "for %s\n", algo);
1452			goto out;
1453		}
1454
1455		for (j = 0; j < template[i].loops; j++) {
1456			err = crypto_rng_get_bytes(tfm, result,
1457						   template[i].rlen);
1458			if (err < 0) {
1459				printk(KERN_ERR "alg: cprng: Failed to obtain "
1460				       "the correct amount of random data for "
1461				       "%s (requested %d)\n", algo,
1462				       template[i].rlen);
1463				goto out;
1464			}
1465		}
1466
1467		err = memcmp(result, template[i].result,
1468			     template[i].rlen);
1469		if (err) {
1470			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1471			       i, algo);
1472			hexdump(result, template[i].rlen);
1473			err = -EINVAL;
1474			goto out;
1475		}
1476	}
1477
1478out:
1479	kfree(seed);
1480	return err;
1481}
1482
1483static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1484			 u32 type, u32 mask)
1485{
1486	struct crypto_aead *tfm;
1487	int err = 0;
1488
1489	tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1490	if (IS_ERR(tfm)) {
1491		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1492		       "%ld\n", driver, PTR_ERR(tfm));
1493		return PTR_ERR(tfm);
1494	}
1495
1496	if (desc->suite.aead.enc.vecs) {
1497		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1498				desc->suite.aead.enc.count);
1499		if (err)
1500			goto out;
1501	}
1502
1503	if (!err && desc->suite.aead.dec.vecs)
1504		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1505				desc->suite.aead.dec.count);
1506
1507out:
1508	crypto_free_aead(tfm);
1509	return err;
1510}
1511
1512static int alg_test_cipher(const struct alg_test_desc *desc,
1513			   const char *driver, u32 type, u32 mask)
1514{
1515	struct crypto_cipher *tfm;
1516	int err = 0;
1517
1518	tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1519	if (IS_ERR(tfm)) {
1520		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1521		       "%s: %ld\n", driver, PTR_ERR(tfm));
1522		return PTR_ERR(tfm);
1523	}
1524
1525	if (desc->suite.cipher.enc.vecs) {
1526		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1527				  desc->suite.cipher.enc.count);
1528		if (err)
1529			goto out;
1530	}
1531
1532	if (desc->suite.cipher.dec.vecs)
1533		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1534				  desc->suite.cipher.dec.count);
1535
1536out:
1537	crypto_free_cipher(tfm);
1538	return err;
1539}
1540
1541static int alg_test_skcipher(const struct alg_test_desc *desc,
1542			     const char *driver, u32 type, u32 mask)
1543{
1544	struct crypto_skcipher *tfm;
1545	int err = 0;
1546
1547	tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1548	if (IS_ERR(tfm)) {
1549		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1550		       "%s: %ld\n", driver, PTR_ERR(tfm));
1551		return PTR_ERR(tfm);
1552	}
1553
1554	if (desc->suite.cipher.enc.vecs) {
1555		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1556				    desc->suite.cipher.enc.count);
1557		if (err)
1558			goto out;
1559	}
1560
1561	if (desc->suite.cipher.dec.vecs)
1562		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1563				    desc->suite.cipher.dec.count);
1564
1565out:
1566	crypto_free_skcipher(tfm);
1567	return err;
1568}
1569
1570static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1571			 u32 type, u32 mask)
1572{
1573	struct crypto_comp *tfm;
1574	int err;
1575
1576	tfm = crypto_alloc_comp(driver, type, mask);
1577	if (IS_ERR(tfm)) {
1578		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1579		       "%ld\n", driver, PTR_ERR(tfm));
1580		return PTR_ERR(tfm);
1581	}
1582
1583	err = test_comp(tfm, desc->suite.comp.comp.vecs,
1584			desc->suite.comp.decomp.vecs,
1585			desc->suite.comp.comp.count,
1586			desc->suite.comp.decomp.count);
1587
1588	crypto_free_comp(tfm);
1589	return err;
1590}
1591
1592static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1593			 u32 type, u32 mask)
1594{
1595	struct crypto_ahash *tfm;
1596	int err;
1597
1598	tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1599	if (IS_ERR(tfm)) {
1600		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1601		       "%ld\n", driver, PTR_ERR(tfm));
1602		return PTR_ERR(tfm);
1603	}
1604
1605	err = test_hash(tfm, desc->suite.hash.vecs,
1606			desc->suite.hash.count, true);
1607	if (!err)
1608		err = test_hash(tfm, desc->suite.hash.vecs,
1609				desc->suite.hash.count, false);
1610
1611	crypto_free_ahash(tfm);
1612	return err;
1613}
1614
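     /*
      * In addition to the generic hash tests, seed the shash descriptor
      * context with a known value and check that final() returns its
      * bitwise inverse, as expected for crc32c's internal state.
      */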
1615static int alg_test_crc32c(const struct alg_test_desc *desc,
1616			   const char *driver, u32 type, u32 mask)
1617{
1618	struct crypto_shash *tfm;
1619	u32 val;
1620	int err;
1621
1622	err = alg_test_hash(desc, driver, type, mask);
1623	if (err)
1624		goto out;
1625
1626	tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1627	if (IS_ERR(tfm)) {
1628		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1629		       "%ld\n", driver, PTR_ERR(tfm));
1630		err = PTR_ERR(tfm);
1631		goto out;
1632	}
1633
1634	do {
1635		SHASH_DESC_ON_STACK(shash, tfm);
1636		u32 *ctx = (u32 *)shash_desc_ctx(shash);
1637
1638		shash->tfm = tfm;
1639		shash->flags = 0;
1640
1641		*ctx = le32_to_cpu(420553207);
1642		err = crypto_shash_final(shash, (u8 *)&val);
1643		if (err) {
1644			printk(KERN_ERR "alg: crc32c: Operation failed for "
1645			       "%s: %d\n", driver, err);
1646			break;
1647		}
1648
1649		if (val != ~420553207) {
1650			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1651			       "%d\n", driver, val);
1652			err = -EINVAL;
1653		}
1654	} while (0);
1655
1656	crypto_free_shash(tfm);
1657
1658out:
1659	return err;
1660}
1661
1662static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1663			  u32 type, u32 mask)
1664{
1665	struct crypto_rng *rng;
1666	int err;
1667
1668	rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1669	if (IS_ERR(rng)) {
1670		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1671		       "%ld\n", driver, PTR_ERR(rng));
1672		return PTR_ERR(rng);
1673	}
1674
1675	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1676
1677	crypto_free_rng(rng);
1678
1679	return err;
1680}
1681
1682
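     /*
      * CAVS-style DRBG known-answer test: instantiate the DRBG with the
      * test entropy and personalization string, generate two blocks with
      * additional input (using fresh test entropy when prediction
      * resistance is requested) and compare the second block with the
      * expected output.
      */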
1683static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1684			  const char *driver, u32 type, u32 mask)
1685{
1686	int ret = -EAGAIN;
1687	struct crypto_rng *drng;
1688	struct drbg_test_data test_data;
1689	struct drbg_string addtl, pers, testentropy;
1690	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1691
1692	if (!buf)
1693		return -ENOMEM;
1694
1695	drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1696	if (IS_ERR(drng)) {
1697		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1698		       "%s\n", driver);
1699		kzfree(buf);
1700		return -ENOMEM;
1701	}
1702
1703	test_data.testentropy = &testentropy;
1704	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1705	drbg_string_fill(&pers, test->pers, test->perslen);
1706	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1707	if (ret) {
1708		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1709		goto outbuf;
1710	}
1711
1712	drbg_string_fill(&addtl, test->addtla, test->addtllen);
1713	if (pr) {
1714		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1715		ret = crypto_drbg_get_bytes_addtl_test(drng,
1716			buf, test->expectedlen, &addtl,	&test_data);
1717	} else {
1718		ret = crypto_drbg_get_bytes_addtl(drng,
1719			buf, test->expectedlen, &addtl);
1720	}
1721	if (ret < 0) {
1722		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1723		       "driver %s\n", driver);
1724		goto outbuf;
1725	}
1726
1727	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1728	if (pr) {
1729		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1730		ret = crypto_drbg_get_bytes_addtl_test(drng,
1731			buf, test->expectedlen, &addtl, &test_data);
1732	} else {
1733		ret = crypto_drbg_get_bytes_addtl(drng,
1734			buf, test->expectedlen, &addtl);
1735	}
1736	if (ret < 0) {
1737		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1738		       "driver %s\n", driver);
1739		goto outbuf;
1740	}
1741
1742	ret = memcmp(test->expected, buf, test->expectedlen);
1743
1744outbuf:
1745	crypto_free_rng(drng);
1746	kzfree(buf);
1747	return ret;
1748}
1749
1750
1751static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1752			 u32 type, u32 mask)
1753{
1754	int err = 0;
1755	int pr = 0;
1756	int i = 0;
1757	struct drbg_testvec *template = desc->suite.drbg.vecs;
1758	unsigned int tcount = desc->suite.drbg.count;
1759
1760	if (0 == memcmp(driver, "drbg_pr_", 8))
1761		pr = 1;
1762
1763	for (i = 0; i < tcount; i++) {
1764		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1765		if (err) {
1766			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1767			       i, driver);
1768			err = -EINVAL;
1769			break;
1770		}
1771	}
1772	return err;
1773
1774}
1775
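     /*
      * Single RSA vector test: encrypt the message and compare with the
      * expected ciphertext; for private-key vectors, also decrypt the
      * expected ciphertext and compare with the original message.
      */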
1776static int do_test_rsa(struct crypto_akcipher *tfm,
1777		       struct akcipher_testvec *vecs)
1778{
1779	char *xbuf[XBUFSIZE];
1780	struct akcipher_request *req;
1781	void *outbuf_enc = NULL;
1782	void *outbuf_dec = NULL;
1783	struct tcrypt_result result;
1784	unsigned int out_len_max, out_len = 0;
1785	int err = -ENOMEM;
1786	struct scatterlist src, dst, src_tab[2];
1787
1788	if (testmgr_alloc_buf(xbuf))
1789		return err;
1790
1791	req = akcipher_request_alloc(tfm, GFP_KERNEL);
1792	if (!req)
1793		goto free_xbuf;
1794
1795	init_completion(&result.completion);
1796
1797	if (vecs->public_key_vec)
1798		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1799						  vecs->key_len);
1800	else
1801		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1802						   vecs->key_len);
1803	if (err)
1804		goto free_req;
1805
1806	out_len_max = crypto_akcipher_maxsize(tfm);
1807	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1808	if (!outbuf_enc)
1809		goto free_req;
1810
1811	if (WARN_ON(vecs->m_size > PAGE_SIZE))
1812		goto free_all;
1813
1814	memcpy(xbuf[0], vecs->m, vecs->m_size);
1815
1816	sg_init_table(src_tab, 2);
1817	sg_set_buf(&src_tab[0], xbuf[0], 8);
1818	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1819	sg_init_one(&dst, outbuf_enc, out_len_max);
1820	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1821				   out_len_max);
1822	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1823				      tcrypt_complete, &result);
1824
1825	/* Run RSA encrypt - c = m^e mod n;*/
1826	err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1827	if (err) {
1828		pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1829		goto free_all;
1830	}
1831	if (req->dst_len != vecs->c_size) {
1832		pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1833		err = -EINVAL;
1834		goto free_all;
1835	}
1836	/* verify that encrypted message is equal to expected */
1837	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1838		pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1839		err = -EINVAL;
1840		goto free_all;
1841	}
1842	/* Don't invoke decrypt for vectors with public key */
1843	if (vecs->public_key_vec) {
1844		err = 0;
1845		goto free_all;
1846	}
1847	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1848	if (!outbuf_dec) {
1849		err = -ENOMEM;
1850		goto free_all;
1851	}
1852
1853	if (WARN_ON(vecs->c_size > PAGE_SIZE))
1854		goto free_all;
1855
1856	memcpy(xbuf[0], vecs->c, vecs->c_size);
1857
1858	sg_init_one(&src, xbuf[0], vecs->c_size);
1859	sg_init_one(&dst, outbuf_dec, out_len_max);
1860	init_completion(&result.completion);
1861	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1862
1863	/* Run RSA decrypt - m = c^d mod n;*/
1864	err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1865	if (err) {
1866		pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1867		goto free_all;
1868	}
1869	out_len = req->dst_len;
1870	if (out_len != vecs->m_size) {
1871		pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1872		err = -EINVAL;
1873		goto free_all;
1874	}
1875	/* verify that decrypted message is equal to the original msg */
1876	if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1877		pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1878		err = -EINVAL;
1879	}
1880free_all:
1881	kfree(outbuf_dec);
1882	kfree(outbuf_enc);
1883free_req:
1884	akcipher_request_free(req);
1885free_xbuf:
1886	testmgr_free_buf(xbuf);
1887	return err;
1888}
1889
1890static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1891		    unsigned int tcount)
1892{
1893	int ret, i;
1894
1895	for (i = 0; i < tcount; i++) {
1896		ret = do_test_rsa(tfm, vecs++);
1897		if (ret) {
1898			pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1899			       i + 1, ret);
1900			return ret;
1901		}
1902	}
1903	return 0;
1904}
1905
1906static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
1907			 struct akcipher_testvec *vecs, unsigned int tcount)
1908{
1909	if (strncmp(alg, "rsa", 3) == 0)
1910		return test_rsa(tfm, vecs, tcount);
1911
1912	return 0;
1913}
1914
1915static int alg_test_akcipher(const struct alg_test_desc *desc,
1916			     const char *driver, u32 type, u32 mask)
1917{
1918	struct crypto_akcipher *tfm;
1919	int err = 0;
1920
1921	tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1922	if (IS_ERR(tfm)) {
1923		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1924		       driver, PTR_ERR(tfm));
1925		return PTR_ERR(tfm);
1926	}
1927	if (desc->suite.akcipher.vecs)
1928		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1929				    desc->suite.akcipher.count);
1930
1931	crypto_free_akcipher(tfm);
1932	return err;
1933}
1934
1935static int alg_test_null(const struct alg_test_desc *desc,
1936			     const char *driver, u32 type, u32 mask)
1937{
1938	return 0;
1939}
1940
1941/* Please keep this list sorted by algorithm name. */
1942static const struct alg_test_desc alg_test_descs[] = {
1943	{
1944		.alg = "__cbc-cast5-avx",
1945		.test = alg_test_null,
1946	}, {
1947		.alg = "__cbc-cast6-avx",
1948		.test = alg_test_null,
1949	}, {
1950		.alg = "__cbc-serpent-avx",
1951		.test = alg_test_null,
1952	}, {
1953		.alg = "__cbc-serpent-avx2",
1954		.test = alg_test_null,
1955	}, {
1956		.alg = "__cbc-serpent-sse2",
1957		.test = alg_test_null,
1958	}, {
1959		.alg = "__cbc-twofish-avx",
1960		.test = alg_test_null,
1961	}, {
1962		.alg = "__driver-cbc-aes-aesni",
1963		.test = alg_test_null,
1964		.fips_allowed = 1,
1965	}, {
1966		.alg = "__driver-cbc-camellia-aesni",
1967		.test = alg_test_null,
1968	}, {
1969		.alg = "__driver-cbc-camellia-aesni-avx2",
1970		.test = alg_test_null,
1971	}, {
1972		.alg = "__driver-cbc-cast5-avx",
1973		.test = alg_test_null,
1974	}, {
1975		.alg = "__driver-cbc-cast6-avx",
1976		.test = alg_test_null,
1977	}, {
1978		.alg = "__driver-cbc-serpent-avx",
1979		.test = alg_test_null,
1980	}, {
1981		.alg = "__driver-cbc-serpent-avx2",
1982		.test = alg_test_null,
1983	}, {
1984		.alg = "__driver-cbc-serpent-sse2",
1985		.test = alg_test_null,
1986	}, {
1987		.alg = "__driver-cbc-twofish-avx",
1988		.test = alg_test_null,
1989	}, {
1990		.alg = "__driver-ecb-aes-aesni",
1991		.test = alg_test_null,
1992		.fips_allowed = 1,
1993	}, {
1994		.alg = "__driver-ecb-camellia-aesni",
1995		.test = alg_test_null,
1996	}, {
1997		.alg = "__driver-ecb-camellia-aesni-avx2",
1998		.test = alg_test_null,
1999	}, {
2000		.alg = "__driver-ecb-cast5-avx",
2001		.test = alg_test_null,
2002	}, {
2003		.alg = "__driver-ecb-cast6-avx",
2004		.test = alg_test_null,
2005	}, {
2006		.alg = "__driver-ecb-serpent-avx",
2007		.test = alg_test_null,
2008	}, {
2009		.alg = "__driver-ecb-serpent-avx2",
2010		.test = alg_test_null,
2011	}, {
2012		.alg = "__driver-ecb-serpent-sse2",
2013		.test = alg_test_null,
2014	}, {
2015		.alg = "__driver-ecb-twofish-avx",
2016		.test = alg_test_null,
2017	}, {
2018		.alg = "__driver-gcm-aes-aesni",
2019		.test = alg_test_null,
2020		.fips_allowed = 1,
2021	}, {
2022		.alg = "__ghash-pclmulqdqni",
2023		.test = alg_test_null,
2024		.fips_allowed = 1,
2025	}, {
2026		.alg = "ansi_cprng",
2027		.test = alg_test_cprng,
2028		.suite = {
2029			.cprng = {
2030				.vecs = ansi_cprng_aes_tv_template,
2031				.count = ANSI_CPRNG_AES_TEST_VECTORS
2032			}
2033		}
2034	}, {
2035		.alg = "authenc(hmac(md5),ecb(cipher_null))",
2036		.test = alg_test_aead,
2037		.suite = {
2038			.aead = {
2039				.enc = {
2040					.vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2041					.count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2042				},
2043				.dec = {
2044					.vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2045					.count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2046				}
2047			}
2048		}
2049	}, {
2050		.alg = "authenc(hmac(sha1),cbc(aes))",
2051		.test = alg_test_aead,
2052		.suite = {
2053			.aead = {
2054				.enc = {
2055					.vecs =
2056					hmac_sha1_aes_cbc_enc_tv_temp,
2057					.count =
2058					HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2059				}
2060			}
2061		}
2062	}, {
2063		.alg = "authenc(hmac(sha1),cbc(des))",
2064		.test = alg_test_aead,
2065		.suite = {
2066			.aead = {
2067				.enc = {
2068					.vecs =
2069					hmac_sha1_des_cbc_enc_tv_temp,
2070					.count =
2071					HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2072				}
2073			}
2074		}
2075	}, {
2076		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
2077		.test = alg_test_aead,
2078		.fips_allowed = 1,
2079		.suite = {
2080			.aead = {
2081				.enc = {
2082					.vecs =
2083					hmac_sha1_des3_ede_cbc_enc_tv_temp,
2084					.count =
2085					HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2086				}
2087			}
2088		}
2089	}, {
2090		.alg = "authenc(hmac(sha1),ctr(aes))",
2091		.test = alg_test_null,
2092		.fips_allowed = 1,
2093	}, {
2094		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
2095		.test = alg_test_aead,
2096		.suite = {
2097			.aead = {
2098				.enc = {
2099					.vecs =
2100					hmac_sha1_ecb_cipher_null_enc_tv_temp,
2101					.count =
2102					HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2103				},
2104				.dec = {
2105					.vecs =
2106					hmac_sha1_ecb_cipher_null_dec_tv_temp,
2107					.count =
2108					HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2109				}
2110			}
2111		}
2112	}, {
2113		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2114		.test = alg_test_null,
2115		.fips_allowed = 1,
2116	}, {
2117		.alg = "authenc(hmac(sha224),cbc(des))",
2118		.test = alg_test_aead,
2119		.suite = {
2120			.aead = {
2121				.enc = {
2122					.vecs =
2123					hmac_sha224_des_cbc_enc_tv_temp,
2124					.count =
2125					HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2126				}
2127			}
2128		}
2129	}, {
2130		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
2131		.test = alg_test_aead,
2132		.fips_allowed = 1,
2133		.suite = {
2134			.aead = {
2135				.enc = {
2136					.vecs =
2137					hmac_sha224_des3_ede_cbc_enc_tv_temp,
2138					.count =
2139					HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2140				}
2141			}
2142		}
2143	}, {
2144		.alg = "authenc(hmac(sha256),cbc(aes))",
2145		.test = alg_test_aead,
2146		.fips_allowed = 1,
2147		.suite = {
2148			.aead = {
2149				.enc = {
2150					.vecs =
2151					hmac_sha256_aes_cbc_enc_tv_temp,
2152					.count =
2153					HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2154				}
2155			}
2156		}
2157	}, {
2158		.alg = "authenc(hmac(sha256),cbc(des))",
2159		.test = alg_test_aead,
2160		.suite = {
2161			.aead = {
2162				.enc = {
2163					.vecs =
2164					hmac_sha256_des_cbc_enc_tv_temp,
2165					.count =
2166					HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2167				}
2168			}
2169		}
2170	}, {
2171		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
2172		.test = alg_test_aead,
2173		.fips_allowed = 1,
2174		.suite = {
2175			.aead = {
2176				.enc = {
2177					.vecs =
2178					hmac_sha256_des3_ede_cbc_enc_tv_temp,
2179					.count =
2180					HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2181				}
2182			}
2183		}
2184	}, {
2185		.alg = "authenc(hmac(sha256),ctr(aes))",
2186		.test = alg_test_null,
2187		.fips_allowed = 1,
2188	}, {
2189		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2190		.test = alg_test_null,
2191		.fips_allowed = 1,
2192	}, {
2193		.alg = "authenc(hmac(sha384),cbc(des))",
2194		.test = alg_test_aead,
2195		.suite = {
2196			.aead = {
2197				.enc = {
2198					.vecs =
2199					hmac_sha384_des_cbc_enc_tv_temp,
2200					.count =
2201					HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2202				}
2203			}
2204		}
2205	}, {
2206		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
2207		.test = alg_test_aead,
2208		.fips_allowed = 1,
2209		.suite = {
2210			.aead = {
2211				.enc = {
2212					.vecs =
2213					hmac_sha384_des3_ede_cbc_enc_tv_temp,
2214					.count =
2215					HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2216				}
2217			}
2218		}
2219	}, {
2220		.alg = "authenc(hmac(sha384),ctr(aes))",
2221		.test = alg_test_null,
2222		.fips_allowed = 1,
2223	}, {
2224		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2225		.test = alg_test_null,
2226		.fips_allowed = 1,
2227	}, {
2228		.alg = "authenc(hmac(sha512),cbc(aes))",
2229		.fips_allowed = 1,
2230		.test = alg_test_aead,
2231		.suite = {
2232			.aead = {
2233				.enc = {
2234					.vecs =
2235					hmac_sha512_aes_cbc_enc_tv_temp,
2236					.count =
2237					HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2238				}
2239			}
2240		}
2241	}, {
2242		.alg = "authenc(hmac(sha512),cbc(des))",
2243		.test = alg_test_aead,
2244		.suite = {
2245			.aead = {
2246				.enc = {
2247					.vecs =
2248					hmac_sha512_des_cbc_enc_tv_temp,
2249					.count =
2250					HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2251				}
2252			}
2253		}
2254	}, {
2255		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
2256		.test = alg_test_aead,
2257		.fips_allowed = 1,
2258		.suite = {
2259			.aead = {
2260				.enc = {
2261					.vecs =
2262					hmac_sha512_des3_ede_cbc_enc_tv_temp,
2263					.count =
2264					HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2265				}
2266			}
2267		}
2268	}, {
2269		.alg = "authenc(hmac(sha512),ctr(aes))",
2270		.test = alg_test_null,
2271		.fips_allowed = 1,
2272	}, {
2273		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2274		.test = alg_test_null,
2275		.fips_allowed = 1,
2276	}, {
2277		.alg = "cbc(aes)",
2278		.test = alg_test_skcipher,
2279		.fips_allowed = 1,
2280		.suite = {
2281			.cipher = {
2282				.enc = {
2283					.vecs = aes_cbc_enc_tv_template,
2284					.count = AES_CBC_ENC_TEST_VECTORS
2285				},
2286				.dec = {
2287					.vecs = aes_cbc_dec_tv_template,
2288					.count = AES_CBC_DEC_TEST_VECTORS
2289				}
2290			}
2291		}
2292	}, {
2293		.alg = "cbc(anubis)",
2294		.test = alg_test_skcipher,
2295		.suite = {
2296			.cipher = {
2297				.enc = {
2298					.vecs = anubis_cbc_enc_tv_template,
2299					.count = ANUBIS_CBC_ENC_TEST_VECTORS
2300				},
2301				.dec = {
2302					.vecs = anubis_cbc_dec_tv_template,
2303					.count = ANUBIS_CBC_DEC_TEST_VECTORS
2304				}
2305			}
2306		}
2307	}, {
2308		.alg = "cbc(blowfish)",
2309		.test = alg_test_skcipher,
2310		.suite = {
2311			.cipher = {
2312				.enc = {
2313					.vecs = bf_cbc_enc_tv_template,
2314					.count = BF_CBC_ENC_TEST_VECTORS
2315				},
2316				.dec = {
2317					.vecs = bf_cbc_dec_tv_template,
2318					.count = BF_CBC_DEC_TEST_VECTORS
2319				}
2320			}
2321		}
2322	}, {
2323		.alg = "cbc(camellia)",
2324		.test = alg_test_skcipher,
2325		.suite = {
2326			.cipher = {
2327				.enc = {
2328					.vecs = camellia_cbc_enc_tv_template,
2329					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
2330				},
2331				.dec = {
2332					.vecs = camellia_cbc_dec_tv_template,
2333					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
2334				}
2335			}
2336		}
2337	}, {
2338		.alg = "cbc(cast5)",
2339		.test = alg_test_skcipher,
2340		.suite = {
2341			.cipher = {
2342				.enc = {
2343					.vecs = cast5_cbc_enc_tv_template,
2344					.count = CAST5_CBC_ENC_TEST_VECTORS
2345				},
2346				.dec = {
2347					.vecs = cast5_cbc_dec_tv_template,
2348					.count = CAST5_CBC_DEC_TEST_VECTORS
2349				}
2350			}
2351		}
2352	}, {
2353		.alg = "cbc(cast6)",
2354		.test = alg_test_skcipher,
2355		.suite = {
2356			.cipher = {
2357				.enc = {
2358					.vecs = cast6_cbc_enc_tv_template,
2359					.count = CAST6_CBC_ENC_TEST_VECTORS
2360				},
2361				.dec = {
2362					.vecs = cast6_cbc_dec_tv_template,
2363					.count = CAST6_CBC_DEC_TEST_VECTORS
2364				}
2365			}
2366		}
2367	}, {
2368		.alg = "cbc(des)",
2369		.test = alg_test_skcipher,
2370		.suite = {
2371			.cipher = {
2372				.enc = {
2373					.vecs = des_cbc_enc_tv_template,
2374					.count = DES_CBC_ENC_TEST_VECTORS
2375				},
2376				.dec = {
2377					.vecs = des_cbc_dec_tv_template,
2378					.count = DES_CBC_DEC_TEST_VECTORS
2379				}
2380			}
2381		}
2382	}, {
2383		.alg = "cbc(des3_ede)",
2384		.test = alg_test_skcipher,
2385		.fips_allowed = 1,
2386		.suite = {
2387			.cipher = {
2388				.enc = {
2389					.vecs = des3_ede_cbc_enc_tv_template,
2390					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
2391				},
2392				.dec = {
2393					.vecs = des3_ede_cbc_dec_tv_template,
2394					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
2395				}
2396			}
2397		}
2398	}, {
2399		.alg = "cbc(serpent)",
2400		.test = alg_test_skcipher,
2401		.suite = {
2402			.cipher = {
2403				.enc = {
2404					.vecs = serpent_cbc_enc_tv_template,
2405					.count = SERPENT_CBC_ENC_TEST_VECTORS
2406				},
2407				.dec = {
2408					.vecs = serpent_cbc_dec_tv_template,
2409					.count = SERPENT_CBC_DEC_TEST_VECTORS
2410				}
2411			}
2412		}
2413	}, {
2414		.alg = "cbc(twofish)",
2415		.test = alg_test_skcipher,
2416		.suite = {
2417			.cipher = {
2418				.enc = {
2419					.vecs = tf_cbc_enc_tv_template,
2420					.count = TF_CBC_ENC_TEST_VECTORS
2421				},
2422				.dec = {
2423					.vecs = tf_cbc_dec_tv_template,
2424					.count = TF_CBC_DEC_TEST_VECTORS
2425				}
2426			}
2427		}
2428	}, {
2429		.alg = "ccm(aes)",
2430		.test = alg_test_aead,
2431		.fips_allowed = 1,
2432		.suite = {
2433			.aead = {
2434				.enc = {
2435					.vecs = aes_ccm_enc_tv_template,
2436					.count = AES_CCM_ENC_TEST_VECTORS
2437				},
2438				.dec = {
2439					.vecs = aes_ccm_dec_tv_template,
2440					.count = AES_CCM_DEC_TEST_VECTORS
2441				}
2442			}
2443		}
2444	}, {
2445		.alg = "chacha20",
2446		.test = alg_test_skcipher,
2447		.suite = {
2448			.cipher = {
2449				.enc = {
2450					.vecs = chacha20_enc_tv_template,
2451					.count = CHACHA20_ENC_TEST_VECTORS
2452				},
2453				.dec = {
2454					.vecs = chacha20_enc_tv_template,
2455					.count = CHACHA20_ENC_TEST_VECTORS
2456				},
2457			}
2458		}
2459	}, {
2460		.alg = "cmac(aes)",
2461		.fips_allowed = 1,
2462		.test = alg_test_hash,
2463		.suite = {
2464			.hash = {
2465				.vecs = aes_cmac128_tv_template,
2466				.count = CMAC_AES_TEST_VECTORS
2467			}
2468		}
2469	}, {
2470		.alg = "cmac(des3_ede)",
2471		.fips_allowed = 1,
2472		.test = alg_test_hash,
2473		.suite = {
2474			.hash = {
2475				.vecs = des3_ede_cmac64_tv_template,
2476				.count = CMAC_DES3_EDE_TEST_VECTORS
2477			}
2478		}
2479	}, {
2480		.alg = "compress_null",
2481		.test = alg_test_null,
2482	}, {
2483		.alg = "crc32",
2484		.test = alg_test_hash,
2485		.suite = {
2486			.hash = {
2487				.vecs = crc32_tv_template,
2488				.count = CRC32_TEST_VECTORS
2489			}
2490		}
2491	}, {
2492		.alg = "crc32c",
2493		.test = alg_test_crc32c,
2494		.fips_allowed = 1,
2495		.suite = {
2496			.hash = {
2497				.vecs = crc32c_tv_template,
2498				.count = CRC32C_TEST_VECTORS
2499			}
2500		}
2501	}, {
2502		.alg = "crct10dif",
2503		.test = alg_test_hash,
2504		.fips_allowed = 1,
2505		.suite = {
2506			.hash = {
2507				.vecs = crct10dif_tv_template,
2508				.count = CRCT10DIF_TEST_VECTORS
2509			}
2510		}
2511	}, {
2512		.alg = "cryptd(__driver-cbc-aes-aesni)",
2513		.test = alg_test_null,
2514		.fips_allowed = 1,
2515	}, {
2516		.alg = "cryptd(__driver-cbc-camellia-aesni)",
2517		.test = alg_test_null,
2518	}, {
2519		.alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2520		.test = alg_test_null,
2521	}, {
2522		.alg = "cryptd(__driver-cbc-serpent-avx2)",
2523		.test = alg_test_null,
2524	}, {
2525		.alg = "cryptd(__driver-ecb-aes-aesni)",
2526		.test = alg_test_null,
2527		.fips_allowed = 1,
2528	}, {
2529		.alg = "cryptd(__driver-ecb-camellia-aesni)",
2530		.test = alg_test_null,
2531	}, {
2532		.alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2533		.test = alg_test_null,
2534	}, {
2535		.alg = "cryptd(__driver-ecb-cast5-avx)",
2536		.test = alg_test_null,
2537	}, {
2538		.alg = "cryptd(__driver-ecb-cast6-avx)",
2539		.test = alg_test_null,
2540	}, {
2541		.alg = "cryptd(__driver-ecb-serpent-avx)",
2542		.test = alg_test_null,
2543	}, {
2544		.alg = "cryptd(__driver-ecb-serpent-avx2)",
2545		.test = alg_test_null,
2546	}, {
2547		.alg = "cryptd(__driver-ecb-serpent-sse2)",
2548		.test = alg_test_null,
2549	}, {
2550		.alg = "cryptd(__driver-ecb-twofish-avx)",
2551		.test = alg_test_null,
2552	}, {
2553		.alg = "cryptd(__driver-gcm-aes-aesni)",
2554		.test = alg_test_null,
2555		.fips_allowed = 1,
2556	}, {
2557		.alg = "cryptd(__ghash-pclmulqdqni)",
2558		.test = alg_test_null,
2559		.fips_allowed = 1,
2560	}, {
2561		.alg = "ctr(aes)",
2562		.test = alg_test_skcipher,
2563		.fips_allowed = 1,
2564		.suite = {
2565			.cipher = {
2566				.enc = {
2567					.vecs = aes_ctr_enc_tv_template,
2568					.count = AES_CTR_ENC_TEST_VECTORS
2569				},
2570				.dec = {
2571					.vecs = aes_ctr_dec_tv_template,
2572					.count = AES_CTR_DEC_TEST_VECTORS
2573				}
2574			}
2575		}
2576	}, {
2577		.alg = "ctr(blowfish)",
2578		.test = alg_test_skcipher,
2579		.suite = {
2580			.cipher = {
2581				.enc = {
2582					.vecs = bf_ctr_enc_tv_template,
2583					.count = BF_CTR_ENC_TEST_VECTORS
2584				},
2585				.dec = {
2586					.vecs = bf_ctr_dec_tv_template,
2587					.count = BF_CTR_DEC_TEST_VECTORS
2588				}
2589			}
2590		}
2591	}, {
2592		.alg = "ctr(camellia)",
2593		.test = alg_test_skcipher,
2594		.suite = {
2595			.cipher = {
2596				.enc = {
2597					.vecs = camellia_ctr_enc_tv_template,
2598					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
2599				},
2600				.dec = {
2601					.vecs = camellia_ctr_dec_tv_template,
2602					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
2603				}
2604			}
2605		}
2606	}, {
2607		.alg = "ctr(cast5)",
2608		.test = alg_test_skcipher,
2609		.suite = {
2610			.cipher = {
2611				.enc = {
2612					.vecs = cast5_ctr_enc_tv_template,
2613					.count = CAST5_CTR_ENC_TEST_VECTORS
2614				},
2615				.dec = {
2616					.vecs = cast5_ctr_dec_tv_template,
2617					.count = CAST5_CTR_DEC_TEST_VECTORS
2618				}
2619			}
2620		}
2621	}, {
2622		.alg = "ctr(cast6)",
2623		.test = alg_test_skcipher,
2624		.suite = {
2625			.cipher = {
2626				.enc = {
2627					.vecs = cast6_ctr_enc_tv_template,
2628					.count = CAST6_CTR_ENC_TEST_VECTORS
2629				},
2630				.dec = {
2631					.vecs = cast6_ctr_dec_tv_template,
2632					.count = CAST6_CTR_DEC_TEST_VECTORS
2633				}
2634			}
2635		}
2636	}, {
2637		.alg = "ctr(des)",
2638		.test = alg_test_skcipher,
2639		.suite = {
2640			.cipher = {
2641				.enc = {
2642					.vecs = des_ctr_enc_tv_template,
2643					.count = DES_CTR_ENC_TEST_VECTORS
2644				},
2645				.dec = {
2646					.vecs = des_ctr_dec_tv_template,
2647					.count = DES_CTR_DEC_TEST_VECTORS
2648				}
2649			}
2650		}
2651	}, {
2652		.alg = "ctr(des3_ede)",
2653		.test = alg_test_skcipher,
2654		.suite = {
2655			.cipher = {
2656				.enc = {
2657					.vecs = des3_ede_ctr_enc_tv_template,
2658					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
2659				},
2660				.dec = {
2661					.vecs = des3_ede_ctr_dec_tv_template,
2662					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
2663				}
2664			}
2665		}
2666	}, {
2667		.alg = "ctr(serpent)",
2668		.test = alg_test_skcipher,
2669		.suite = {
2670			.cipher = {
2671				.enc = {
2672					.vecs = serpent_ctr_enc_tv_template,
2673					.count = SERPENT_CTR_ENC_TEST_VECTORS
2674				},
2675				.dec = {
2676					.vecs = serpent_ctr_dec_tv_template,
2677					.count = SERPENT_CTR_DEC_TEST_VECTORS
2678				}
2679			}
2680		}
2681	}, {
2682		.alg = "ctr(twofish)",
2683		.test = alg_test_skcipher,
2684		.suite = {
2685			.cipher = {
2686				.enc = {
2687					.vecs = tf_ctr_enc_tv_template,
2688					.count = TF_CTR_ENC_TEST_VECTORS
2689				},
2690				.dec = {
2691					.vecs = tf_ctr_dec_tv_template,
2692					.count = TF_CTR_DEC_TEST_VECTORS
2693				}
2694			}
2695		}
2696	}, {
2697		.alg = "cts(cbc(aes))",
2698		.test = alg_test_skcipher,
2699		.suite = {
2700			.cipher = {
2701				.enc = {
2702					.vecs = cts_mode_enc_tv_template,
2703					.count = CTS_MODE_ENC_TEST_VECTORS
2704				},
2705				.dec = {
2706					.vecs = cts_mode_dec_tv_template,
2707					.count = CTS_MODE_DEC_TEST_VECTORS
2708				}
2709			}
2710		}
2711	}, {
2712		.alg = "deflate",
2713		.test = alg_test_comp,
2714		.fips_allowed = 1,
2715		.suite = {
2716			.comp = {
2717				.comp = {
2718					.vecs = deflate_comp_tv_template,
2719					.count = DEFLATE_COMP_TEST_VECTORS
2720				},
2721				.decomp = {
2722					.vecs = deflate_decomp_tv_template,
2723					.count = DEFLATE_DECOMP_TEST_VECTORS
2724				}
2725			}
2726		}
2727	}, {
2728		.alg = "digest_null",
2729		.test = alg_test_null,
2730	}, {
2731		.alg = "drbg_nopr_ctr_aes128",
2732		.test = alg_test_drbg,
2733		.fips_allowed = 1,
2734		.suite = {
2735			.drbg = {
2736				.vecs = drbg_nopr_ctr_aes128_tv_template,
2737				.count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2738			}
2739		}
2740	}, {
2741		.alg = "drbg_nopr_ctr_aes192",
2742		.test = alg_test_drbg,
2743		.fips_allowed = 1,
2744		.suite = {
2745			.drbg = {
2746				.vecs = drbg_nopr_ctr_aes192_tv_template,
2747				.count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2748			}
2749		}
2750	}, {
2751		.alg = "drbg_nopr_ctr_aes256",
2752		.test = alg_test_drbg,
2753		.fips_allowed = 1,
2754		.suite = {
2755			.drbg = {
2756				.vecs = drbg_nopr_ctr_aes256_tv_template,
2757				.count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2758			}
2759		}
2760	}, {
2761		/*
2762		 * There is no need to specifically test the DRBG with every
2763		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2764		 */
2765		.alg = "drbg_nopr_hmac_sha1",
2766		.fips_allowed = 1,
2767		.test = alg_test_null,
2768	}, {
2769		.alg = "drbg_nopr_hmac_sha256",
2770		.test = alg_test_drbg,
2771		.fips_allowed = 1,
2772		.suite = {
2773			.drbg = {
2774				.vecs = drbg_nopr_hmac_sha256_tv_template,
2775				.count =
2776				ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2777			}
2778		}
2779	}, {
2780		/* covered by drbg_nopr_hmac_sha256 test */
2781		.alg = "drbg_nopr_hmac_sha384",
2782		.fips_allowed = 1,
2783		.test = alg_test_null,
2784	}, {
2785		.alg = "drbg_nopr_hmac_sha512",
2786		.test = alg_test_null,
2787		.fips_allowed = 1,
2788	}, {
2789		.alg = "drbg_nopr_sha1",
2790		.fips_allowed = 1,
2791		.test = alg_test_null,
2792	}, {
2793		.alg = "drbg_nopr_sha256",
2794		.test = alg_test_drbg,
2795		.fips_allowed = 1,
2796		.suite = {
2797			.drbg = {
2798				.vecs = drbg_nopr_sha256_tv_template,
2799				.count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2800			}
2801		}
2802	}, {
2803		/* covered by drbg_nopr_sha256 test */
2804		.alg = "drbg_nopr_sha384",
2805		.fips_allowed = 1,
2806		.test = alg_test_null,
2807	}, {
2808		.alg = "drbg_nopr_sha512",
2809		.fips_allowed = 1,
2810		.test = alg_test_null,
2811	}, {
2812		.alg = "drbg_pr_ctr_aes128",
2813		.test = alg_test_drbg,
2814		.fips_allowed = 1,
2815		.suite = {
2816			.drbg = {
2817				.vecs = drbg_pr_ctr_aes128_tv_template,
2818				.count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2819			}
2820		}
2821	}, {
2822		/* covered by drbg_pr_ctr_aes128 test */
2823		.alg = "drbg_pr_ctr_aes192",
2824		.fips_allowed = 1,
2825		.test = alg_test_null,
2826	}, {
2827		.alg = "drbg_pr_ctr_aes256",
2828		.fips_allowed = 1,
2829		.test = alg_test_null,
2830	}, {
2831		.alg = "drbg_pr_hmac_sha1",
2832		.fips_allowed = 1,
2833		.test = alg_test_null,
2834	}, {
2835		.alg = "drbg_pr_hmac_sha256",
2836		.test = alg_test_drbg,
2837		.fips_allowed = 1,
2838		.suite = {
2839			.drbg = {
2840				.vecs = drbg_pr_hmac_sha256_tv_template,
2841				.count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2842			}
2843		}
2844	}, {
2845		/* covered by drbg_pr_hmac_sha256 test */
2846		.alg = "drbg_pr_hmac_sha384",
2847		.fips_allowed = 1,
2848		.test = alg_test_null,
2849	}, {
2850		.alg = "drbg_pr_hmac_sha512",
2851		.test = alg_test_null,
2852		.fips_allowed = 1,
2853	}, {
2854		.alg = "drbg_pr_sha1",
2855		.fips_allowed = 1,
2856		.test = alg_test_null,
2857	}, {
2858		.alg = "drbg_pr_sha256",
2859		.test = alg_test_drbg,
2860		.fips_allowed = 1,
2861		.suite = {
2862			.drbg = {
2863				.vecs = drbg_pr_sha256_tv_template,
2864				.count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2865			}
2866		}
2867	}, {
2868		/* covered by drbg_pr_sha256 test */
2869		.alg = "drbg_pr_sha384",
2870		.fips_allowed = 1,
2871		.test = alg_test_null,
2872	}, {
2873		.alg = "drbg_pr_sha512",
2874		.fips_allowed = 1,
2875		.test = alg_test_null,
2876	}, {
2877		.alg = "ecb(__aes-aesni)",
2878		.test = alg_test_null,
2879		.fips_allowed = 1,
2880	}, {
2881		.alg = "ecb(aes)",
2882		.test = alg_test_skcipher,
2883		.fips_allowed = 1,
2884		.suite = {
2885			.cipher = {
2886				.enc = {
2887					.vecs = aes_enc_tv_template,
2888					.count = AES_ENC_TEST_VECTORS
2889				},
2890				.dec = {
2891					.vecs = aes_dec_tv_template,
2892					.count = AES_DEC_TEST_VECTORS
2893				}
2894			}
2895		}
2896	}, {
2897		.alg = "ecb(anubis)",
2898		.test = alg_test_skcipher,
2899		.suite = {
2900			.cipher = {
2901				.enc = {
2902					.vecs = anubis_enc_tv_template,
2903					.count = ANUBIS_ENC_TEST_VECTORS
2904				},
2905				.dec = {
2906					.vecs = anubis_dec_tv_template,
2907					.count = ANUBIS_DEC_TEST_VECTORS
2908				}
2909			}
2910		}
2911	}, {
2912		.alg = "ecb(arc4)",
2913		.test = alg_test_skcipher,
2914		.suite = {
2915			.cipher = {
2916				.enc = {
2917					.vecs = arc4_enc_tv_template,
2918					.count = ARC4_ENC_TEST_VECTORS
2919				},
2920				.dec = {
2921					.vecs = arc4_dec_tv_template,
2922					.count = ARC4_DEC_TEST_VECTORS
2923				}
2924			}
2925		}
2926	}, {
2927		.alg = "ecb(blowfish)",
2928		.test = alg_test_skcipher,
2929		.suite = {
2930			.cipher = {
2931				.enc = {
2932					.vecs = bf_enc_tv_template,
2933					.count = BF_ENC_TEST_VECTORS
2934				},
2935				.dec = {
2936					.vecs = bf_dec_tv_template,
2937					.count = BF_DEC_TEST_VECTORS
2938				}
2939			}
2940		}
2941	}, {
2942		.alg = "ecb(camellia)",
2943		.test = alg_test_skcipher,
2944		.suite = {
2945			.cipher = {
2946				.enc = {
2947					.vecs = camellia_enc_tv_template,
2948					.count = CAMELLIA_ENC_TEST_VECTORS
2949				},
2950				.dec = {
2951					.vecs = camellia_dec_tv_template,
2952					.count = CAMELLIA_DEC_TEST_VECTORS
2953				}
2954			}
2955		}
2956	}, {
2957		.alg = "ecb(cast5)",
2958		.test = alg_test_skcipher,
2959		.suite = {
2960			.cipher = {
2961				.enc = {
2962					.vecs = cast5_enc_tv_template,
2963					.count = CAST5_ENC_TEST_VECTORS
2964				},
2965				.dec = {
2966					.vecs = cast5_dec_tv_template,
2967					.count = CAST5_DEC_TEST_VECTORS
2968				}
2969			}
2970		}
2971	}, {
2972		.alg = "ecb(cast6)",
2973		.test = alg_test_skcipher,
2974		.suite = {
2975			.cipher = {
2976				.enc = {
2977					.vecs = cast6_enc_tv_template,
2978					.count = CAST6_ENC_TEST_VECTORS
2979				},
2980				.dec = {
2981					.vecs = cast6_dec_tv_template,
2982					.count = CAST6_DEC_TEST_VECTORS
2983				}
2984			}
2985		}
2986	}, {
2987		.alg = "ecb(cipher_null)",
2988		.test = alg_test_null,
2989	}, {
2990		.alg = "ecb(des)",
2991		.test = alg_test_skcipher,
2992		.suite = {
2993			.cipher = {
2994				.enc = {
2995					.vecs = des_enc_tv_template,
2996					.count = DES_ENC_TEST_VECTORS
2997				},
2998				.dec = {
2999					.vecs = des_dec_tv_template,
3000					.count = DES_DEC_TEST_VECTORS
3001				}
3002			}
3003		}
3004	}, {
3005		.alg = "ecb(des3_ede)",
3006		.test = alg_test_skcipher,
3007		.fips_allowed = 1,
3008		.suite = {
3009			.cipher = {
3010				.enc = {
3011					.vecs = des3_ede_enc_tv_template,
3012					.count = DES3_EDE_ENC_TEST_VECTORS
3013				},
3014				.dec = {
3015					.vecs = des3_ede_dec_tv_template,
3016					.count = DES3_EDE_DEC_TEST_VECTORS
3017				}
3018			}
3019		}
3020	}, {
3021		.alg = "ecb(fcrypt)",
3022		.test = alg_test_skcipher,
3023		.suite = {
3024			.cipher = {
3025				.enc = {
3026					.vecs = fcrypt_pcbc_enc_tv_template,
3027					.count = 1
3028				},
3029				.dec = {
3030					.vecs = fcrypt_pcbc_dec_tv_template,
3031					.count = 1
3032				}
3033			}
3034		}
3035	}, {
3036		.alg = "ecb(khazad)",
3037		.test = alg_test_skcipher,
3038		.suite = {
3039			.cipher = {
3040				.enc = {
3041					.vecs = khazad_enc_tv_template,
3042					.count = KHAZAD_ENC_TEST_VECTORS
3043				},
3044				.dec = {
3045					.vecs = khazad_dec_tv_template,
3046					.count = KHAZAD_DEC_TEST_VECTORS
3047				}
3048			}
3049		}
3050	}, {
3051		.alg = "ecb(seed)",
3052		.test = alg_test_skcipher,
3053		.suite = {
3054			.cipher = {
3055				.enc = {
3056					.vecs = seed_enc_tv_template,
3057					.count = SEED_ENC_TEST_VECTORS
3058				},
3059				.dec = {
3060					.vecs = seed_dec_tv_template,
3061					.count = SEED_DEC_TEST_VECTORS
3062				}
3063			}
3064		}
3065	}, {
3066		.alg = "ecb(serpent)",
3067		.test = alg_test_skcipher,
3068		.suite = {
3069			.cipher = {
3070				.enc = {
3071					.vecs = serpent_enc_tv_template,
3072					.count = SERPENT_ENC_TEST_VECTORS
3073				},
3074				.dec = {
3075					.vecs = serpent_dec_tv_template,
3076					.count = SERPENT_DEC_TEST_VECTORS
3077				}
3078			}
3079		}
3080	}, {
3081		.alg = "ecb(tea)",
3082		.test = alg_test_skcipher,
3083		.suite = {
3084			.cipher = {
3085				.enc = {
3086					.vecs = tea_enc_tv_template,
3087					.count = TEA_ENC_TEST_VECTORS
3088				},
3089				.dec = {
3090					.vecs = tea_dec_tv_template,
3091					.count = TEA_DEC_TEST_VECTORS
3092				}
3093			}
3094		}
3095	}, {
3096		.alg = "ecb(tnepres)",
3097		.test = alg_test_skcipher,
3098		.suite = {
3099			.cipher = {
3100				.enc = {
3101					.vecs = tnepres_enc_tv_template,
3102					.count = TNEPRES_ENC_TEST_VECTORS
3103				},
3104				.dec = {
3105					.vecs = tnepres_dec_tv_template,
3106					.count = TNEPRES_DEC_TEST_VECTORS
3107				}
3108			}
3109		}
3110	}, {
3111		.alg = "ecb(twofish)",
3112		.test = alg_test_skcipher,
3113		.suite = {
3114			.cipher = {
3115				.enc = {
3116					.vecs = tf_enc_tv_template,
3117					.count = TF_ENC_TEST_VECTORS
3118				},
3119				.dec = {
3120					.vecs = tf_dec_tv_template,
3121					.count = TF_DEC_TEST_VECTORS
3122				}
3123			}
3124		}
3125	}, {
3126		.alg = "ecb(xeta)",
3127		.test = alg_test_skcipher,
3128		.suite = {
3129			.cipher = {
3130				.enc = {
3131					.vecs = xeta_enc_tv_template,
3132					.count = XETA_ENC_TEST_VECTORS
3133				},
3134				.dec = {
3135					.vecs = xeta_dec_tv_template,
3136					.count = XETA_DEC_TEST_VECTORS
3137				}
3138			}
3139		}
3140	}, {
3141		.alg = "ecb(xtea)",
3142		.test = alg_test_skcipher,
3143		.suite = {
3144			.cipher = {
3145				.enc = {
3146					.vecs = xtea_enc_tv_template,
3147					.count = XTEA_ENC_TEST_VECTORS
3148				},
3149				.dec = {
3150					.vecs = xtea_dec_tv_template,
3151					.count = XTEA_DEC_TEST_VECTORS
3152				}
3153			}
3154		}
3155	}, {
3156		.alg = "gcm(aes)",
3157		.test = alg_test_aead,
3158		.fips_allowed = 1,
3159		.suite = {
3160			.aead = {
3161				.enc = {
3162					.vecs = aes_gcm_enc_tv_template,
3163					.count = AES_GCM_ENC_TEST_VECTORS
3164				},
3165				.dec = {
3166					.vecs = aes_gcm_dec_tv_template,
3167					.count = AES_GCM_DEC_TEST_VECTORS
3168				}
3169			}
3170		}
3171	}, {
3172		.alg = "ghash",
3173		.test = alg_test_hash,
3174		.fips_allowed = 1,
3175		.suite = {
3176			.hash = {
3177				.vecs = ghash_tv_template,
3178				.count = GHASH_TEST_VECTORS
3179			}
3180		}
3181	}, {
3182		.alg = "hmac(crc32)",
3183		.test = alg_test_hash,
3184		.suite = {
3185			.hash = {
3186				.vecs = bfin_crc_tv_template,
3187				.count = BFIN_CRC_TEST_VECTORS
3188			}
3189		}
3190	}, {
3191		.alg = "hmac(md5)",
3192		.test = alg_test_hash,
3193		.suite = {
3194			.hash = {
3195				.vecs = hmac_md5_tv_template,
3196				.count = HMAC_MD5_TEST_VECTORS
3197			}
3198		}
3199	}, {
3200		.alg = "hmac(rmd128)",
3201		.test = alg_test_hash,
3202		.suite = {
3203			.hash = {
3204				.vecs = hmac_rmd128_tv_template,
3205				.count = HMAC_RMD128_TEST_VECTORS
3206			}
3207		}
3208	}, {
3209		.alg = "hmac(rmd160)",
3210		.test = alg_test_hash,
3211		.suite = {
3212			.hash = {
3213				.vecs = hmac_rmd160_tv_template,
3214				.count = HMAC_RMD160_TEST_VECTORS
3215			}
3216		}
3217	}, {
3218		.alg = "hmac(sha1)",
3219		.test = alg_test_hash,
3220		.fips_allowed = 1,
3221		.suite = {
3222			.hash = {
3223				.vecs = hmac_sha1_tv_template,
3224				.count = HMAC_SHA1_TEST_VECTORS
3225			}
3226		}
3227	}, {
3228		.alg = "hmac(sha224)",
3229		.test = alg_test_hash,
3230		.fips_allowed = 1,
3231		.suite = {
3232			.hash = {
3233				.vecs = hmac_sha224_tv_template,
3234				.count = HMAC_SHA224_TEST_VECTORS
3235			}
3236		}
3237	}, {
3238		.alg = "hmac(sha256)",
3239		.test = alg_test_hash,
3240		.fips_allowed = 1,
3241		.suite = {
3242			.hash = {
3243				.vecs = hmac_sha256_tv_template,
3244				.count = HMAC_SHA256_TEST_VECTORS
3245			}
3246		}
3247	}, {
3248		.alg = "hmac(sha384)",
3249		.test = alg_test_hash,
3250		.fips_allowed = 1,
3251		.suite = {
3252			.hash = {
3253				.vecs = hmac_sha384_tv_template,
3254				.count = HMAC_SHA384_TEST_VECTORS
3255			}
3256		}
3257	}, {
3258		.alg = "hmac(sha512)",
3259		.test = alg_test_hash,
3260		.fips_allowed = 1,
3261		.suite = {
3262			.hash = {
3263				.vecs = hmac_sha512_tv_template,
3264				.count = HMAC_SHA512_TEST_VECTORS
3265			}
3266		}
3267	}, {
3268		.alg = "jitterentropy_rng",
3269		.fips_allowed = 1,
3270		.test = alg_test_null,
3271	}, {
3272		.alg = "kw(aes)",
3273		.test = alg_test_skcipher,
3274		.fips_allowed = 1,
3275		.suite = {
3276			.cipher = {
3277				.enc = {
3278					.vecs = aes_kw_enc_tv_template,
3279					.count = ARRAY_SIZE(aes_kw_enc_tv_template)
3280				},
3281				.dec = {
3282					.vecs = aes_kw_dec_tv_template,
3283					.count = ARRAY_SIZE(aes_kw_dec_tv_template)
3284				}
3285			}
3286		}
3287	}, {
3288		.alg = "lrw(aes)",
3289		.test = alg_test_skcipher,
3290		.suite = {
3291			.cipher = {
3292				.enc = {
3293					.vecs = aes_lrw_enc_tv_template,
3294					.count = AES_LRW_ENC_TEST_VECTORS
3295				},
3296				.dec = {
3297					.vecs = aes_lrw_dec_tv_template,
3298					.count = AES_LRW_DEC_TEST_VECTORS
3299				}
3300			}
3301		}
3302	}, {
3303		.alg = "lrw(camellia)",
3304		.test = alg_test_skcipher,
3305		.suite = {
3306			.cipher = {
3307				.enc = {
3308					.vecs = camellia_lrw_enc_tv_template,
3309					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
3310				},
3311				.dec = {
3312					.vecs = camellia_lrw_dec_tv_template,
3313					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
3314				}
3315			}
3316		}
3317	}, {
3318		.alg = "lrw(cast6)",
3319		.test = alg_test_skcipher,
3320		.suite = {
3321			.cipher = {
3322				.enc = {
3323					.vecs = cast6_lrw_enc_tv_template,
3324					.count = CAST6_LRW_ENC_TEST_VECTORS
3325				},
3326				.dec = {
3327					.vecs = cast6_lrw_dec_tv_template,
3328					.count = CAST6_LRW_DEC_TEST_VECTORS
3329				}
3330			}
3331		}
3332	}, {
3333		.alg = "lrw(serpent)",
3334		.test = alg_test_skcipher,
3335		.suite = {
3336			.cipher = {
3337				.enc = {
3338					.vecs = serpent_lrw_enc_tv_template,
3339					.count = SERPENT_LRW_ENC_TEST_VECTORS
3340				},
3341				.dec = {
3342					.vecs = serpent_lrw_dec_tv_template,
3343					.count = SERPENT_LRW_DEC_TEST_VECTORS
3344				}
3345			}
3346		}
3347	}, {
3348		.alg = "lrw(twofish)",
3349		.test = alg_test_skcipher,
3350		.suite = {
3351			.cipher = {
3352				.enc = {
3353					.vecs = tf_lrw_enc_tv_template,
3354					.count = TF_LRW_ENC_TEST_VECTORS
3355				},
3356				.dec = {
3357					.vecs = tf_lrw_dec_tv_template,
3358					.count = TF_LRW_DEC_TEST_VECTORS
3359				}
3360			}
3361		}
3362	}, {
3363		.alg = "lz4",
3364		.test = alg_test_comp,
3365		.fips_allowed = 1,
3366		.suite = {
3367			.comp = {
3368				.comp = {
3369					.vecs = lz4_comp_tv_template,
3370					.count = LZ4_COMP_TEST_VECTORS
3371				},
3372				.decomp = {
3373					.vecs = lz4_decomp_tv_template,
3374					.count = LZ4_DECOMP_TEST_VECTORS
3375				}
3376			}
3377		}
3378	}, {
3379		.alg = "lz4hc",
3380		.test = alg_test_comp,
3381		.fips_allowed = 1,
3382		.suite = {
3383			.comp = {
3384				.comp = {
3385					.vecs = lz4hc_comp_tv_template,
3386					.count = LZ4HC_COMP_TEST_VECTORS
3387				},
3388				.decomp = {
3389					.vecs = lz4hc_decomp_tv_template,
3390					.count = LZ4HC_DECOMP_TEST_VECTORS
3391				}
3392			}
3393		}
3394	}, {
3395		.alg = "lzo",
3396		.test = alg_test_comp,
3397		.fips_allowed = 1,
3398		.suite = {
3399			.comp = {
3400				.comp = {
3401					.vecs = lzo_comp_tv_template,
3402					.count = LZO_COMP_TEST_VECTORS
3403				},
3404				.decomp = {
3405					.vecs = lzo_decomp_tv_template,
3406					.count = LZO_DECOMP_TEST_VECTORS
3407				}
3408			}
3409		}
3410	}, {
3411		.alg = "md4",
3412		.test = alg_test_hash,
3413		.suite = {
3414			.hash = {
3415				.vecs = md4_tv_template,
3416				.count = MD4_TEST_VECTORS
3417			}
3418		}
3419	}, {
3420		.alg = "md5",
3421		.test = alg_test_hash,
3422		.suite = {
3423			.hash = {
3424				.vecs = md5_tv_template,
3425				.count = MD5_TEST_VECTORS
3426			}
3427		}
3428	}, {
3429		.alg = "michael_mic",
3430		.test = alg_test_hash,
3431		.suite = {
3432			.hash = {
3433				.vecs = michael_mic_tv_template,
3434				.count = MICHAEL_MIC_TEST_VECTORS
3435			}
3436		}
3437	}, {
3438		.alg = "ofb(aes)",
3439		.test = alg_test_skcipher,
3440		.fips_allowed = 1,
3441		.suite = {
3442			.cipher = {
3443				.enc = {
3444					.vecs = aes_ofb_enc_tv_template,
3445					.count = AES_OFB_ENC_TEST_VECTORS
3446				},
3447				.dec = {
3448					.vecs = aes_ofb_dec_tv_template,
3449					.count = AES_OFB_DEC_TEST_VECTORS
3450				}
3451			}
3452		}
3453	}, {
3454		.alg = "pcbc(fcrypt)",
3455		.test = alg_test_skcipher,
3456		.suite = {
3457			.cipher = {
3458				.enc = {
3459					.vecs = fcrypt_pcbc_enc_tv_template,
3460					.count = FCRYPT_ENC_TEST_VECTORS
3461				},
3462				.dec = {
3463					.vecs = fcrypt_pcbc_dec_tv_template,
3464					.count = FCRYPT_DEC_TEST_VECTORS
3465				}
3466			}
3467		}
3468	}, {
3469		.alg = "poly1305",
3470		.test = alg_test_hash,
3471		.suite = {
3472			.hash = {
3473				.vecs = poly1305_tv_template,
3474				.count = POLY1305_TEST_VECTORS
3475			}
3476		}
3477	}, {
3478		.alg = "rfc3686(ctr(aes))",
3479		.test = alg_test_skcipher,
3480		.fips_allowed = 1,
3481		.suite = {
3482			.cipher = {
3483				.enc = {
3484					.vecs = aes_ctr_rfc3686_enc_tv_template,
3485					.count = AES_CTR_3686_ENC_TEST_VECTORS
3486				},
3487				.dec = {
3488					.vecs = aes_ctr_rfc3686_dec_tv_template,
3489					.count = AES_CTR_3686_DEC_TEST_VECTORS
3490				}
3491			}
3492		}
3493	}, {
3494		.alg = "rfc4106(gcm(aes))",
3495		.test = alg_test_aead,
3496		.fips_allowed = 1,
3497		.suite = {
3498			.aead = {
3499				.enc = {
3500					.vecs = aes_gcm_rfc4106_enc_tv_template,
3501					.count = AES_GCM_4106_ENC_TEST_VECTORS
3502				},
3503				.dec = {
3504					.vecs = aes_gcm_rfc4106_dec_tv_template,
3505					.count = AES_GCM_4106_DEC_TEST_VECTORS
3506				}
3507			}
3508		}
3509	}, {
3510		.alg = "rfc4309(ccm(aes))",
3511		.test = alg_test_aead,
3512		.fips_allowed = 1,
3513		.suite = {
3514			.aead = {
3515				.enc = {
3516					.vecs = aes_ccm_rfc4309_enc_tv_template,
3517					.count = AES_CCM_4309_ENC_TEST_VECTORS
3518				},
3519				.dec = {
3520					.vecs = aes_ccm_rfc4309_dec_tv_template,
3521					.count = AES_CCM_4309_DEC_TEST_VECTORS
3522				}
3523			}
3524		}
3525	}, {
3526		.alg = "rfc4543(gcm(aes))",
3527		.test = alg_test_aead,
3528		.suite = {
3529			.aead = {
3530				.enc = {
3531					.vecs = aes_gcm_rfc4543_enc_tv_template,
3532					.count = AES_GCM_4543_ENC_TEST_VECTORS
3533				},
3534				.dec = {
3535					.vecs = aes_gcm_rfc4543_dec_tv_template,
3536					.count = AES_GCM_4543_DEC_TEST_VECTORS
3537				},
3538			}
3539		}
3540	}, {
3541		.alg = "rfc7539(chacha20,poly1305)",
3542		.test = alg_test_aead,
3543		.suite = {
3544			.aead = {
3545				.enc = {
3546					.vecs = rfc7539_enc_tv_template,
3547					.count = RFC7539_ENC_TEST_VECTORS
3548				},
3549				.dec = {
3550					.vecs = rfc7539_dec_tv_template,
3551					.count = RFC7539_DEC_TEST_VECTORS
3552				},
3553			}
3554		}
3555	}, {
3556		.alg = "rfc7539esp(chacha20,poly1305)",
3557		.test = alg_test_aead,
3558		.suite = {
3559			.aead = {
3560				.enc = {
3561					.vecs = rfc7539esp_enc_tv_template,
3562					.count = RFC7539ESP_ENC_TEST_VECTORS
3563				},
3564				.dec = {
3565					.vecs = rfc7539esp_dec_tv_template,
3566					.count = RFC7539ESP_DEC_TEST_VECTORS
3567				},
3568			}
3569		}
3570	}, {
3571		.alg = "rmd128",
3572		.test = alg_test_hash,
3573		.suite = {
3574			.hash = {
3575				.vecs = rmd128_tv_template,
3576				.count = RMD128_TEST_VECTORS
3577			}
3578		}
3579	}, {
3580		.alg = "rmd160",
3581		.test = alg_test_hash,
3582		.suite = {
3583			.hash = {
3584				.vecs = rmd160_tv_template,
3585				.count = RMD160_TEST_VECTORS
3586			}
3587		}
3588	}, {
3589		.alg = "rmd256",
3590		.test = alg_test_hash,
3591		.suite = {
3592			.hash = {
3593				.vecs = rmd256_tv_template,
3594				.count = RMD256_TEST_VECTORS
3595			}
3596		}
3597	}, {
3598		.alg = "rmd320",
3599		.test = alg_test_hash,
3600		.suite = {
3601			.hash = {
3602				.vecs = rmd320_tv_template,
3603				.count = RMD320_TEST_VECTORS
3604			}
3605		}
3606	}, {
3607		.alg = "rsa",
3608		.test = alg_test_akcipher,
3609		.fips_allowed = 1,
3610		.suite = {
3611			.akcipher = {
3612				.vecs = rsa_tv_template,
3613				.count = RSA_TEST_VECTORS
3614			}
3615		}
3616	}, {
3617		.alg = "salsa20",
3618		.test = alg_test_skcipher,
3619		.suite = {
3620			.cipher = {
3621				.enc = {
3622					.vecs = salsa20_stream_enc_tv_template,
3623					.count = SALSA20_STREAM_ENC_TEST_VECTORS
3624				}
3625			}
3626		}
3627	}, {
3628		.alg = "sha1",
3629		.test = alg_test_hash,
3630		.fips_allowed = 1,
3631		.suite = {
3632			.hash = {
3633				.vecs = sha1_tv_template,
3634				.count = SHA1_TEST_VECTORS
3635			}
3636		}
3637	}, {
3638		.alg = "sha224",
3639		.test = alg_test_hash,
3640		.fips_allowed = 1,
3641		.suite = {
3642			.hash = {
3643				.vecs = sha224_tv_template,
3644				.count = SHA224_TEST_VECTORS
3645			}
3646		}
3647	}, {
3648		.alg = "sha256",
3649		.test = alg_test_hash,
3650		.fips_allowed = 1,
3651		.suite = {
3652			.hash = {
3653				.vecs = sha256_tv_template,
3654				.count = SHA256_TEST_VECTORS
3655			}
3656		}
3657	}, {
3658		.alg = "sha384",
3659		.test = alg_test_hash,
3660		.fips_allowed = 1,
3661		.suite = {
3662			.hash = {
3663				.vecs = sha384_tv_template,
3664				.count = SHA384_TEST_VECTORS
3665			}
3666		}
3667	}, {
3668		.alg = "sha512",
3669		.test = alg_test_hash,
3670		.fips_allowed = 1,
3671		.suite = {
3672			.hash = {
3673				.vecs = sha512_tv_template,
3674				.count = SHA512_TEST_VECTORS
3675			}
3676		}
3677	}, {
3678		.alg = "tgr128",
3679		.test = alg_test_hash,
3680		.suite = {
3681			.hash = {
3682				.vecs = tgr128_tv_template,
3683				.count = TGR128_TEST_VECTORS
3684			}
3685		}
3686	}, {
3687		.alg = "tgr160",
3688		.test = alg_test_hash,
3689		.suite = {
3690			.hash = {
3691				.vecs = tgr160_tv_template,
3692				.count = TGR160_TEST_VECTORS
3693			}
3694		}
3695	}, {
3696		.alg = "tgr192",
3697		.test = alg_test_hash,
3698		.suite = {
3699			.hash = {
3700				.vecs = tgr192_tv_template,
3701				.count = TGR192_TEST_VECTORS
3702			}
3703		}
3704	}, {
3705		.alg = "vmac(aes)",
3706		.test = alg_test_hash,
3707		.suite = {
3708			.hash = {
3709				.vecs = aes_vmac128_tv_template,
3710				.count = VMAC_AES_TEST_VECTORS
3711			}
3712		}
3713	}, {
3714		.alg = "wp256",
3715		.test = alg_test_hash,
3716		.suite = {
3717			.hash = {
3718				.vecs = wp256_tv_template,
3719				.count = WP256_TEST_VECTORS
3720			}
3721		}
3722	}, {
3723		.alg = "wp384",
3724		.test = alg_test_hash,
3725		.suite = {
3726			.hash = {
3727				.vecs = wp384_tv_template,
3728				.count = WP384_TEST_VECTORS
3729			}
3730		}
3731	}, {
3732		.alg = "wp512",
3733		.test = alg_test_hash,
3734		.suite = {
3735			.hash = {
3736				.vecs = wp512_tv_template,
3737				.count = WP512_TEST_VECTORS
3738			}
3739		}
3740	}, {
3741		.alg = "xcbc(aes)",
3742		.test = alg_test_hash,
3743		.suite = {
3744			.hash = {
3745				.vecs = aes_xcbc128_tv_template,
3746				.count = XCBC_AES_TEST_VECTORS
3747			}
3748		}
3749	}, {
3750		.alg = "xts(aes)",
3751		.test = alg_test_skcipher,
3752		.fips_allowed = 1,
3753		.suite = {
3754			.cipher = {
3755				.enc = {
3756					.vecs = aes_xts_enc_tv_template,
3757					.count = AES_XTS_ENC_TEST_VECTORS
3758				},
3759				.dec = {
3760					.vecs = aes_xts_dec_tv_template,
3761					.count = AES_XTS_DEC_TEST_VECTORS
3762				}
3763			}
3764		}
3765	}, {
3766		.alg = "xts(camellia)",
3767		.test = alg_test_skcipher,
3768		.suite = {
3769			.cipher = {
3770				.enc = {
3771					.vecs = camellia_xts_enc_tv_template,
3772					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
3773				},
3774				.dec = {
3775					.vecs = camellia_xts_dec_tv_template,
3776					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
3777				}
3778			}
3779		}
3780	}, {
3781		.alg = "xts(cast6)",
3782		.test = alg_test_skcipher,
3783		.suite = {
3784			.cipher = {
3785				.enc = {
3786					.vecs = cast6_xts_enc_tv_template,
3787					.count = CAST6_XTS_ENC_TEST_VECTORS
3788				},
3789				.dec = {
3790					.vecs = cast6_xts_dec_tv_template,
3791					.count = CAST6_XTS_DEC_TEST_VECTORS
3792				}
3793			}
3794		}
3795	}, {
3796		.alg = "xts(serpent)",
3797		.test = alg_test_skcipher,
3798		.suite = {
3799			.cipher = {
3800				.enc = {
3801					.vecs = serpent_xts_enc_tv_template,
3802					.count = SERPENT_XTS_ENC_TEST_VECTORS
3803				},
3804				.dec = {
3805					.vecs = serpent_xts_dec_tv_template,
3806					.count = SERPENT_XTS_DEC_TEST_VECTORS
3807				}
3808			}
3809		}
3810	}, {
3811		.alg = "xts(twofish)",
3812		.test = alg_test_skcipher,
3813		.suite = {
3814			.cipher = {
3815				.enc = {
3816					.vecs = tf_xts_enc_tv_template,
3817					.count = TF_XTS_ENC_TEST_VECTORS
3818				},
3819				.dec = {
3820					.vecs = tf_xts_dec_tv_template,
3821					.count = TF_XTS_DEC_TEST_VECTORS
3822				}
3823			}
3824		}
3825	}
3826};
3827
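/*
 * One-time sanity check of alg_test_descs[]: warn about adjacent
 * entries that are out of order or duplicated, since alg_find_test()
 * depends on the table being sorted by algorithm name.
 */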
3828static bool alg_test_descs_checked;
3829
3830static void alg_test_descs_check_order(void)
3831{
3832	int i;
3833
3834	/* only check once */
3835	if (alg_test_descs_checked)
3836		return;
3837
3838	alg_test_descs_checked = true;
3839
3840	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3841		int diff = strcmp(alg_test_descs[i - 1].alg,
3842				  alg_test_descs[i].alg);
3843
3844		if (WARN_ON(diff > 0)) {
3845			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3846				alg_test_descs[i - 1].alg,
3847				alg_test_descs[i].alg);
3848		}
3849
3850		if (WARN_ON(diff == 0)) {
3851			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3852				alg_test_descs[i].alg);
3853		}
3854	}
3855}
3856
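/*
 * Binary search over the sorted alg_test_descs[] table; returns the
 * index of the matching entry or -1 if the algorithm has no test.
 */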
3857static int alg_find_test(const char *alg)
3858{
3859	int start = 0;
3860	int end = ARRAY_SIZE(alg_test_descs);
3861
3862	while (start < end) {
3863		int i = (start + end) / 2;
3864		int diff = strcmp(alg_test_descs[i].alg, alg);
3865
3866		if (diff > 0) {
3867			end = i;
3868			continue;
3869		}
3870
3871		if (diff < 0) {
3872			start = i + 1;
3873			continue;
3874		}
3875
3876		return i;
3877	}
3878
3879	return -1;
3880}
3881
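/*
 * Self-test entry point, normally invoked by the crypto manager
 * (cryptomgr) when an algorithm instance is registered.  Bare block
 * ciphers (CRYPTO_ALG_TYPE_CIPHER) are tested through their ecb()
 * wrapper; otherwise both the generic name and the driver name are
 * looked up so that driver-specific vectors can run too.  In FIPS
 * mode, algorithms without a fips_allowed test entry are rejected and
 * any self-test failure panics the kernel.
 *
 * Illustrative call only ("cbc-aes-mydriver" is a made-up driver name
 * and the zero type/mask are placeholders):
 *
 *	err = alg_test("cbc-aes-mydriver", "cbc(aes)", 0, 0);
 */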
3882int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3883{
3884	int i;
3885	int j;
3886	int rc;
3887
3888	alg_test_descs_check_order();
3889
3890	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3891		char nalg[CRYPTO_MAX_ALG_NAME];
3892
3893		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3894		    sizeof(nalg))
3895			return -ENAMETOOLONG;
3896
3897		i = alg_find_test(nalg);
3898		if (i < 0)
3899			goto notest;
3900
3901		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3902			goto non_fips_alg;
3903
3904		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3905		goto test_done;
3906	}
3907
3908	i = alg_find_test(alg);
3909	j = alg_find_test(driver);
3910	if (i < 0 && j < 0)
3911		goto notest;
3912
3913	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3914			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3915		goto non_fips_alg;
3916
3917	rc = 0;
3918	if (i >= 0)
3919		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3920					     type, mask);
3921	if (j >= 0 && j != i)
3922		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3923					     type, mask);
3924
3925test_done:
3926	if (fips_enabled && rc)
3927		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3928
3929	if (fips_enabled && !rc)
3930		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3931
3932	return rc;
3933
3934notest:
3935	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3936	return 0;
3937non_fips_alg:
3938	return -EINVAL;
3939}
3940
3941#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3942
3943EXPORT_SYMBOL_GPL(alg_test);
v4.17
   1/*
   2 * Algorithm testing framework and tests.
   3 *
   4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
   5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
   6 * Copyright (c) 2007 Nokia Siemens Networks
   7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
   8 *
   9 * Updated RFC4106 AES-GCM testing.
  10 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  11 *             Adrian Hoban <adrian.hoban@intel.com>
  12 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
  13 *             Tadeusz Struk (tadeusz.struk@intel.com)
  14 *    Copyright (c) 2010, Intel Corporation.
  15 *
  16 * This program is free software; you can redistribute it and/or modify it
  17 * under the terms of the GNU General Public License as published by the Free
  18 * Software Foundation; either version 2 of the License, or (at your option)
  19 * any later version.
  20 *
  21 */
  22
  23#include <crypto/aead.h>
  24#include <crypto/hash.h>
  25#include <crypto/skcipher.h>
  26#include <linux/err.h>
  27#include <linux/fips.h>
  28#include <linux/module.h>
  29#include <linux/scatterlist.h>
  30#include <linux/slab.h>
  31#include <linux/string.h>
  32#include <crypto/rng.h>
  33#include <crypto/drbg.h>
  34#include <crypto/akcipher.h>
  35#include <crypto/kpp.h>
  36#include <crypto/acompress.h>
  37
  38#include "internal.h"
  39
  40static bool notests;
  41module_param(notests, bool, 0644);
  42MODULE_PARM_DESC(notests, "disable crypto self-tests");
  43
  44#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  45
  46/* a perfect nop */
  47int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  48{
  49	return 0;
  50}
  51
  52#else
  53
  54#include "testmgr.h"
  55
  56/*
  57 * Need slab memory for testing (size in number of pages).
  58 */
  59#define XBUFSIZE	8
  60
  61/*
  62 * Indexes into the xbuf to simulate cross-page access.
  63 */
  64#define IDX1		32
  65#define IDX2		32400
  66#define IDX3		1511
  67#define IDX4		8193
  68#define IDX5		22222
  69#define IDX6		17101
  70#define IDX7		27333
  71#define IDX8		3000
  72
  73/*
  74* Used by test_cipher()
  75*/
  76#define ENCRYPT 1
  77#define DECRYPT 0
  78
  79struct aead_test_suite {
  80	struct {
  81		const struct aead_testvec *vecs;
  82		unsigned int count;
  83	} enc, dec;
  84};
  85
  86struct cipher_test_suite {
  87	struct {
  88		const struct cipher_testvec *vecs;
  89		unsigned int count;
  90	} enc, dec;
  91};
  92
  93struct comp_test_suite {
  94	struct {
  95		const struct comp_testvec *vecs;
  96		unsigned int count;
  97	} comp, decomp;
  98};
  99
 100struct hash_test_suite {
 101	const struct hash_testvec *vecs;
 102	unsigned int count;
 103};
 104
 105struct cprng_test_suite {
 106	const struct cprng_testvec *vecs;
 107	unsigned int count;
 108};
 109
 110struct drbg_test_suite {
 111	const struct drbg_testvec *vecs;
 112	unsigned int count;
 113};
 114
 115struct akcipher_test_suite {
 116	const struct akcipher_testvec *vecs;
 117	unsigned int count;
 118};
 119
 120struct kpp_test_suite {
 121	const struct kpp_testvec *vecs;
 122	unsigned int count;
 123};
 124
 125struct alg_test_desc {
 126	const char *alg;
 127	int (*test)(const struct alg_test_desc *desc, const char *driver,
 128		    u32 type, u32 mask);
 129	int fips_allowed;	/* set if alg is allowed in fips mode */
 130
 131	union {
 132		struct aead_test_suite aead;
 133		struct cipher_test_suite cipher;
 134		struct comp_test_suite comp;
 135		struct hash_test_suite hash;
 136		struct cprng_test_suite cprng;
 137		struct drbg_test_suite drbg;
 138		struct akcipher_test_suite akcipher;
 139		struct kpp_test_suite kpp;
 140	} suite;
 141};
 142
 143static const unsigned int IDX[8] = {
 144	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 145
 146static void hexdump(unsigned char *buf, unsigned int len)
 147{
 148	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
 149			16, 1,
 150			buf, len, false);
 151}
 152
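/*
 * Scratch buffers for the tests: XBUFSIZE individually allocated
 * pages.  If any allocation fails, the pages obtained so far are
 * freed again before returning -ENOMEM.
 */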
 153static int testmgr_alloc_buf(char *buf[XBUFSIZE])
 154{
 155	int i;
 156
 157	for (i = 0; i < XBUFSIZE; i++) {
 158		buf[i] = (void *)__get_free_page(GFP_KERNEL);
 159		if (!buf[i])
 160			goto err_free_buf;
 161	}
 162
 163	return 0;
 164
 165err_free_buf:
 166	while (i-- > 0)
 167		free_page((unsigned long)buf[i]);
 168
 169	return -ENOMEM;
 170}
 171
 172static void testmgr_free_buf(char *buf[XBUFSIZE])
 173{
 174	int i;
 175
 176	for (i = 0; i < XBUFSIZE; i++)
 177		free_page((unsigned long)buf[i]);
 178}
 179
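/*
 * Verify that the result buffer still holds the fill byte 'c', i.e.
 * that the preceding operation did not write to req->result when it
 * was not supposed to.
 */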
 180static int ahash_guard_result(char *result, char c, int size)
 181{
 182	int i;
 183
 184	for (i = 0; i < size; i++) {
 185		if (result[i] != c)
 186			return -EINVAL;
 187	}
 188
 189	return 0;
 190}
 191
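/*
 * Exercise the export()/import() path: export the current hash state
 * (with a guard pattern appended to catch overruns), allocate a fresh
 * request, import the state into it and feed it the next chunk of
 * plaintext.  Also checks that neither export nor import touches
 * req->result.
 */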
 192static int ahash_partial_update(struct ahash_request **preq,
 193	struct crypto_ahash *tfm, const struct hash_testvec *template,
 194	void *hash_buff, int k, int temp, struct scatterlist *sg,
 195	const char *algo, char *result, struct crypto_wait *wait)
 196{
 197	char *state;
 198	struct ahash_request *req;
 199	int statesize, ret = -EINVAL;
 200	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
 201	int digestsize = crypto_ahash_digestsize(tfm);
 202
 203	req = *preq;
 204	statesize = crypto_ahash_statesize(
 205			crypto_ahash_reqtfm(req));
 206	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
 207	if (!state) {
 208		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
 209		goto out_nostate;
 210	}
 211	memcpy(state + statesize, guard, sizeof(guard));
 212	memset(result, 1, digestsize);
 213	ret = crypto_ahash_export(req, state);
 214	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
 215	if (ret) {
 216		pr_err("alg: hash: Failed to export() for %s\n", algo);
 217		goto out;
 218	}
 219	ret = ahash_guard_result(result, 1, digestsize);
 220	if (ret) {
 221		pr_err("alg: hash: Failed, export used req->result for %s\n",
 222		       algo);
 223		goto out;
 224	}
 225	ahash_request_free(req);
 226	req = ahash_request_alloc(tfm, GFP_KERNEL);
 227	if (!req) {
 228		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
 229		goto out_noreq;
 230	}
 231	ahash_request_set_callback(req,
 232		CRYPTO_TFM_REQ_MAY_BACKLOG,
 233		crypto_req_done, wait);
 234
 235	memcpy(hash_buff, template->plaintext + temp,
 236		template->tap[k]);
 237	sg_init_one(&sg[0], hash_buff, template->tap[k]);
 238	ahash_request_set_crypt(req, sg, result, template->tap[k]);
 239	ret = crypto_ahash_import(req, state);
 240	if (ret) {
 241		pr_err("alg: hash: Failed to import() for %s\n", algo);
 242		goto out;
 243	}
 244	ret = ahash_guard_result(result, 1, digestsize);
 245	if (ret) {
 246		pr_err("alg: hash: Failed, import used req->result for %s\n",
 247		       algo);
 248		goto out;
 249	}
 250	ret = crypto_wait_req(crypto_ahash_update(req), wait);
 251	if (ret)
 252		goto out;
 253	*preq = req;
 254	ret = 0;
 255	goto out_noreq;
 256out:
 257	ahash_request_free(req);
 258out_noreq:
 259	kfree(state);
 260out_nostate:
 261	return ret;
 262}
 263
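/*
 * Core hash test.  Three passes over the template vectors:
 *  1. one-shot tests on a linear buffer, via digest() or an explicit
 *     init()/update()/final() sequence,
 *  2. chunked tests that scatter the plaintext across pages according
 *     to template->np and template->tap[],
 *  3. partial-update tests built on ahash_partial_update().
 * A non-zero align_offset shifts the input buffer to exercise
 * unaligned data; the chunked and partial-update passes only run when
 * align_offset is zero.
 */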
 264static int __test_hash(struct crypto_ahash *tfm,
 265		       const struct hash_testvec *template, unsigned int tcount,
 266		       bool use_digest, const int align_offset)
 267{
 268	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 269	size_t digest_size = crypto_ahash_digestsize(tfm);
 270	unsigned int i, j, k, temp;
 271	struct scatterlist sg[8];
 272	char *result;
 273	char *key;
 274	struct ahash_request *req;
 275	struct crypto_wait wait;
 276	void *hash_buff;
 277	char *xbuf[XBUFSIZE];
 278	int ret = -ENOMEM;
 279
 280	result = kmalloc(digest_size, GFP_KERNEL);
 281	if (!result)
 282		return ret;
 283	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
 284	if (!key)
 285		goto out_nobuf;
 286	if (testmgr_alloc_buf(xbuf))
 287		goto out_nobuf;
 288
 289	crypto_init_wait(&wait);
 290
 291	req = ahash_request_alloc(tfm, GFP_KERNEL);
 292	if (!req) {
 293		printk(KERN_ERR "alg: hash: Failed to allocate request for "
 294		       "%s\n", algo);
 295		goto out_noreq;
 296	}
 297	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 298				   crypto_req_done, &wait);
 299
 300	j = 0;
 301	for (i = 0; i < tcount; i++) {
 302		if (template[i].np)
 303			continue;
 304
 305		ret = -EINVAL;
 306		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
 307			goto out;
 308
 309		j++;
 310		memset(result, 0, digest_size);
 311
 312		hash_buff = xbuf[0];
 313		hash_buff += align_offset;
 314
 315		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 316		sg_init_one(&sg[0], hash_buff, template[i].psize);
 317
 318		if (template[i].ksize) {
 319			crypto_ahash_clear_flags(tfm, ~0);
 320			if (template[i].ksize > MAX_KEYLEN) {
 321				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 322				       j, algo, template[i].ksize, MAX_KEYLEN);
 323				ret = -EINVAL;
 324				goto out;
 325			}
 326			memcpy(key, template[i].key, template[i].ksize);
 327			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 328			if (ret) {
 329				printk(KERN_ERR "alg: hash: setkey failed on "
 330				       "test %d for %s: ret=%d\n", j, algo,
 331				       -ret);
 332				goto out;
 333			}
 334		}
 335
 336		ahash_request_set_crypt(req, sg, result, template[i].psize);
 337		if (use_digest) {
 338			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 339			if (ret) {
 340				pr_err("alg: hash: digest failed on test %d "
 341				       "for %s: ret=%d\n", j, algo, -ret);
 342				goto out;
 343			}
 344		} else {
 345			memset(result, 1, digest_size);
 346			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
 347			if (ret) {
 348				pr_err("alg: hash: init failed on test %d "
 349				       "for %s: ret=%d\n", j, algo, -ret);
 350				goto out;
 351			}
 352			ret = ahash_guard_result(result, 1, digest_size);
 353			if (ret) {
 354				pr_err("alg: hash: init failed on test %d "
 355				       "for %s: used req->result\n", j, algo);
 356				goto out;
 357			}
 358			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
 359			if (ret) {
 360				pr_err("alg: hash: update failed on test %d "
 361				       "for %s: ret=%d\n", j, algo, -ret);
 362				goto out;
 363			}
 364			ret = ahash_guard_result(result, 1, digest_size);
 365			if (ret) {
 366				pr_err("alg: hash: update failed on test %d "
 367				       "for %s: used req->result\n", j, algo);
 368				goto out;
 369			}
 370			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
 371			if (ret) {
 372				pr_err("alg: hash: final failed on test %d "
 373				       "for %s: ret=%d\n", j, algo, -ret);
 374				goto out;
 375			}
 376		}
 377
 378		if (memcmp(result, template[i].digest,
 379			   crypto_ahash_digestsize(tfm))) {
 380			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
 381			       j, algo);
 382			hexdump(result, crypto_ahash_digestsize(tfm));
 383			ret = -EINVAL;
 384			goto out;
 385		}
 386	}
 387
 388	j = 0;
 389	for (i = 0; i < tcount; i++) {
  390		/* alignment tests are only done with contiguous buffers */
 391		if (align_offset != 0)
 392			break;
 393
 394		if (!template[i].np)
 395			continue;
 396
 397		j++;
 398		memset(result, 0, digest_size);
 399
 400		temp = 0;
 401		sg_init_table(sg, template[i].np);
 402		ret = -EINVAL;
 403		for (k = 0; k < template[i].np; k++) {
 404			if (WARN_ON(offset_in_page(IDX[k]) +
 405				    template[i].tap[k] > PAGE_SIZE))
 406				goto out;
 407			sg_set_buf(&sg[k],
 408				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
 409					  offset_in_page(IDX[k]),
 410					  template[i].plaintext + temp,
 411					  template[i].tap[k]),
 412				   template[i].tap[k]);
 413			temp += template[i].tap[k];
 414		}
 415
 416		if (template[i].ksize) {
 417			if (template[i].ksize > MAX_KEYLEN) {
 418				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 419				       j, algo, template[i].ksize, MAX_KEYLEN);
 420				ret = -EINVAL;
 421				goto out;
 422			}
 423			crypto_ahash_clear_flags(tfm, ~0);
 424			memcpy(key, template[i].key, template[i].ksize);
 425			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 426
 427			if (ret) {
 428				printk(KERN_ERR "alg: hash: setkey "
 429				       "failed on chunking test %d "
 430				       "for %s: ret=%d\n", j, algo, -ret);
 431				goto out;
 432			}
 433		}
 434
 435		ahash_request_set_crypt(req, sg, result, template[i].psize);
 436		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 437		if (ret) {
 438			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
 439			       j, algo, -ret);
 440			goto out;
 441		}
 442
 443		if (memcmp(result, template[i].digest,
 444			   crypto_ahash_digestsize(tfm))) {
 445			printk(KERN_ERR "alg: hash: Chunking test %d "
 446			       "failed for %s\n", j, algo);
 447			hexdump(result, crypto_ahash_digestsize(tfm));
 448			ret = -EINVAL;
 449			goto out;
 450		}
 451	}
 452
 453	/* partial update exercise */
 454	j = 0;
 455	for (i = 0; i < tcount; i++) {
  456		/* alignment tests are only done with contiguous buffers */
 457		if (align_offset != 0)
 458			break;
 459
 460		if (template[i].np < 2)
 461			continue;
 462
 463		j++;
 464		memset(result, 0, digest_size);
 465
 466		ret = -EINVAL;
 467		hash_buff = xbuf[0];
 468		memcpy(hash_buff, template[i].plaintext,
 469			template[i].tap[0]);
 470		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
 471
 472		if (template[i].ksize) {
 473			crypto_ahash_clear_flags(tfm, ~0);
 474			if (template[i].ksize > MAX_KEYLEN) {
 475				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 476					j, algo, template[i].ksize, MAX_KEYLEN);
 477				ret = -EINVAL;
 478				goto out;
 479			}
 480			memcpy(key, template[i].key, template[i].ksize);
 481			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 482			if (ret) {
 483				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
 484					j, algo, -ret);
 485				goto out;
 486			}
 487		}
 488
 489		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
 490		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
 491		if (ret) {
 492			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
 493				j, algo, -ret);
 494			goto out;
 495		}
 496		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
 497		if (ret) {
 498			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
 499				j, algo, -ret);
 500			goto out;
 501		}
 502
 503		temp = template[i].tap[0];
 504		for (k = 1; k < template[i].np; k++) {
 505			ret = ahash_partial_update(&req, tfm, &template[i],
 506				hash_buff, k, temp, &sg[0], algo, result,
 507				&wait);
 508			if (ret) {
 509				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
 510					j, algo, -ret);
 511				goto out_noreq;
 512			}
 513			temp += template[i].tap[k];
 514		}
 515		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
 516		if (ret) {
 517			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
 518				j, algo, -ret);
 519			goto out;
 520		}
 521		if (memcmp(result, template[i].digest,
 522			   crypto_ahash_digestsize(tfm))) {
 523			pr_err("alg: hash: Partial Test %d failed for %s\n",
 524			       j, algo);
 525			hexdump(result, crypto_ahash_digestsize(tfm));
 526			ret = -EINVAL;
 527			goto out;
 528		}
 529	}
 530
 531	ret = 0;
 532
 533out:
 534	ahash_request_free(req);
 535out_noreq:
 536	testmgr_free_buf(xbuf);
 537out_nobuf:
 538	kfree(key);
 539	kfree(result);
 540	return ret;
 541}
 542
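/*
 * Top-level hash test: run __test_hash() with an aligned buffer, with a
 * one-byte misalignment, and, if the transform reports an alignmask, at
 * an offset of alignmask + 1 to verify that the mask is honoured.
 */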
 543static int test_hash(struct crypto_ahash *tfm,
 544		     const struct hash_testvec *template,
 545		     unsigned int tcount, bool use_digest)
 546{
 547	unsigned int alignmask;
 548	int ret;
 549
 550	ret = __test_hash(tfm, template, tcount, use_digest, 0);
 551	if (ret)
 552		return ret;
 553
 554	/* test unaligned buffers, check with one byte offset */
 555	ret = __test_hash(tfm, template, tcount, use_digest, 1);
 556	if (ret)
 557		return ret;
 558
 559	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 560	if (alignmask) {
 561		/* Check if alignment mask for tfm is correctly set. */
 562		ret = __test_hash(tfm, template, tcount, use_digest,
 563				  alignmask + 1);
 564		if (ret)
 565			return ret;
 566	}
 567
 568	return 0;
 569}
 570
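/*
 * Run the AEAD vectors for one direction (@enc) against @tfm.  The first
 * loop covers linear buffers at @align_offset, optionally with a separate
 * destination (@diff_dst); the second loop covers the scatter-gathered
 * ("chunked") vectors and also checks the guard bytes after each
 * destination chunk.  Vectors marked novrfy must fail with -EBADMSG.
 */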
 571static int __test_aead(struct crypto_aead *tfm, int enc,
 572		       const struct aead_testvec *template, unsigned int tcount,
 573		       const bool diff_dst, const int align_offset)
 574{
 575	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
 576	unsigned int i, j, k, n, temp;
 577	int ret = -ENOMEM;
 578	char *q;
 579	char *key;
 580	struct aead_request *req;
 581	struct scatterlist *sg;
 582	struct scatterlist *sgout;
 583	const char *e, *d;
 584	struct crypto_wait wait;
 585	unsigned int authsize, iv_len;
 586	void *input;
 587	void *output;
 588	void *assoc;
 589	char *iv;
 590	char *xbuf[XBUFSIZE];
 591	char *xoutbuf[XBUFSIZE];
 592	char *axbuf[XBUFSIZE];
 593
 594	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
 595	if (!iv)
 596		return ret;
 597	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
 598	if (!key)
 599		goto out_noxbuf;
 600	if (testmgr_alloc_buf(xbuf))
 601		goto out_noxbuf;
 602	if (testmgr_alloc_buf(axbuf))
 603		goto out_noaxbuf;
 604	if (diff_dst && testmgr_alloc_buf(xoutbuf))
 605		goto out_nooutbuf;
 606
 607	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
 608	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
 609	if (!sg)
 610		goto out_nosg;
 611	sgout = &sg[16];
 612
 613	if (diff_dst)
 614		d = "-ddst";
 615	else
 616		d = "";
 617
 618	if (enc == ENCRYPT)
 619		e = "encryption";
 620	else
 621		e = "decryption";
 622
 623	crypto_init_wait(&wait);
 624
 625	req = aead_request_alloc(tfm, GFP_KERNEL);
 626	if (!req) {
 627		pr_err("alg: aead%s: Failed to allocate request for %s\n",
 628		       d, algo);
 629		goto out;
 630	}
 631
 632	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 633				  crypto_req_done, &wait);
 634
 635	iv_len = crypto_aead_ivsize(tfm);
 636
 637	for (i = 0, j = 0; i < tcount; i++) {
 638		if (template[i].np)
 639			continue;
 640
 641		j++;
 642
  643		/* some templates have no input data, but they will
  644		 * still touch the input buffer
  645		 */
 646		input = xbuf[0];
 647		input += align_offset;
 648		assoc = axbuf[0];
 649
 650		ret = -EINVAL;
 651		if (WARN_ON(align_offset + template[i].ilen >
 652			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
 653			goto out;
 654
 655		memcpy(input, template[i].input, template[i].ilen);
 656		memcpy(assoc, template[i].assoc, template[i].alen);
 657		if (template[i].iv)
 658			memcpy(iv, template[i].iv, iv_len);
 659		else
 660			memset(iv, 0, iv_len);
 661
 662		crypto_aead_clear_flags(tfm, ~0);
 663		if (template[i].wk)
 664			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 665
 666		if (template[i].klen > MAX_KEYLEN) {
 667			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
 668			       d, j, algo, template[i].klen,
 669			       MAX_KEYLEN);
 670			ret = -EINVAL;
 671			goto out;
 672		}
 673		memcpy(key, template[i].key, template[i].klen);
 674
 675		ret = crypto_aead_setkey(tfm, key, template[i].klen);
 676		if (template[i].fail == !ret) {
 677			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
 678			       d, j, algo, crypto_aead_get_flags(tfm));
 679			goto out;
 680		} else if (ret)
 681			continue;
 682
 683		authsize = abs(template[i].rlen - template[i].ilen);
 684		ret = crypto_aead_setauthsize(tfm, authsize);
 685		if (ret) {
 686			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
 687			       d, authsize, j, algo);
 688			goto out;
 689		}
 690
 691		k = !!template[i].alen;
 692		sg_init_table(sg, k + 1);
 693		sg_set_buf(&sg[0], assoc, template[i].alen);
 694		sg_set_buf(&sg[k], input,
 695			   template[i].ilen + (enc ? authsize : 0));
 696		output = input;
 697
 698		if (diff_dst) {
 699			sg_init_table(sgout, k + 1);
 700			sg_set_buf(&sgout[0], assoc, template[i].alen);
 701
 702			output = xoutbuf[0];
 703			output += align_offset;
 704			sg_set_buf(&sgout[k], output,
 705				   template[i].rlen + (enc ? 0 : authsize));
 706		}
 707
 708		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 709				       template[i].ilen, iv);
 710
 711		aead_request_set_ad(req, template[i].alen);
 712
 713		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
 714				      : crypto_aead_decrypt(req), &wait);
 715
 716		switch (ret) {
 717		case 0:
 718			if (template[i].novrfy) {
 719				/* verification was supposed to fail */
 720				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
 721				       d, e, j, algo);
 722				/* so really, we got a bad message */
 723				ret = -EBADMSG;
 724				goto out;
 725			}
 726			break;
 727		case -EBADMSG:
 728			if (template[i].novrfy)
 729				/* verification failure was expected */
 730				continue;
 731			/* fall through */
 732		default:
 733			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
 734			       d, e, j, algo, -ret);
 735			goto out;
 736		}
 737
 738		q = output;
 739		if (memcmp(q, template[i].result, template[i].rlen)) {
 740			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
 741			       d, j, e, algo);
 742			hexdump(q, template[i].rlen);
 743			ret = -EINVAL;
 744			goto out;
 745		}
 746	}
 747
 748	for (i = 0, j = 0; i < tcount; i++) {
  749		/* alignment tests are only done with contiguous buffers */
 750		if (align_offset != 0)
 751			break;
 752
 753		if (!template[i].np)
 754			continue;
 755
 756		j++;
 757
 758		if (template[i].iv)
 759			memcpy(iv, template[i].iv, iv_len);
 760		else
 761			memset(iv, 0, MAX_IVLEN);
 762
 763		crypto_aead_clear_flags(tfm, ~0);
 764		if (template[i].wk)
 765			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 766		if (template[i].klen > MAX_KEYLEN) {
 767			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
 768			       d, j, algo, template[i].klen, MAX_KEYLEN);
 769			ret = -EINVAL;
 770			goto out;
 771		}
 772		memcpy(key, template[i].key, template[i].klen);
 773
 774		ret = crypto_aead_setkey(tfm, key, template[i].klen);
 775		if (template[i].fail == !ret) {
 776			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
 777			       d, j, algo, crypto_aead_get_flags(tfm));
 778			goto out;
 779		} else if (ret)
 780			continue;
 781
 782		authsize = abs(template[i].rlen - template[i].ilen);
 783
 784		ret = -EINVAL;
 785		sg_init_table(sg, template[i].anp + template[i].np);
 786		if (diff_dst)
 787			sg_init_table(sgout, template[i].anp + template[i].np);
 788
 789		ret = -EINVAL;
 790		for (k = 0, temp = 0; k < template[i].anp; k++) {
 791			if (WARN_ON(offset_in_page(IDX[k]) +
 792				    template[i].atap[k] > PAGE_SIZE))
 793				goto out;
 794			sg_set_buf(&sg[k],
 795				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
 796					  offset_in_page(IDX[k]),
 797					  template[i].assoc + temp,
 798					  template[i].atap[k]),
 799				   template[i].atap[k]);
 800			if (diff_dst)
 801				sg_set_buf(&sgout[k],
 802					   axbuf[IDX[k] >> PAGE_SHIFT] +
 803					   offset_in_page(IDX[k]),
 804					   template[i].atap[k]);
 805			temp += template[i].atap[k];
 806		}
 807
 808		for (k = 0, temp = 0; k < template[i].np; k++) {
 809			if (WARN_ON(offset_in_page(IDX[k]) +
 810				    template[i].tap[k] > PAGE_SIZE))
 811				goto out;
 812
 813			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
 814			memcpy(q, template[i].input + temp, template[i].tap[k]);
 815			sg_set_buf(&sg[template[i].anp + k],
 816				   q, template[i].tap[k]);
 817
 818			if (diff_dst) {
 819				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 820				    offset_in_page(IDX[k]);
 821
 822				memset(q, 0, template[i].tap[k]);
 823
 824				sg_set_buf(&sgout[template[i].anp + k],
 825					   q, template[i].tap[k]);
 826			}
 827
 828			n = template[i].tap[k];
 829			if (k == template[i].np - 1 && enc)
 830				n += authsize;
 831			if (offset_in_page(q) + n < PAGE_SIZE)
 832				q[n] = 0;
 833
 834			temp += template[i].tap[k];
 835		}
 836
 837		ret = crypto_aead_setauthsize(tfm, authsize);
 838		if (ret) {
 839			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
 840			       d, authsize, j, algo);
 841			goto out;
 842		}
 843
 844		if (enc) {
 845			if (WARN_ON(sg[template[i].anp + k - 1].offset +
 846				    sg[template[i].anp + k - 1].length +
 847				    authsize > PAGE_SIZE)) {
 848				ret = -EINVAL;
 849				goto out;
 850			}
 851
 852			if (diff_dst)
 853				sgout[template[i].anp + k - 1].length +=
 854					authsize;
 855			sg[template[i].anp + k - 1].length += authsize;
 856		}
 857
 858		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 859				       template[i].ilen,
 860				       iv);
 861
 862		aead_request_set_ad(req, template[i].alen);
 863
 864		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
 865				      : crypto_aead_decrypt(req), &wait);
 866
 867		switch (ret) {
 868		case 0:
 869			if (template[i].novrfy) {
 870				/* verification was supposed to fail */
 871				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
 872				       d, e, j, algo);
 873				/* so really, we got a bad message */
 874				ret = -EBADMSG;
 875				goto out;
 876			}
 877			break;
 878		case -EBADMSG:
 879			if (template[i].novrfy)
 880				/* verification failure was expected */
 881				continue;
 882			/* fall through */
 883		default:
 884			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
 885			       d, e, j, algo, -ret);
 886			goto out;
 887		}
 888
 889		ret = -EINVAL;
 890		for (k = 0, temp = 0; k < template[i].np; k++) {
 891			if (diff_dst)
 892				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 893				    offset_in_page(IDX[k]);
 894			else
 895				q = xbuf[IDX[k] >> PAGE_SHIFT] +
 896				    offset_in_page(IDX[k]);
 897
 898			n = template[i].tap[k];
 899			if (k == template[i].np - 1)
 900				n += enc ? authsize : -authsize;
 901
 902			if (memcmp(q, template[i].result + temp, n)) {
 903				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
 904				       d, j, e, k, algo);
 905				hexdump(q, n);
 906				goto out;
 907			}
 908
 909			q += n;
 910			if (k == template[i].np - 1 && !enc) {
 911				if (!diff_dst &&
 912					memcmp(q, template[i].input +
 913					      temp + n, authsize))
 914					n = authsize;
 915				else
 916					n = 0;
 917			} else {
 918				for (n = 0; offset_in_page(q + n) && q[n]; n++)
 919					;
 920			}
 921			if (n) {
 922				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
 923				       d, j, e, k, algo, n);
 924				hexdump(q, n);
 925				goto out;
 926			}
 927
 928			temp += template[i].tap[k];
 929		}
 930	}
 931
 932	ret = 0;
 933
 934out:
 935	aead_request_free(req);
 936	kfree(sg);
 937out_nosg:
 938	if (diff_dst)
 939		testmgr_free_buf(xoutbuf);
 940out_nooutbuf:
 941	testmgr_free_buf(axbuf);
 942out_noaxbuf:
 943	testmgr_free_buf(xbuf);
 944out_noxbuf:
 945	kfree(key);
 946	kfree(iv);
 947	return ret;
 948}
 949
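/*
 * Top-level AEAD test: exercise dst == src, dst != src, a one-byte
 * misaligned buffer and, if an alignmask is set, an offset of
 * alignmask + 1.
 */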
 950static int test_aead(struct crypto_aead *tfm, int enc,
 951		     const struct aead_testvec *template, unsigned int tcount)
 952{
 953	unsigned int alignmask;
 954	int ret;
 955
 956	/* test 'dst == src' case */
 957	ret = __test_aead(tfm, enc, template, tcount, false, 0);
 958	if (ret)
 959		return ret;
 960
 961	/* test 'dst != src' case */
 962	ret = __test_aead(tfm, enc, template, tcount, true, 0);
 963	if (ret)
 964		return ret;
 965
 966	/* test unaligned buffers, check with one byte offset */
 967	ret = __test_aead(tfm, enc, template, tcount, true, 1);
 968	if (ret)
 969		return ret;
 970
 971	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 972	if (alignmask) {
 973		/* Check if alignment mask for tfm is correctly set. */
 974		ret = __test_aead(tfm, enc, template, tcount, true,
 975				  alignmask + 1);
 976		if (ret)
 977			return ret;
 978	}
 979
 980	return 0;
 981}
 982
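/*
 * Test a single-block cipher: encrypt or decrypt each linear vector one
 * block at a time with crypto_cipher_{encrypt,decrypt}_one() and compare
 * the result with the expected output.
 */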
 983static int test_cipher(struct crypto_cipher *tfm, int enc,
 984		       const struct cipher_testvec *template,
 985		       unsigned int tcount)
 986{
 987	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
 988	unsigned int i, j, k;
 989	char *q;
 990	const char *e;
 991	void *data;
 992	char *xbuf[XBUFSIZE];
 993	int ret = -ENOMEM;
 994
 995	if (testmgr_alloc_buf(xbuf))
 996		goto out_nobuf;
 997
 998	if (enc == ENCRYPT)
 999	        e = "encryption";
1000	else
1001		e = "decryption";
1002
1003	j = 0;
1004	for (i = 0; i < tcount; i++) {
1005		if (template[i].np)
1006			continue;
1007
1008		if (fips_enabled && template[i].fips_skip)
1009			continue;
1010
1011		j++;
1012
1013		ret = -EINVAL;
1014		if (WARN_ON(template[i].ilen > PAGE_SIZE))
1015			goto out;
1016
1017		data = xbuf[0];
1018		memcpy(data, template[i].input, template[i].ilen);
1019
1020		crypto_cipher_clear_flags(tfm, ~0);
1021		if (template[i].wk)
1022			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1023
1024		ret = crypto_cipher_setkey(tfm, template[i].key,
1025					   template[i].klen);
1026		if (template[i].fail == !ret) {
1027			printk(KERN_ERR "alg: cipher: setkey failed "
1028			       "on test %d for %s: flags=%x\n", j,
1029			       algo, crypto_cipher_get_flags(tfm));
1030			goto out;
1031		} else if (ret)
1032			continue;
1033
1034		for (k = 0; k < template[i].ilen;
1035		     k += crypto_cipher_blocksize(tfm)) {
1036			if (enc)
1037				crypto_cipher_encrypt_one(tfm, data + k,
1038							  data + k);
1039			else
1040				crypto_cipher_decrypt_one(tfm, data + k,
1041							  data + k);
1042		}
1043
1044		q = data;
1045		if (memcmp(q, template[i].result, template[i].rlen)) {
1046			printk(KERN_ERR "alg: cipher: Test %d failed "
1047			       "on %s for %s\n", j, e, algo);
1048			hexdump(q, template[i].rlen);
1049			ret = -EINVAL;
1050			goto out;
1051		}
1052	}
1053
1054	ret = 0;
1055
1056out:
1057	testmgr_free_buf(xbuf);
1058out_nobuf:
1059	return ret;
1060}
1061
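/*
 * Run the skcipher vectors for one direction (@enc): linear buffers at
 * @align_offset (optionally with a separate destination), then the
 * scatter-gathered vectors, checking the output data, the output IV where
 * the vector provides one, and the guard bytes after each chunk.
 */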
1062static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1063			   const struct cipher_testvec *template,
1064			   unsigned int tcount,
1065			   const bool diff_dst, const int align_offset)
1066{
1067	const char *algo =
1068		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1069	unsigned int i, j, k, n, temp;
1070	char *q;
1071	struct skcipher_request *req;
1072	struct scatterlist sg[8];
1073	struct scatterlist sgout[8];
1074	const char *e, *d;
1075	struct crypto_wait wait;
1076	void *data;
1077	char iv[MAX_IVLEN];
1078	char *xbuf[XBUFSIZE];
1079	char *xoutbuf[XBUFSIZE];
1080	int ret = -ENOMEM;
1081	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1082
1083	if (testmgr_alloc_buf(xbuf))
1084		goto out_nobuf;
1085
1086	if (diff_dst && testmgr_alloc_buf(xoutbuf))
1087		goto out_nooutbuf;
1088
1089	if (diff_dst)
1090		d = "-ddst";
1091	else
1092		d = "";
1093
1094	if (enc == ENCRYPT)
1095	        e = "encryption";
1096	else
1097		e = "decryption";
1098
1099	crypto_init_wait(&wait);
1100
1101	req = skcipher_request_alloc(tfm, GFP_KERNEL);
1102	if (!req) {
1103		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1104		       d, algo);
1105		goto out;
1106	}
1107
1108	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1109				      crypto_req_done, &wait);
1110
1111	j = 0;
1112	for (i = 0; i < tcount; i++) {
1113		if (template[i].np && !template[i].also_non_np)
1114			continue;
1115
1116		if (fips_enabled && template[i].fips_skip)
1117			continue;
1118
1119		if (template[i].iv)
1120			memcpy(iv, template[i].iv, ivsize);
1121		else
1122			memset(iv, 0, MAX_IVLEN);
1123
1124		j++;
1125		ret = -EINVAL;
1126		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1127			goto out;
1128
1129		data = xbuf[0];
1130		data += align_offset;
1131		memcpy(data, template[i].input, template[i].ilen);
1132
1133		crypto_skcipher_clear_flags(tfm, ~0);
1134		if (template[i].wk)
1135			crypto_skcipher_set_flags(tfm,
1136						  CRYPTO_TFM_REQ_WEAK_KEY);
1137
1138		ret = crypto_skcipher_setkey(tfm, template[i].key,
1139					     template[i].klen);
1140		if (template[i].fail == !ret) {
1141			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1142			       d, j, algo, crypto_skcipher_get_flags(tfm));
1143			goto out;
1144		} else if (ret)
1145			continue;
1146
1147		sg_init_one(&sg[0], data, template[i].ilen);
1148		if (diff_dst) {
1149			data = xoutbuf[0];
1150			data += align_offset;
1151			sg_init_one(&sgout[0], data, template[i].ilen);
1152		}
1153
1154		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1155					   template[i].ilen, iv);
1156		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1157				      crypto_skcipher_decrypt(req), &wait);
1158
1159		if (ret) {
1160			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1161			       d, e, j, algo, -ret);
1162			goto out;
1163		}
1164
1165		q = data;
1166		if (memcmp(q, template[i].result, template[i].rlen)) {
1167			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1168			       d, j, e, algo);
1169			hexdump(q, template[i].rlen);
1170			ret = -EINVAL;
1171			goto out;
1172		}
1173
1174		if (template[i].iv_out &&
1175		    memcmp(iv, template[i].iv_out,
1176			   crypto_skcipher_ivsize(tfm))) {
1177			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1178			       d, j, e, algo);
1179			hexdump(iv, crypto_skcipher_ivsize(tfm));
1180			ret = -EINVAL;
1181			goto out;
1182		}
1183	}
1184
1185	j = 0;
1186	for (i = 0; i < tcount; i++) {
 1187		/* alignment tests are only done with contiguous buffers */
1188		if (align_offset != 0)
1189			break;
1190
1191		if (!template[i].np)
1192			continue;
1193
1194		if (fips_enabled && template[i].fips_skip)
1195			continue;
1196
1197		if (template[i].iv)
1198			memcpy(iv, template[i].iv, ivsize);
1199		else
1200			memset(iv, 0, MAX_IVLEN);
1201
1202		j++;
1203		crypto_skcipher_clear_flags(tfm, ~0);
1204		if (template[i].wk)
1205			crypto_skcipher_set_flags(tfm,
1206						  CRYPTO_TFM_REQ_WEAK_KEY);
1207
1208		ret = crypto_skcipher_setkey(tfm, template[i].key,
1209					     template[i].klen);
1210		if (template[i].fail == !ret) {
1211			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1212			       d, j, algo, crypto_skcipher_get_flags(tfm));
1213			goto out;
1214		} else if (ret)
1215			continue;
1216
1217		temp = 0;
1218		ret = -EINVAL;
1219		sg_init_table(sg, template[i].np);
1220		if (diff_dst)
1221			sg_init_table(sgout, template[i].np);
1222		for (k = 0; k < template[i].np; k++) {
1223			if (WARN_ON(offset_in_page(IDX[k]) +
1224				    template[i].tap[k] > PAGE_SIZE))
1225				goto out;
1226
1227			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1228
1229			memcpy(q, template[i].input + temp, template[i].tap[k]);
1230
1231			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1232				q[template[i].tap[k]] = 0;
1233
1234			sg_set_buf(&sg[k], q, template[i].tap[k]);
1235			if (diff_dst) {
1236				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1237				    offset_in_page(IDX[k]);
1238
1239				sg_set_buf(&sgout[k], q, template[i].tap[k]);
1240
1241				memset(q, 0, template[i].tap[k]);
1242				if (offset_in_page(q) +
1243				    template[i].tap[k] < PAGE_SIZE)
1244					q[template[i].tap[k]] = 0;
1245			}
1246
1247			temp += template[i].tap[k];
1248		}
1249
1250		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1251					   template[i].ilen, iv);
1252
1253		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1254				      crypto_skcipher_decrypt(req), &wait);
1255
1256		if (ret) {
1257			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1258			       d, e, j, algo, -ret);
1259			goto out;
1260		}
1261
1262		temp = 0;
1263		ret = -EINVAL;
1264		for (k = 0; k < template[i].np; k++) {
1265			if (diff_dst)
1266				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1267				    offset_in_page(IDX[k]);
1268			else
1269				q = xbuf[IDX[k] >> PAGE_SHIFT] +
1270				    offset_in_page(IDX[k]);
1271
1272			if (memcmp(q, template[i].result + temp,
1273				   template[i].tap[k])) {
1274				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1275				       d, j, e, k, algo);
1276				hexdump(q, template[i].tap[k]);
1277				goto out;
1278			}
1279
1280			q += template[i].tap[k];
1281			for (n = 0; offset_in_page(q + n) && q[n]; n++)
1282				;
1283			if (n) {
1284				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1285				       d, j, e, k, algo, n);
1286				hexdump(q, n);
1287				goto out;
1288			}
1289			temp += template[i].tap[k];
1290		}
1291	}
1292
1293	ret = 0;
1294
1295out:
1296	skcipher_request_free(req);
1297	if (diff_dst)
1298		testmgr_free_buf(xoutbuf);
1299out_nooutbuf:
1300	testmgr_free_buf(xbuf);
1301out_nobuf:
1302	return ret;
1303}
1304
1305static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1306			 const struct cipher_testvec *template,
1307			 unsigned int tcount)
1308{
1309	unsigned int alignmask;
1310	int ret;
1311
1312	/* test 'dst == src' case */
1313	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1314	if (ret)
1315		return ret;
1316
1317	/* test 'dst != src' case */
1318	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1319	if (ret)
1320		return ret;
1321
1322	/* test unaligned buffers, check with one byte offset */
1323	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1324	if (ret)
1325		return ret;
1326
1327	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1328	if (alignmask) {
1329		/* Check if alignment mask for tfm is correctly set. */
1330		ret = __test_skcipher(tfm, enc, template, tcount, true,
1331				      alignmask + 1);
1332		if (ret)
1333			return ret;
1334	}
1335
1336	return 0;
1337}
1338
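/*
 * Test the legacy synchronous compression interface (crypto_comp):
 * compress and decompress each vector and compare both the output length
 * and the output data with the template.
 */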
1339static int test_comp(struct crypto_comp *tfm,
1340		     const struct comp_testvec *ctemplate,
1341		     const struct comp_testvec *dtemplate,
1342		     int ctcount, int dtcount)
1343{
1344	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1345	unsigned int i;
1346	char result[COMP_BUF_SIZE];
1347	int ret;
1348
1349	for (i = 0; i < ctcount; i++) {
1350		int ilen;
1351		unsigned int dlen = COMP_BUF_SIZE;
1352
1353		memset(result, 0, sizeof (result));
1354
1355		ilen = ctemplate[i].inlen;
1356		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1357		                           ilen, result, &dlen);
1358		if (ret) {
1359			printk(KERN_ERR "alg: comp: compression failed "
1360			       "on test %d for %s: ret=%d\n", i + 1, algo,
1361			       -ret);
1362			goto out;
1363		}
1364
1365		if (dlen != ctemplate[i].outlen) {
1366			printk(KERN_ERR "alg: comp: Compression test %d "
1367			       "failed for %s: output len = %d\n", i + 1, algo,
1368			       dlen);
1369			ret = -EINVAL;
1370			goto out;
1371		}
1372
1373		if (memcmp(result, ctemplate[i].output, dlen)) {
1374			printk(KERN_ERR "alg: comp: Compression test %d "
1375			       "failed for %s\n", i + 1, algo);
1376			hexdump(result, dlen);
1377			ret = -EINVAL;
1378			goto out;
1379		}
1380	}
1381
1382	for (i = 0; i < dtcount; i++) {
1383		int ilen;
1384		unsigned int dlen = COMP_BUF_SIZE;
1385
1386		memset(result, 0, sizeof (result));
1387
1388		ilen = dtemplate[i].inlen;
1389		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1390		                             ilen, result, &dlen);
1391		if (ret) {
1392			printk(KERN_ERR "alg: comp: decompression failed "
1393			       "on test %d for %s: ret=%d\n", i + 1, algo,
1394			       -ret);
1395			goto out;
1396		}
1397
1398		if (dlen != dtemplate[i].outlen) {
1399			printk(KERN_ERR "alg: comp: Decompression test %d "
1400			       "failed for %s: output len = %d\n", i + 1, algo,
1401			       dlen);
1402			ret = -EINVAL;
1403			goto out;
1404		}
1405
1406		if (memcmp(result, dtemplate[i].output, dlen)) {
1407			printk(KERN_ERR "alg: comp: Decompression test %d "
1408			       "failed for %s\n", i + 1, algo);
1409			hexdump(result, dlen);
1410			ret = -EINVAL;
1411			goto out;
1412		}
1413	}
1414
1415	ret = 0;
1416
1417out:
1418	return ret;
1419}
1420
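/*
 * Test the acomp interface: compress each vector and verify the result by
 * decompressing it back to the original input, then run the dedicated
 * decompression vectors and compare against the expected output.
 */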
1421static int test_acomp(struct crypto_acomp *tfm,
1422		      const struct comp_testvec *ctemplate,
1423		      const struct comp_testvec *dtemplate,
1424		      int ctcount, int dtcount)
1425{
1426	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1427	unsigned int i;
1428	char *output, *decomp_out;
1429	int ret;
1430	struct scatterlist src, dst;
1431	struct acomp_req *req;
1432	struct crypto_wait wait;
1433
1434	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1435	if (!output)
1436		return -ENOMEM;
1437
1438	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1439	if (!decomp_out) {
1440		kfree(output);
1441		return -ENOMEM;
1442	}
1443
1444	for (i = 0; i < ctcount; i++) {
1445		unsigned int dlen = COMP_BUF_SIZE;
1446		int ilen = ctemplate[i].inlen;
1447		void *input_vec;
1448
1449		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1450		if (!input_vec) {
1451			ret = -ENOMEM;
1452			goto out;
1453		}
1454
1455		memset(output, 0, dlen);
1456		crypto_init_wait(&wait);
1457		sg_init_one(&src, input_vec, ilen);
1458		sg_init_one(&dst, output, dlen);
1459
1460		req = acomp_request_alloc(tfm);
1461		if (!req) {
1462			pr_err("alg: acomp: request alloc failed for %s\n",
1463			       algo);
1464			kfree(input_vec);
1465			ret = -ENOMEM;
1466			goto out;
1467		}
1468
1469		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1470		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1471					   crypto_req_done, &wait);
1472
1473		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
1474		if (ret) {
1475			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1476			       i + 1, algo, -ret);
1477			kfree(input_vec);
1478			acomp_request_free(req);
1479			goto out;
1480		}
1481
1482		ilen = req->dlen;
1483		dlen = COMP_BUF_SIZE;
1484		sg_init_one(&src, output, ilen);
1485		sg_init_one(&dst, decomp_out, dlen);
1486		crypto_init_wait(&wait);
1487		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1488
1489		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1490		if (ret) {
 1491			pr_err("alg: acomp: decompression failed on compression test %d for %s: ret=%d\n",
1492			       i + 1, algo, -ret);
1493			kfree(input_vec);
1494			acomp_request_free(req);
1495			goto out;
1496		}
1497
1498		if (req->dlen != ctemplate[i].inlen) {
1499			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1500			       i + 1, algo, req->dlen);
1501			ret = -EINVAL;
1502			kfree(input_vec);
1503			acomp_request_free(req);
1504			goto out;
1505		}
1506
1507		if (memcmp(input_vec, decomp_out, req->dlen)) {
1508			pr_err("alg: acomp: Compression test %d failed for %s\n",
1509			       i + 1, algo);
1510			hexdump(output, req->dlen);
1511			ret = -EINVAL;
1512			kfree(input_vec);
1513			acomp_request_free(req);
1514			goto out;
1515		}
1516
1517		kfree(input_vec);
1518		acomp_request_free(req);
1519	}
1520
1521	for (i = 0; i < dtcount; i++) {
1522		unsigned int dlen = COMP_BUF_SIZE;
1523		int ilen = dtemplate[i].inlen;
1524		void *input_vec;
1525
1526		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
1527		if (!input_vec) {
1528			ret = -ENOMEM;
1529			goto out;
1530		}
1531
1532		memset(output, 0, dlen);
1533		crypto_init_wait(&wait);
1534		sg_init_one(&src, input_vec, ilen);
1535		sg_init_one(&dst, output, dlen);
1536
1537		req = acomp_request_alloc(tfm);
1538		if (!req) {
1539			pr_err("alg: acomp: request alloc failed for %s\n",
1540			       algo);
1541			kfree(input_vec);
1542			ret = -ENOMEM;
1543			goto out;
1544		}
1545
1546		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1547		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1548					   crypto_req_done, &wait);
1549
1550		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1551		if (ret) {
1552			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1553			       i + 1, algo, -ret);
1554			kfree(input_vec);
1555			acomp_request_free(req);
1556			goto out;
1557		}
1558
1559		if (req->dlen != dtemplate[i].outlen) {
1560			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1561			       i + 1, algo, req->dlen);
1562			ret = -EINVAL;
1563			kfree(input_vec);
1564			acomp_request_free(req);
1565			goto out;
1566		}
1567
1568		if (memcmp(output, dtemplate[i].output, req->dlen)) {
1569			pr_err("alg: acomp: Decompression test %d failed for %s\n",
1570			       i + 1, algo);
1571			hexdump(output, req->dlen);
1572			ret = -EINVAL;
1573			kfree(input_vec);
1574			acomp_request_free(req);
1575			goto out;
1576		}
1577
1578		kfree(input_vec);
1579		acomp_request_free(req);
1580	}
1581
1582	ret = 0;
1583
1584out:
1585	kfree(decomp_out);
1586	kfree(output);
1587	return ret;
1588}
1589
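/*
 * Test a continuous PRNG: seed it with V || key || DT from the vector,
 * generate rlen bytes the requested number of times and compare the last
 * generated block with the expected output.
 */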
1590static int test_cprng(struct crypto_rng *tfm,
1591		      const struct cprng_testvec *template,
1592		      unsigned int tcount)
1593{
1594	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1595	int err = 0, i, j, seedsize;
1596	u8 *seed;
1597	char result[32];
1598
1599	seedsize = crypto_rng_seedsize(tfm);
1600
1601	seed = kmalloc(seedsize, GFP_KERNEL);
1602	if (!seed) {
1603		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1604		       "for %s\n", algo);
1605		return -ENOMEM;
1606	}
1607
1608	for (i = 0; i < tcount; i++) {
1609		memset(result, 0, 32);
1610
1611		memcpy(seed, template[i].v, template[i].vlen);
1612		memcpy(seed + template[i].vlen, template[i].key,
1613		       template[i].klen);
1614		memcpy(seed + template[i].vlen + template[i].klen,
1615		       template[i].dt, template[i].dtlen);
1616
1617		err = crypto_rng_reset(tfm, seed, seedsize);
1618		if (err) {
1619			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1620			       "for %s\n", algo);
1621			goto out;
1622		}
1623
1624		for (j = 0; j < template[i].loops; j++) {
1625			err = crypto_rng_get_bytes(tfm, result,
1626						   template[i].rlen);
1627			if (err < 0) {
1628				printk(KERN_ERR "alg: cprng: Failed to obtain "
1629				       "the correct amount of random data for "
1630				       "%s (requested %d)\n", algo,
1631				       template[i].rlen);
1632				goto out;
1633			}
1634		}
1635
1636		err = memcmp(result, template[i].result,
1637			     template[i].rlen);
1638		if (err) {
1639			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1640			       i, algo);
1641			hexdump(result, template[i].rlen);
1642			err = -EINVAL;
1643			goto out;
1644		}
1645	}
1646
1647out:
1648	kfree(seed);
1649	return err;
1650}
1651
1652static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1653			 u32 type, u32 mask)
1654{
1655	struct crypto_aead *tfm;
1656	int err = 0;
1657
1658	tfm = crypto_alloc_aead(driver, type, mask);
1659	if (IS_ERR(tfm)) {
1660		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1661		       "%ld\n", driver, PTR_ERR(tfm));
1662		return PTR_ERR(tfm);
1663	}
1664
1665	if (desc->suite.aead.enc.vecs) {
1666		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1667				desc->suite.aead.enc.count);
1668		if (err)
1669			goto out;
1670	}
1671
1672	if (!err && desc->suite.aead.dec.vecs)
1673		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1674				desc->suite.aead.dec.count);
1675
1676out:
1677	crypto_free_aead(tfm);
1678	return err;
1679}
1680
1681static int alg_test_cipher(const struct alg_test_desc *desc,
1682			   const char *driver, u32 type, u32 mask)
1683{
1684	struct crypto_cipher *tfm;
1685	int err = 0;
1686
1687	tfm = crypto_alloc_cipher(driver, type, mask);
1688	if (IS_ERR(tfm)) {
1689		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1690		       "%s: %ld\n", driver, PTR_ERR(tfm));
1691		return PTR_ERR(tfm);
1692	}
1693
1694	if (desc->suite.cipher.enc.vecs) {
1695		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1696				  desc->suite.cipher.enc.count);
1697		if (err)
1698			goto out;
1699	}
1700
1701	if (desc->suite.cipher.dec.vecs)
1702		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1703				  desc->suite.cipher.dec.count);
1704
1705out:
1706	crypto_free_cipher(tfm);
1707	return err;
1708}
1709
1710static int alg_test_skcipher(const struct alg_test_desc *desc,
1711			     const char *driver, u32 type, u32 mask)
1712{
1713	struct crypto_skcipher *tfm;
1714	int err = 0;
1715
1716	tfm = crypto_alloc_skcipher(driver, type, mask);
1717	if (IS_ERR(tfm)) {
1718		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1719		       "%s: %ld\n", driver, PTR_ERR(tfm));
1720		return PTR_ERR(tfm);
1721	}
1722
1723	if (desc->suite.cipher.enc.vecs) {
1724		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1725				    desc->suite.cipher.enc.count);
1726		if (err)
1727			goto out;
1728	}
1729
1730	if (desc->suite.cipher.dec.vecs)
1731		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1732				    desc->suite.cipher.dec.count);
1733
1734out:
1735	crypto_free_skcipher(tfm);
1736	return err;
1737}
1738
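/*
 * Compression algorithms may be registered either as acomp or as legacy
 * crypto_comp transforms; dispatch on the algorithm type bits and run the
 * matching test routine.
 */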
1739static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1740			 u32 type, u32 mask)
1741{
1742	struct crypto_comp *comp;
1743	struct crypto_acomp *acomp;
1744	int err;
1745	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1746
1747	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1748		acomp = crypto_alloc_acomp(driver, type, mask);
1749		if (IS_ERR(acomp)) {
1750			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1751			       driver, PTR_ERR(acomp));
1752			return PTR_ERR(acomp);
1753		}
1754		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1755				 desc->suite.comp.decomp.vecs,
1756				 desc->suite.comp.comp.count,
1757				 desc->suite.comp.decomp.count);
1758		crypto_free_acomp(acomp);
1759	} else {
1760		comp = crypto_alloc_comp(driver, type, mask);
1761		if (IS_ERR(comp)) {
1762			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1763			       driver, PTR_ERR(comp));
1764			return PTR_ERR(comp);
1765		}
1766
1767		err = test_comp(comp, desc->suite.comp.comp.vecs,
1768				desc->suite.comp.decomp.vecs,
1769				desc->suite.comp.comp.count,
1770				desc->suite.comp.decomp.count);
1771
1772		crypto_free_comp(comp);
1773	}
1774	return err;
1775}
1776
1777static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1778			 u32 type, u32 mask)
1779{
1780	struct crypto_ahash *tfm;
1781	int err;
1782
1783	tfm = crypto_alloc_ahash(driver, type, mask);
1784	if (IS_ERR(tfm)) {
1785		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1786		       "%ld\n", driver, PTR_ERR(tfm));
1787		return PTR_ERR(tfm);
1788	}
1789
1790	err = test_hash(tfm, desc->suite.hash.vecs,
1791			desc->suite.hash.count, true);
1792	if (!err)
1793		err = test_hash(tfm, desc->suite.hash.vecs,
1794				desc->suite.hash.count, false);
1795
1796	crypto_free_ahash(tfm);
1797	return err;
1798}
1799
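/*
 * In addition to the generic hash tests, check the shash interface of
 * crc32c: seed the descriptor context with a known value and verify that
 * ->final() returns its bitwise inverse.
 */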
1800static int alg_test_crc32c(const struct alg_test_desc *desc,
1801			   const char *driver, u32 type, u32 mask)
1802{
1803	struct crypto_shash *tfm;
1804	u32 val;
1805	int err;
1806
1807	err = alg_test_hash(desc, driver, type, mask);
1808	if (err)
1809		goto out;
1810
1811	tfm = crypto_alloc_shash(driver, type, mask);
1812	if (IS_ERR(tfm)) {
1813		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1814		       "%ld\n", driver, PTR_ERR(tfm));
1815		err = PTR_ERR(tfm);
1816		goto out;
1817	}
1818
1819	do {
1820		SHASH_DESC_ON_STACK(shash, tfm);
1821		u32 *ctx = (u32 *)shash_desc_ctx(shash);
1822
1823		shash->tfm = tfm;
1824		shash->flags = 0;
1825
1826		*ctx = le32_to_cpu(420553207);
1827		err = crypto_shash_final(shash, (u8 *)&val);
1828		if (err) {
1829			printk(KERN_ERR "alg: crc32c: Operation failed for "
1830			       "%s: %d\n", driver, err);
1831			break;
1832		}
1833
1834		if (val != ~420553207) {
1835			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1836			       "%d\n", driver, val);
1837			err = -EINVAL;
1838		}
1839	} while (0);
1840
1841	crypto_free_shash(tfm);
1842
1843out:
1844	return err;
1845}
1846
1847static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1848			  u32 type, u32 mask)
1849{
1850	struct crypto_rng *rng;
1851	int err;
1852
1853	rng = crypto_alloc_rng(driver, type, mask);
1854	if (IS_ERR(rng)) {
1855		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1856		       "%ld\n", driver, PTR_ERR(rng));
1857		return PTR_ERR(rng);
1858	}
1859
1860	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1861
1862	crypto_free_rng(rng);
1863
1864	return err;
1865}
1866
1867
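/*
 * Run one CAVS-style DRBG vector: instantiate the DRBG with test entropy
 * and the personalization string, generate two blocks of output with
 * additional input (using fresh test entropy when prediction resistance
 * is requested), and compare the second block with the expected output.
 */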
1868static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
1869			  const char *driver, u32 type, u32 mask)
1870{
1871	int ret = -EAGAIN;
1872	struct crypto_rng *drng;
1873	struct drbg_test_data test_data;
1874	struct drbg_string addtl, pers, testentropy;
1875	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1876
1877	if (!buf)
1878		return -ENOMEM;
1879
1880	drng = crypto_alloc_rng(driver, type, mask);
1881	if (IS_ERR(drng)) {
1882		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1883		       "%s\n", driver);
1884		kzfree(buf);
1885		return -ENOMEM;
1886	}
1887
1888	test_data.testentropy = &testentropy;
1889	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1890	drbg_string_fill(&pers, test->pers, test->perslen);
1891	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1892	if (ret) {
1893		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1894		goto outbuf;
1895	}
1896
1897	drbg_string_fill(&addtl, test->addtla, test->addtllen);
1898	if (pr) {
1899		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1900		ret = crypto_drbg_get_bytes_addtl_test(drng,
1901			buf, test->expectedlen, &addtl,	&test_data);
1902	} else {
1903		ret = crypto_drbg_get_bytes_addtl(drng,
1904			buf, test->expectedlen, &addtl);
1905	}
1906	if (ret < 0) {
1907		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1908		       "driver %s\n", driver);
1909		goto outbuf;
1910	}
1911
1912	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1913	if (pr) {
1914		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1915		ret = crypto_drbg_get_bytes_addtl_test(drng,
1916			buf, test->expectedlen, &addtl, &test_data);
1917	} else {
1918		ret = crypto_drbg_get_bytes_addtl(drng,
1919			buf, test->expectedlen, &addtl);
1920	}
1921	if (ret < 0) {
1922		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1923		       "driver %s\n", driver);
1924		goto outbuf;
1925	}
1926
1927	ret = memcmp(test->expected, buf, test->expectedlen);
1928
1929outbuf:
1930	crypto_free_rng(drng);
1931	kzfree(buf);
1932	return ret;
1933}
1934
1935
1936static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1937			 u32 type, u32 mask)
1938{
1939	int err = 0;
1940	int pr = 0;
1941	int i = 0;
1942	const struct drbg_testvec *template = desc->suite.drbg.vecs;
1943	unsigned int tcount = desc->suite.drbg.count;
1944
1945	if (0 == memcmp(driver, "drbg_pr_", 8))
1946		pr = 1;
1947
1948	for (i = 0; i < tcount; i++) {
1949		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1950		if (err) {
1951			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1952			       i, driver);
1953			err = -EINVAL;
1954			break;
1955		}
1956	}
1957	return err;
1958
1959}
1960
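/*
 * Run one key-agreement (kpp) vector: load party A's secret, generate A's
 * public key (or compare it with the expected value), compute the shared
 * secret from party B's public key and, for vectors with generated keys,
 * repeat the computation from B's side before comparing the two shared
 * secrets.
 */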
1961static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
1962		       const char *alg)
1963{
1964	struct kpp_request *req;
1965	void *input_buf = NULL;
1966	void *output_buf = NULL;
1967	void *a_public = NULL;
1968	void *a_ss = NULL;
1969	void *shared_secret = NULL;
1970	struct crypto_wait wait;
1971	unsigned int out_len_max;
1972	int err = -ENOMEM;
1973	struct scatterlist src, dst;
1974
1975	req = kpp_request_alloc(tfm, GFP_KERNEL);
1976	if (!req)
1977		return err;
1978
1979	crypto_init_wait(&wait);
1980
1981	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1982	if (err < 0)
1983		goto free_req;
1984
1985	out_len_max = crypto_kpp_maxsize(tfm);
1986	output_buf = kzalloc(out_len_max, GFP_KERNEL);
1987	if (!output_buf) {
1988		err = -ENOMEM;
1989		goto free_req;
1990	}
1991
1992	/* Use appropriate parameter as base */
1993	kpp_request_set_input(req, NULL, 0);
1994	sg_init_one(&dst, output_buf, out_len_max);
1995	kpp_request_set_output(req, &dst, out_len_max);
1996	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1997				 crypto_req_done, &wait);
1998
1999	/* Compute party A's public key */
2000	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
2001	if (err) {
2002		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
2003		       alg, err);
2004		goto free_output;
2005	}
2006
2007	if (vec->genkey) {
2008		/* Save party A's public key */
2009		a_public = kzalloc(out_len_max, GFP_KERNEL);
2010		if (!a_public) {
2011			err = -ENOMEM;
2012			goto free_output;
2013		}
2014		memcpy(a_public, sg_virt(req->dst), out_len_max);
2015	} else {
2016		/* Verify calculated public key */
2017		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
2018			   vec->expected_a_public_size)) {
2019			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
2020			       alg);
2021			err = -EINVAL;
2022			goto free_output;
2023		}
2024	}
2025
 2026	/* Calculate the shared secret using the counterpart's (B's) public key. */
2027	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
2028	if (!input_buf) {
2029		err = -ENOMEM;
2030		goto free_output;
2031	}
2032
2033	memcpy(input_buf, vec->b_public, vec->b_public_size);
2034	sg_init_one(&src, input_buf, vec->b_public_size);
2035	sg_init_one(&dst, output_buf, out_len_max);
2036	kpp_request_set_input(req, &src, vec->b_public_size);
2037	kpp_request_set_output(req, &dst, out_len_max);
2038	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2039				 crypto_req_done, &wait);
2040	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
2041	if (err) {
2042		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
2043		       alg, err);
2044		goto free_all;
2045	}
2046
2047	if (vec->genkey) {
2048		/* Save the shared secret obtained by party A */
2049		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
2050		if (!a_ss) {
2051			err = -ENOMEM;
2052			goto free_all;
2053		}
2054		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);
2055
2056		/*
2057		 * Calculate party B's shared secret by using party A's
2058		 * public key.
2059		 */
2060		err = crypto_kpp_set_secret(tfm, vec->b_secret,
2061					    vec->b_secret_size);
2062		if (err < 0)
2063			goto free_all;
2064
2065		sg_init_one(&src, a_public, vec->expected_a_public_size);
2066		sg_init_one(&dst, output_buf, out_len_max);
2067		kpp_request_set_input(req, &src, vec->expected_a_public_size);
2068		kpp_request_set_output(req, &dst, out_len_max);
2069		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2070					 crypto_req_done, &wait);
2071		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
2072				      &wait);
2073		if (err) {
2074			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
2075			       alg, err);
2076			goto free_all;
2077		}
2078
2079		shared_secret = a_ss;
2080	} else {
2081		shared_secret = (void *)vec->expected_ss;
2082	}
2083
 2084	/*
 2085	 * Verify the shared secret, from which the user will derive a
 2086	 * secret key with whatever KDF or hash they have chosen.
 2087	 */
2088	if (memcmp(shared_secret, sg_virt(req->dst),
2089		   vec->expected_ss_size)) {
2090		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2091		       alg);
2092		err = -EINVAL;
2093	}
2094
2095free_all:
2096	kfree(a_ss);
2097	kfree(input_buf);
2098free_output:
2099	kfree(a_public);
2100	kfree(output_buf);
2101free_req:
2102	kpp_request_free(req);
2103	return err;
2104}
2105
2106static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2107		    const struct kpp_testvec *vecs, unsigned int tcount)
2108{
2109	int ret, i;
2110
2111	for (i = 0; i < tcount; i++) {
2112		ret = do_test_kpp(tfm, vecs++, alg);
2113		if (ret) {
2114			pr_err("alg: %s: test failed on vector %d, err=%d\n",
2115			       alg, i + 1, ret);
2116			return ret;
2117		}
2118	}
2119	return 0;
2120}
2121
2122static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2123			u32 type, u32 mask)
2124{
2125	struct crypto_kpp *tfm;
2126	int err = 0;
2127
2128	tfm = crypto_alloc_kpp(driver, type, mask);
2129	if (IS_ERR(tfm)) {
2130		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2131		       driver, PTR_ERR(tfm));
2132		return PTR_ERR(tfm);
2133	}
2134	if (desc->suite.kpp.vecs)
2135		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2136			       desc->suite.kpp.count);
2137
2138	crypto_free_kpp(tfm);
2139	return err;
2140}
2141
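/*
 * Run one akcipher vector: load the public or private key, encrypt (or
 * sign) the message and compare with the expected ciphertext, then, for
 * private-key vectors, decrypt (or verify) it and check that the
 * recovered message matches the original.
 */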
2142static int test_akcipher_one(struct crypto_akcipher *tfm,
2143			     const struct akcipher_testvec *vecs)
2144{
2145	char *xbuf[XBUFSIZE];
2146	struct akcipher_request *req;
2147	void *outbuf_enc = NULL;
2148	void *outbuf_dec = NULL;
2149	struct crypto_wait wait;
2150	unsigned int out_len_max, out_len = 0;
2151	int err = -ENOMEM;
2152	struct scatterlist src, dst, src_tab[2];
2153
2154	if (testmgr_alloc_buf(xbuf))
2155		return err;
2156
2157	req = akcipher_request_alloc(tfm, GFP_KERNEL);
2158	if (!req)
2159		goto free_xbuf;
2160
2161	crypto_init_wait(&wait);
2162
2163	if (vecs->public_key_vec)
2164		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2165						  vecs->key_len);
2166	else
2167		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2168						   vecs->key_len);
2169	if (err)
2170		goto free_req;
2171
2172	err = -ENOMEM;
2173	out_len_max = crypto_akcipher_maxsize(tfm);
2174	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2175	if (!outbuf_enc)
2176		goto free_req;
2177
2178	if (WARN_ON(vecs->m_size > PAGE_SIZE))
2179		goto free_all;
2180
2181	memcpy(xbuf[0], vecs->m, vecs->m_size);
2182
2183	sg_init_table(src_tab, 2);
2184	sg_set_buf(&src_tab[0], xbuf[0], 8);
2185	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
2186	sg_init_one(&dst, outbuf_enc, out_len_max);
2187	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2188				   out_len_max);
2189	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2190				      crypto_req_done, &wait);
2191
2192	err = crypto_wait_req(vecs->siggen_sigver_test ?
2193			      /* Run asymmetric signature generation */
2194			      crypto_akcipher_sign(req) :
2195			      /* Run asymmetric encrypt */
2196			      crypto_akcipher_encrypt(req), &wait);
2197	if (err) {
2198		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
2199		goto free_all;
2200	}
2201	if (req->dst_len != vecs->c_size) {
2202		pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
2203		err = -EINVAL;
2204		goto free_all;
2205	}
2206	/* verify that encrypted message is equal to expected */
2207	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
2208		pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2209		hexdump(outbuf_enc, vecs->c_size);
2210		err = -EINVAL;
2211		goto free_all;
2212	}
2213	/* Don't invoke decrypt for vectors with public key */
2214	if (vecs->public_key_vec) {
2215		err = 0;
2216		goto free_all;
2217	}
2218	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2219	if (!outbuf_dec) {
2220		err = -ENOMEM;
2221		goto free_all;
2222	}
2223
2224	if (WARN_ON(vecs->c_size > PAGE_SIZE))
2225		goto free_all;
2226
2227	memcpy(xbuf[0], vecs->c, vecs->c_size);
2228
2229	sg_init_one(&src, xbuf[0], vecs->c_size);
2230	sg_init_one(&dst, outbuf_dec, out_len_max);
2231	crypto_init_wait(&wait);
2232	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2233
2234	err = crypto_wait_req(vecs->siggen_sigver_test ?
2235			      /* Run asymmetric signature verification */
2236			      crypto_akcipher_verify(req) :
2237			      /* Run asymmetric decrypt */
2238			      crypto_akcipher_decrypt(req), &wait);
2239	if (err) {
2240		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2241		goto free_all;
2242	}
2243	out_len = req->dst_len;
2244	if (out_len < vecs->m_size) {
2245		pr_err("alg: akcipher: decrypt test failed. "
2246		       "Invalid output len %u\n", out_len);
2247		err = -EINVAL;
2248		goto free_all;
2249	}
2250	/* verify that decrypted message is equal to the original msg */
2251	if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2252	    memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2253		   vecs->m_size)) {
2254		pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2255		hexdump(outbuf_dec, out_len);
2256		err = -EINVAL;
2257	}
2258free_all:
2259	kfree(outbuf_dec);
2260	kfree(outbuf_enc);
2261free_req:
2262	akcipher_request_free(req);
2263free_xbuf:
2264	testmgr_free_buf(xbuf);
2265	return err;
2266}
2267
2268static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2269			 const struct akcipher_testvec *vecs,
2270			 unsigned int tcount)
2271{
2272	const char *algo =
2273		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2274	int ret, i;
2275
2276	for (i = 0; i < tcount; i++) {
2277		ret = test_akcipher_one(tfm, vecs++);
2278		if (!ret)
2279			continue;
2280
2281		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2282		       i + 1, algo, ret);
2283		return ret;
2284	}
2285	return 0;
2286}
2287
2288static int alg_test_akcipher(const struct alg_test_desc *desc,
2289			     const char *driver, u32 type, u32 mask)
2290{
2291	struct crypto_akcipher *tfm;
2292	int err = 0;
2293
2294	tfm = crypto_alloc_akcipher(driver, type, mask);
2295	if (IS_ERR(tfm)) {
2296		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2297		       driver, PTR_ERR(tfm));
2298		return PTR_ERR(tfm);
2299	}
2300	if (desc->suite.akcipher.vecs)
2301		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2302				    desc->suite.akcipher.count);
2303
2304	crypto_free_akcipher(tfm);
2305	return err;
2306}
2307
2308static int alg_test_null(const struct alg_test_desc *desc,
2309			     const char *driver, u32 type, u32 mask)
2310{
2311	return 0;
2312}
2313
2314#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }
2315
2316/* Please keep this list sorted by algorithm name. */
2317static const struct alg_test_desc alg_test_descs[] = {
2318	{
2319		.alg = "ansi_cprng",
2320		.test = alg_test_cprng,
2321		.suite = {
2322			.cprng = __VECS(ansi_cprng_aes_tv_template)
2323		}
2324	}, {
2325		.alg = "authenc(hmac(md5),ecb(cipher_null))",
2326		.test = alg_test_aead,
2327		.suite = {
2328			.aead = {
2329				.enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
2330				.dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
2331			}
2332		}
2333	}, {
2334		.alg = "authenc(hmac(sha1),cbc(aes))",
2335		.test = alg_test_aead,
2336		.fips_allowed = 1,
2337		.suite = {
2338			.aead = {
2339				.enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
2340			}
2341		}
2342	}, {
2343		.alg = "authenc(hmac(sha1),cbc(des))",
2344		.test = alg_test_aead,
2345		.suite = {
2346			.aead = {
2347				.enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
2348			}
2349		}
2350	}, {
2351		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
2352		.test = alg_test_aead,
2353		.fips_allowed = 1,
2354		.suite = {
2355			.aead = {
2356				.enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
2357			}
2358		}
2359	}, {
2360		.alg = "authenc(hmac(sha1),ctr(aes))",
2361		.test = alg_test_null,
2362		.fips_allowed = 1,
2363	}, {
2364		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
2365		.test = alg_test_aead,
2366		.suite = {
2367			.aead = {
2368				.enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
2369				.dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
2370			}
2371		}
2372	}, {
2373		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2374		.test = alg_test_null,
2375		.fips_allowed = 1,
2376	}, {
2377		.alg = "authenc(hmac(sha224),cbc(des))",
2378		.test = alg_test_aead,
2379		.suite = {
2380			.aead = {
2381				.enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
2382			}
2383		}
2384	}, {
2385		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
2386		.test = alg_test_aead,
2387		.fips_allowed = 1,
2388		.suite = {
2389			.aead = {
2390				.enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
2391			}
2392		}
2393	}, {
2394		.alg = "authenc(hmac(sha256),cbc(aes))",
2395		.test = alg_test_aead,
2396		.fips_allowed = 1,
2397		.suite = {
2398			.aead = {
2399				.enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
2400			}
2401		}
2402	}, {
2403		.alg = "authenc(hmac(sha256),cbc(des))",
2404		.test = alg_test_aead,
2405		.suite = {
2406			.aead = {
2407				.enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
2408			}
2409		}
2410	}, {
2411		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
2412		.test = alg_test_aead,
2413		.fips_allowed = 1,
2414		.suite = {
2415			.aead = {
2416				.enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
2417			}
2418		}
2419	}, {
2420		.alg = "authenc(hmac(sha256),ctr(aes))",
2421		.test = alg_test_null,
2422		.fips_allowed = 1,
2423	}, {
2424		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2425		.test = alg_test_null,
2426		.fips_allowed = 1,
2427	}, {
2428		.alg = "authenc(hmac(sha384),cbc(des))",
2429		.test = alg_test_aead,
2430		.suite = {
2431			.aead = {
2432				.enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
2433			}
2434		}
2435	}, {
2436		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
2437		.test = alg_test_aead,
2438		.fips_allowed = 1,
2439		.suite = {
2440			.aead = {
2441				.enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
2442			}
2443		}
2444	}, {
2445		.alg = "authenc(hmac(sha384),ctr(aes))",
2446		.test = alg_test_null,
2447		.fips_allowed = 1,
2448	}, {
2449		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2450		.test = alg_test_null,
2451		.fips_allowed = 1,
2452	}, {
2453		.alg = "authenc(hmac(sha512),cbc(aes))",
2454		.fips_allowed = 1,
2455		.test = alg_test_aead,
2456		.suite = {
2457			.aead = {
2458				.enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
2459			}
2460		}
2461	}, {
2462		.alg = "authenc(hmac(sha512),cbc(des))",
2463		.test = alg_test_aead,
2464		.suite = {
2465			.aead = {
2466				.enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
2467			}
2468		}
2469	}, {
2470		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
2471		.test = alg_test_aead,
2472		.fips_allowed = 1,
2473		.suite = {
2474			.aead = {
2475				.enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
2476			}
2477		}
2478	}, {
2479		.alg = "authenc(hmac(sha512),ctr(aes))",
2480		.test = alg_test_null,
2481		.fips_allowed = 1,
2482	}, {
2483		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2484		.test = alg_test_null,
2485		.fips_allowed = 1,
2486	}, {
2487		.alg = "cbc(aes)",
2488		.test = alg_test_skcipher,
2489		.fips_allowed = 1,
2490		.suite = {
2491			.cipher = {
2492				.enc = __VECS(aes_cbc_enc_tv_template),
2493				.dec = __VECS(aes_cbc_dec_tv_template)
2494			}
2495		}
2496	}, {
2497		.alg = "cbc(anubis)",
2498		.test = alg_test_skcipher,
2499		.suite = {
2500			.cipher = {
2501				.enc = __VECS(anubis_cbc_enc_tv_template),
2502				.dec = __VECS(anubis_cbc_dec_tv_template)
2503			}
2504		}
2505	}, {
2506		.alg = "cbc(blowfish)",
2507		.test = alg_test_skcipher,
2508		.suite = {
2509			.cipher = {
2510				.enc = __VECS(bf_cbc_enc_tv_template),
2511				.dec = __VECS(bf_cbc_dec_tv_template)
2512			}
2513		}
2514	}, {
2515		.alg = "cbc(camellia)",
2516		.test = alg_test_skcipher,
2517		.suite = {
2518			.cipher = {
2519				.enc = __VECS(camellia_cbc_enc_tv_template),
2520				.dec = __VECS(camellia_cbc_dec_tv_template)
2521			}
2522		}
2523	}, {
2524		.alg = "cbc(cast5)",
2525		.test = alg_test_skcipher,
2526		.suite = {
2527			.cipher = {
2528				.enc = __VECS(cast5_cbc_enc_tv_template),
2529				.dec = __VECS(cast5_cbc_dec_tv_template)
2530			}
2531		}
2532	}, {
2533		.alg = "cbc(cast6)",
2534		.test = alg_test_skcipher,
2535		.suite = {
2536			.cipher = {
2537				.enc = __VECS(cast6_cbc_enc_tv_template),
2538				.dec = __VECS(cast6_cbc_dec_tv_template)
2539			}
2540		}
2541	}, {
2542		.alg = "cbc(des)",
2543		.test = alg_test_skcipher,
2544		.suite = {
2545			.cipher = {
2546				.enc = __VECS(des_cbc_enc_tv_template),
2547				.dec = __VECS(des_cbc_dec_tv_template)
2548			}
2549		}
2550	}, {
2551		.alg = "cbc(des3_ede)",
2552		.test = alg_test_skcipher,
2553		.fips_allowed = 1,
2554		.suite = {
2555			.cipher = {
2556				.enc = __VECS(des3_ede_cbc_enc_tv_template),
2557				.dec = __VECS(des3_ede_cbc_dec_tv_template)
2558			}
2559		}
2560	}, {
2561		.alg = "cbc(serpent)",
2562		.test = alg_test_skcipher,
2563		.suite = {
2564			.cipher = {
2565				.enc = __VECS(serpent_cbc_enc_tv_template),
2566				.dec = __VECS(serpent_cbc_dec_tv_template)
2567			}
2568		}
2569	}, {
2570		.alg = "cbc(twofish)",
2571		.test = alg_test_skcipher,
2572		.suite = {
2573			.cipher = {
2574				.enc = __VECS(tf_cbc_enc_tv_template),
2575				.dec = __VECS(tf_cbc_dec_tv_template)
2576			}
2577		}
2578	}, {
2579		.alg = "cbcmac(aes)",
2580		.fips_allowed = 1,
2581		.test = alg_test_hash,
2582		.suite = {
2583			.hash = __VECS(aes_cbcmac_tv_template)
2584		}
2585	}, {
2586		.alg = "ccm(aes)",
2587		.test = alg_test_aead,
2588		.fips_allowed = 1,
2589		.suite = {
2590			.aead = {
2591				.enc = __VECS(aes_ccm_enc_tv_template),
2592				.dec = __VECS(aes_ccm_dec_tv_template)
2593			}
2594		}
2595	}, {
2596		.alg = "chacha20",
2597		.test = alg_test_skcipher,
2598		.suite = {
2599			.cipher = {
2600				.enc = __VECS(chacha20_enc_tv_template),
2601				.dec = __VECS(chacha20_enc_tv_template),
2602			}
2603		}
2604	}, {
2605		.alg = "cmac(aes)",
2606		.fips_allowed = 1,
2607		.test = alg_test_hash,
2608		.suite = {
2609			.hash = __VECS(aes_cmac128_tv_template)
2610		}
2611	}, {
2612		.alg = "cmac(des3_ede)",
2613		.fips_allowed = 1,
2614		.test = alg_test_hash,
2615		.suite = {
2616			.hash = __VECS(des3_ede_cmac64_tv_template)
2617		}
2618	}, {
2619		.alg = "compress_null",
2620		.test = alg_test_null,
2621	}, {
2622		.alg = "crc32",
2623		.test = alg_test_hash,
2624		.suite = {
2625			.hash = __VECS(crc32_tv_template)
2626		}
2627	}, {
2628		.alg = "crc32c",
2629		.test = alg_test_crc32c,
2630		.fips_allowed = 1,
2631		.suite = {
2632			.hash = __VECS(crc32c_tv_template)
2633		}
2634	}, {
2635		.alg = "crct10dif",
2636		.test = alg_test_hash,
2637		.fips_allowed = 1,
2638		.suite = {
2639			.hash = __VECS(crct10dif_tv_template)
2640		}
2641	}, {
2642		.alg = "ctr(aes)",
2643		.test = alg_test_skcipher,
2644		.fips_allowed = 1,
2645		.suite = {
2646			.cipher = {
2647				.enc = __VECS(aes_ctr_enc_tv_template),
2648				.dec = __VECS(aes_ctr_dec_tv_template)
2649			}
2650		}
2651	}, {
2652		.alg = "ctr(blowfish)",
2653		.test = alg_test_skcipher,
2654		.suite = {
2655			.cipher = {
2656				.enc = __VECS(bf_ctr_enc_tv_template),
2657				.dec = __VECS(bf_ctr_dec_tv_template)
2658			}
2659		}
2660	}, {
2661		.alg = "ctr(camellia)",
2662		.test = alg_test_skcipher,
2663		.suite = {
2664			.cipher = {
2665				.enc = __VECS(camellia_ctr_enc_tv_template),
2666				.dec = __VECS(camellia_ctr_dec_tv_template)
2667			}
2668		}
2669	}, {
2670		.alg = "ctr(cast5)",
2671		.test = alg_test_skcipher,
2672		.suite = {
2673			.cipher = {
2674				.enc = __VECS(cast5_ctr_enc_tv_template),
2675				.dec = __VECS(cast5_ctr_dec_tv_template)
2676			}
2677		}
2678	}, {
2679		.alg = "ctr(cast6)",
2680		.test = alg_test_skcipher,
2681		.suite = {
2682			.cipher = {
2683				.enc = __VECS(cast6_ctr_enc_tv_template),
2684				.dec = __VECS(cast6_ctr_dec_tv_template)
2685			}
2686		}
2687	}, {
2688		.alg = "ctr(des)",
2689		.test = alg_test_skcipher,
2690		.suite = {
2691			.cipher = {
2692				.enc = __VECS(des_ctr_enc_tv_template),
2693				.dec = __VECS(des_ctr_dec_tv_template)
2694			}
2695		}
2696	}, {
2697		.alg = "ctr(des3_ede)",
2698		.test = alg_test_skcipher,
2699		.fips_allowed = 1,
2700		.suite = {
2701			.cipher = {
2702				.enc = __VECS(des3_ede_ctr_enc_tv_template),
2703				.dec = __VECS(des3_ede_ctr_dec_tv_template)
2704			}
2705		}
2706	}, {
2707		.alg = "ctr(serpent)",
2708		.test = alg_test_skcipher,
2709		.suite = {
2710			.cipher = {
2711				.enc = __VECS(serpent_ctr_enc_tv_template),
2712				.dec = __VECS(serpent_ctr_dec_tv_template)
2713			}
2714		}
2715	}, {
2716		.alg = "ctr(twofish)",
2717		.test = alg_test_skcipher,
2718		.suite = {
2719			.cipher = {
2720				.enc = __VECS(tf_ctr_enc_tv_template),
2721				.dec = __VECS(tf_ctr_dec_tv_template)
2722			}
2723		}
2724	}, {
2725		.alg = "cts(cbc(aes))",
2726		.test = alg_test_skcipher,
2727		.suite = {
2728			.cipher = {
2729				.enc = __VECS(cts_mode_enc_tv_template),
2730				.dec = __VECS(cts_mode_dec_tv_template)
2731			}
2732		}
2733	}, {
2734		.alg = "deflate",
2735		.test = alg_test_comp,
2736		.fips_allowed = 1,
2737		.suite = {
2738			.comp = {
2739				.comp = __VECS(deflate_comp_tv_template),
2740				.decomp = __VECS(deflate_decomp_tv_template)
2741			}
2742		}
2743	}, {
2744		.alg = "dh",
2745		.test = alg_test_kpp,
2746		.fips_allowed = 1,
2747		.suite = {
2748			.kpp = __VECS(dh_tv_template)
2749		}
2750	}, {
2751		.alg = "digest_null",
2752		.test = alg_test_null,
2753	}, {
2754		.alg = "drbg_nopr_ctr_aes128",
2755		.test = alg_test_drbg,
2756		.fips_allowed = 1,
2757		.suite = {
2758			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
2759		}
2760	}, {
2761		.alg = "drbg_nopr_ctr_aes192",
2762		.test = alg_test_drbg,
2763		.fips_allowed = 1,
2764		.suite = {
2765			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
2766		}
2767	}, {
2768		.alg = "drbg_nopr_ctr_aes256",
2769		.test = alg_test_drbg,
2770		.fips_allowed = 1,
2771		.suite = {
2772			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
2773		}
2774	}, {
2775		/*
2776		 * There is no need to specifically test the DRBG with every
2777		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2778		 */
2779		.alg = "drbg_nopr_hmac_sha1",
2780		.fips_allowed = 1,
2781		.test = alg_test_null,
2782	}, {
2783		.alg = "drbg_nopr_hmac_sha256",
2784		.test = alg_test_drbg,
2785		.fips_allowed = 1,
2786		.suite = {
2787			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
2788		}
2789	}, {
2790		/* covered by drbg_nopr_hmac_sha256 test */
2791		.alg = "drbg_nopr_hmac_sha384",
2792		.fips_allowed = 1,
2793		.test = alg_test_null,
2794	}, {
2795		.alg = "drbg_nopr_hmac_sha512",
2796		.test = alg_test_null,
2797		.fips_allowed = 1,
2798	}, {
2799		.alg = "drbg_nopr_sha1",
2800		.fips_allowed = 1,
2801		.test = alg_test_null,
2802	}, {
2803		.alg = "drbg_nopr_sha256",
2804		.test = alg_test_drbg,
2805		.fips_allowed = 1,
2806		.suite = {
2807			.drbg = __VECS(drbg_nopr_sha256_tv_template)
2808		}
2809	}, {
2810		/* covered by drbg_nopr_sha256 test */
2811		.alg = "drbg_nopr_sha384",
2812		.fips_allowed = 1,
2813		.test = alg_test_null,
2814	}, {
2815		.alg = "drbg_nopr_sha512",
2816		.fips_allowed = 1,
2817		.test = alg_test_null,
2818	}, {
2819		.alg = "drbg_pr_ctr_aes128",
2820		.test = alg_test_drbg,
2821		.fips_allowed = 1,
2822		.suite = {
2823			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
2824		}
2825	}, {
2826		/* covered by drbg_pr_ctr_aes128 test */
2827		.alg = "drbg_pr_ctr_aes192",
2828		.fips_allowed = 1,
2829		.test = alg_test_null,
2830	}, {
2831		.alg = "drbg_pr_ctr_aes256",
2832		.fips_allowed = 1,
2833		.test = alg_test_null,
2834	}, {
2835		.alg = "drbg_pr_hmac_sha1",
2836		.fips_allowed = 1,
2837		.test = alg_test_null,
2838	}, {
2839		.alg = "drbg_pr_hmac_sha256",
2840		.test = alg_test_drbg,
2841		.fips_allowed = 1,
2842		.suite = {
2843			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
2844		}
2845	}, {
2846		/* covered by drbg_pr_hmac_sha256 test */
2847		.alg = "drbg_pr_hmac_sha384",
2848		.fips_allowed = 1,
2849		.test = alg_test_null,
2850	}, {
2851		.alg = "drbg_pr_hmac_sha512",
2852		.test = alg_test_null,
2853		.fips_allowed = 1,
2854	}, {
2855		.alg = "drbg_pr_sha1",
2856		.fips_allowed = 1,
2857		.test = alg_test_null,
2858	}, {
2859		.alg = "drbg_pr_sha256",
2860		.test = alg_test_drbg,
2861		.fips_allowed = 1,
2862		.suite = {
2863			.drbg = __VECS(drbg_pr_sha256_tv_template)
2864		}
2865	}, {
2866		/* covered by drbg_pr_sha256 test */
2867		.alg = "drbg_pr_sha384",
2868		.fips_allowed = 1,
2869		.test = alg_test_null,
2870	}, {
2871		.alg = "drbg_pr_sha512",
2872		.fips_allowed = 1,
2873		.test = alg_test_null,
2874	}, {
2875		.alg = "ecb(aes)",
2876		.test = alg_test_skcipher,
2877		.fips_allowed = 1,
2878		.suite = {
2879			.cipher = {
2880				.enc = __VECS(aes_enc_tv_template),
2881				.dec = __VECS(aes_dec_tv_template)
2882			}
2883		}
2884	}, {
2885		.alg = "ecb(anubis)",
2886		.test = alg_test_skcipher,
2887		.suite = {
2888			.cipher = {
2889				.enc = __VECS(anubis_enc_tv_template),
2890				.dec = __VECS(anubis_dec_tv_template)
2891			}
2892		}
2893	}, {
2894		.alg = "ecb(arc4)",
2895		.test = alg_test_skcipher,
2896		.suite = {
2897			.cipher = {
2898				.enc = __VECS(arc4_enc_tv_template),
2899				.dec = __VECS(arc4_dec_tv_template)
2900			}
2901		}
2902	}, {
2903		.alg = "ecb(blowfish)",
2904		.test = alg_test_skcipher,
2905		.suite = {
2906			.cipher = {
2907				.enc = __VECS(bf_enc_tv_template),
2908				.dec = __VECS(bf_dec_tv_template)
2909			}
2910		}
2911	}, {
2912		.alg = "ecb(camellia)",
2913		.test = alg_test_skcipher,
2914		.suite = {
2915			.cipher = {
2916				.enc = __VECS(camellia_enc_tv_template),
2917				.dec = __VECS(camellia_dec_tv_template)
2918			}
2919		}
2920	}, {
2921		.alg = "ecb(cast5)",
2922		.test = alg_test_skcipher,
2923		.suite = {
2924			.cipher = {
2925				.enc = __VECS(cast5_enc_tv_template),
2926				.dec = __VECS(cast5_dec_tv_template)
2927			}
2928		}
2929	}, {
2930		.alg = "ecb(cast6)",
2931		.test = alg_test_skcipher,
2932		.suite = {
2933			.cipher = {
2934				.enc = __VECS(cast6_enc_tv_template),
2935				.dec = __VECS(cast6_dec_tv_template)
2936			}
2937		}
2938	}, {
2939		.alg = "ecb(cipher_null)",
2940		.test = alg_test_null,
2941		.fips_allowed = 1,
2942	}, {
2943		.alg = "ecb(des)",
2944		.test = alg_test_skcipher,
2945		.suite = {
2946			.cipher = {
2947				.enc = __VECS(des_enc_tv_template),
2948				.dec = __VECS(des_dec_tv_template)
2949			}
2950		}
2951	}, {
2952		.alg = "ecb(des3_ede)",
2953		.test = alg_test_skcipher,
2954		.fips_allowed = 1,
2955		.suite = {
2956			.cipher = {
2957				.enc = __VECS(des3_ede_enc_tv_template),
2958				.dec = __VECS(des3_ede_dec_tv_template)
2959			}
2960		}
2961	}, {
2962		.alg = "ecb(fcrypt)",
2963		.test = alg_test_skcipher,
2964		.suite = {
2965			.cipher = {
2966				.enc = {
2967					.vecs = fcrypt_pcbc_enc_tv_template,
2968					.count = 1
2969				},
2970				.dec = {
2971					.vecs = fcrypt_pcbc_dec_tv_template,
2972					.count = 1
2973				}
2974			}
2975		}
2976	}, {
2977		.alg = "ecb(khazad)",
2978		.test = alg_test_skcipher,
2979		.suite = {
2980			.cipher = {
2981				.enc = __VECS(khazad_enc_tv_template),
2982				.dec = __VECS(khazad_dec_tv_template)
2983			}
2984		}
2985	}, {
2986		.alg = "ecb(seed)",
2987		.test = alg_test_skcipher,
2988		.suite = {
2989			.cipher = {
2990				.enc = __VECS(seed_enc_tv_template),
2991				.dec = __VECS(seed_dec_tv_template)
2992			}
2993		}
2994	}, {
2995		.alg = "ecb(serpent)",
2996		.test = alg_test_skcipher,
2997		.suite = {
2998			.cipher = {
2999				.enc = __VECS(serpent_enc_tv_template),
3000				.dec = __VECS(serpent_dec_tv_template)
3001			}
3002		}
3003	}, {
3004		.alg = "ecb(sm4)",
3005		.test = alg_test_skcipher,
3006		.suite = {
3007			.cipher = {
3008				.enc = __VECS(sm4_enc_tv_template),
3009				.dec = __VECS(sm4_dec_tv_template)
3010			}
3011		}
3012	}, {
3013		.alg = "ecb(speck128)",
3014		.test = alg_test_skcipher,
3015		.suite = {
3016			.cipher = {
3017				.enc = __VECS(speck128_enc_tv_template),
3018				.dec = __VECS(speck128_dec_tv_template)
3019			}
3020		}
3021	}, {
3022		.alg = "ecb(speck64)",
3023		.test = alg_test_skcipher,
3024		.suite = {
3025			.cipher = {
3026				.enc = __VECS(speck64_enc_tv_template),
3027				.dec = __VECS(speck64_dec_tv_template)
3028			}
3029		}
3030	}, {
3031		.alg = "ecb(tea)",
3032		.test = alg_test_skcipher,
3033		.suite = {
3034			.cipher = {
3035				.enc = __VECS(tea_enc_tv_template),
3036				.dec = __VECS(tea_dec_tv_template)
3037			}
3038		}
3039	}, {
3040		.alg = "ecb(tnepres)",
3041		.test = alg_test_skcipher,
3042		.suite = {
3043			.cipher = {
3044				.enc = __VECS(tnepres_enc_tv_template),
3045				.dec = __VECS(tnepres_dec_tv_template)
3046			}
3047		}
3048	}, {
3049		.alg = "ecb(twofish)",
3050		.test = alg_test_skcipher,
3051		.suite = {
3052			.cipher = {
3053				.enc = __VECS(tf_enc_tv_template),
3054				.dec = __VECS(tf_dec_tv_template)
3055			}
3056		}
3057	}, {
3058		.alg = "ecb(xeta)",
3059		.test = alg_test_skcipher,
3060		.suite = {
3061			.cipher = {
3062				.enc = __VECS(xeta_enc_tv_template),
3063				.dec = __VECS(xeta_dec_tv_template)
3064			}
3065		}
3066	}, {
3067		.alg = "ecb(xtea)",
3068		.test = alg_test_skcipher,
3069		.suite = {
3070			.cipher = {
3071				.enc = __VECS(xtea_enc_tv_template),
3072				.dec = __VECS(xtea_dec_tv_template)
3073			}
3074		}
3075	}, {
3076		.alg = "ecdh",
3077		.test = alg_test_kpp,
3078		.fips_allowed = 1,
3079		.suite = {
3080			.kpp = __VECS(ecdh_tv_template)
3081		}
3082	}, {
3083		.alg = "gcm(aes)",
3084		.test = alg_test_aead,
3085		.fips_allowed = 1,
3086		.suite = {
3087			.aead = {
3088				.enc = __VECS(aes_gcm_enc_tv_template),
3089				.dec = __VECS(aes_gcm_dec_tv_template)
3090			}
3091		}
3092	}, {
3093		.alg = "ghash",
3094		.test = alg_test_hash,
3095		.fips_allowed = 1,
3096		.suite = {
3097			.hash = __VECS(ghash_tv_template)
3098		}
3099	}, {
3100		.alg = "hmac(crc32)",
3101		.test = alg_test_hash,
3102		.suite = {
3103			.hash = __VECS(bfin_crc_tv_template)
3104		}
3105	}, {
3106		.alg = "hmac(md5)",
3107		.test = alg_test_hash,
3108		.suite = {
3109			.hash = __VECS(hmac_md5_tv_template)
3110		}
3111	}, {
3112		.alg = "hmac(rmd128)",
3113		.test = alg_test_hash,
3114		.suite = {
3115			.hash = __VECS(hmac_rmd128_tv_template)
3116		}
3117	}, {
3118		.alg = "hmac(rmd160)",
3119		.test = alg_test_hash,
3120		.suite = {
3121			.hash = __VECS(hmac_rmd160_tv_template)
3122		}
3123	}, {
3124		.alg = "hmac(sha1)",
3125		.test = alg_test_hash,
3126		.fips_allowed = 1,
3127		.suite = {
3128			.hash = __VECS(hmac_sha1_tv_template)
3129		}
3130	}, {
3131		.alg = "hmac(sha224)",
3132		.test = alg_test_hash,
3133		.fips_allowed = 1,
3134		.suite = {
 
3136		}
3137	}, {
3138		.alg = "hmac(sha256)",
3139		.test = alg_test_hash,
3140		.fips_allowed = 1,
3141		.suite = {
3142			.hash = __VECS(hmac_sha256_tv_template)
3143		}
3144	}, {
3145		.alg = "hmac(sha3-224)",
3146		.test = alg_test_hash,
3147		.fips_allowed = 1,
3148		.suite = {
3149			.hash = __VECS(hmac_sha3_224_tv_template)
3150		}
3151	}, {
3152		.alg = "hmac(sha3-256)",
3153		.test = alg_test_hash,
3154		.fips_allowed = 1,
3155		.suite = {
3156			.hash = __VECS(hmac_sha3_256_tv_template)
3157		}
3158	}, {
3159		.alg = "hmac(sha3-384)",
3160		.test = alg_test_hash,
3161		.fips_allowed = 1,
3162		.suite = {
3163			.hash = __VECS(hmac_sha3_384_tv_template)
3164		}
3165	}, {
3166		.alg = "hmac(sha3-512)",
3167		.test = alg_test_hash,
3168		.fips_allowed = 1,
3169		.suite = {
3170			.hash = __VECS(hmac_sha3_512_tv_template)
3171		}
3172	}, {
3173		.alg = "hmac(sha384)",
3174		.test = alg_test_hash,
3175		.fips_allowed = 1,
3176		.suite = {
3177			.hash = __VECS(hmac_sha384_tv_template)
3178		}
3179	}, {
3180		.alg = "hmac(sha512)",
3181		.test = alg_test_hash,
3182		.fips_allowed = 1,
3183		.suite = {
3184			.hash = __VECS(hmac_sha512_tv_template)
3185		}
3186	}, {
3187		.alg = "jitterentropy_rng",
3188		.fips_allowed = 1,
3189		.test = alg_test_null,
3190	}, {
3191		.alg = "kw(aes)",
3192		.test = alg_test_skcipher,
3193		.fips_allowed = 1,
3194		.suite = {
3195			.cipher = {
3196				.enc = __VECS(aes_kw_enc_tv_template),
3197				.dec = __VECS(aes_kw_dec_tv_template)
3198			}
3199		}
3200	}, {
3201		.alg = "lrw(aes)",
3202		.test = alg_test_skcipher,
3203		.suite = {
3204			.cipher = {
3205				.enc = __VECS(aes_lrw_enc_tv_template),
3206				.dec = __VECS(aes_lrw_dec_tv_template)
3207			}
3208		}
3209	}, {
3210		.alg = "lrw(camellia)",
3211		.test = alg_test_skcipher,
3212		.suite = {
3213			.cipher = {
3214				.enc = __VECS(camellia_lrw_enc_tv_template),
3215				.dec = __VECS(camellia_lrw_dec_tv_template)
3216			}
3217		}
3218	}, {
3219		.alg = "lrw(cast6)",
3220		.test = alg_test_skcipher,
3221		.suite = {
3222			.cipher = {
3223				.enc = __VECS(cast6_lrw_enc_tv_template),
3224				.dec = __VECS(cast6_lrw_dec_tv_template)
3225			}
3226		}
3227	}, {
3228		.alg = "lrw(serpent)",
3229		.test = alg_test_skcipher,
3230		.suite = {
3231			.cipher = {
3232				.enc = __VECS(serpent_lrw_enc_tv_template),
3233				.dec = __VECS(serpent_lrw_dec_tv_template)
3234			}
3235		}
3236	}, {
3237		.alg = "lrw(twofish)",
3238		.test = alg_test_skcipher,
3239		.suite = {
3240			.cipher = {
3241				.enc = __VECS(tf_lrw_enc_tv_template),
3242				.dec = __VECS(tf_lrw_dec_tv_template)
3243			}
3244		}
3245	}, {
3246		.alg = "lz4",
3247		.test = alg_test_comp,
3248		.fips_allowed = 1,
3249		.suite = {
3250			.comp = {
3251				.comp = __VECS(lz4_comp_tv_template),
3252				.decomp = __VECS(lz4_decomp_tv_template)
3253			}
3254		}
3255	}, {
3256		.alg = "lz4hc",
3257		.test = alg_test_comp,
3258		.fips_allowed = 1,
3259		.suite = {
3260			.comp = {
3261				.comp = __VECS(lz4hc_comp_tv_template),
3262				.decomp = __VECS(lz4hc_decomp_tv_template)
3263			}
3264		}
3265	}, {
3266		.alg = "lzo",
3267		.test = alg_test_comp,
3268		.fips_allowed = 1,
3269		.suite = {
3270			.comp = {
3271				.comp = __VECS(lzo_comp_tv_template),
3272				.decomp = __VECS(lzo_decomp_tv_template)
3273			}
3274		}
3275	}, {
3276		.alg = "md4",
3277		.test = alg_test_hash,
3278		.suite = {
3279			.hash = __VECS(md4_tv_template)
3280		}
3281	}, {
3282		.alg = "md5",
3283		.test = alg_test_hash,
3284		.suite = {
3285			.hash = __VECS(md5_tv_template)
3286		}
3287	}, {
3288		.alg = "michael_mic",
3289		.test = alg_test_hash,
3290		.suite = {
3291			.hash = __VECS(michael_mic_tv_template)
3292		}
3293	}, {
3294		.alg = "ofb(aes)",
3295		.test = alg_test_skcipher,
3296		.fips_allowed = 1,
3297		.suite = {
3298			.cipher = {
3299				.enc = __VECS(aes_ofb_enc_tv_template),
3300				.dec = __VECS(aes_ofb_dec_tv_template)
3301			}
3302		}
3303	}, {
3304		.alg = "pcbc(fcrypt)",
3305		.test = alg_test_skcipher,
3306		.suite = {
3307			.cipher = {
3308				.enc = __VECS(fcrypt_pcbc_enc_tv_template),
3309				.dec = __VECS(fcrypt_pcbc_dec_tv_template)
3310			}
3311		}
3312	}, {
3313		.alg = "pkcs1pad(rsa,sha224)",
3314		.test = alg_test_null,
3315		.fips_allowed = 1,
3316	}, {
3317		.alg = "pkcs1pad(rsa,sha256)",
3318		.test = alg_test_akcipher,
3319		.fips_allowed = 1,
3320		.suite = {
3321			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
3322		}
3323	}, {
3324		.alg = "pkcs1pad(rsa,sha384)",
3325		.test = alg_test_null,
3326		.fips_allowed = 1,
3327	}, {
3328		.alg = "pkcs1pad(rsa,sha512)",
3329		.test = alg_test_null,
3330		.fips_allowed = 1,
3331	}, {
3332		.alg = "poly1305",
3333		.test = alg_test_hash,
3334		.suite = {
3335			.hash = __VECS(poly1305_tv_template)
3336		}
3337	}, {
3338		.alg = "rfc3686(ctr(aes))",
3339		.test = alg_test_skcipher,
3340		.fips_allowed = 1,
3341		.suite = {
3342			.cipher = {
3343				.enc = __VECS(aes_ctr_rfc3686_enc_tv_template),
3344				.dec = __VECS(aes_ctr_rfc3686_dec_tv_template)
3345			}
3346		}
3347	}, {
3348		.alg = "rfc4106(gcm(aes))",
3349		.test = alg_test_aead,
3350		.fips_allowed = 1,
3351		.suite = {
3352			.aead = {
3353				.enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
3354				.dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
3355			}
3356		}
3357	}, {
3358		.alg = "rfc4309(ccm(aes))",
3359		.test = alg_test_aead,
3360		.fips_allowed = 1,
3361		.suite = {
3362			.aead = {
3363				.enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
3364				.dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
3365			}
3366		}
3367	}, {
3368		.alg = "rfc4543(gcm(aes))",
3369		.test = alg_test_aead,
3370		.suite = {
3371			.aead = {
3372				.enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
3373				.dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
3374			}
3375		}
3376	}, {
3377		.alg = "rfc7539(chacha20,poly1305)",
3378		.test = alg_test_aead,
3379		.suite = {
3380			.aead = {
3381				.enc = __VECS(rfc7539_enc_tv_template),
3382				.dec = __VECS(rfc7539_dec_tv_template),
3383			}
3384		}
3385	}, {
3386		.alg = "rfc7539esp(chacha20,poly1305)",
3387		.test = alg_test_aead,
3388		.suite = {
3389			.aead = {
3390				.enc = __VECS(rfc7539esp_enc_tv_template),
3391				.dec = __VECS(rfc7539esp_dec_tv_template),
3392			}
3393		}
3394	}, {
3395		.alg = "rmd128",
3396		.test = alg_test_hash,
3397		.suite = {
3398			.hash = __VECS(rmd128_tv_template)
3399		}
3400	}, {
3401		.alg = "rmd160",
3402		.test = alg_test_hash,
3403		.suite = {
3404			.hash = __VECS(rmd160_tv_template)
3405		}
3406	}, {
3407		.alg = "rmd256",
3408		.test = alg_test_hash,
3409		.suite = {
3410			.hash = __VECS(rmd256_tv_template)
3411		}
3412	}, {
3413		.alg = "rmd320",
3414		.test = alg_test_hash,
3415		.suite = {
3416			.hash = __VECS(rmd320_tv_template)
3417		}
3418	}, {
3419		.alg = "rsa",
3420		.test = alg_test_akcipher,
3421		.fips_allowed = 1,
3422		.suite = {
3423			.akcipher = __VECS(rsa_tv_template)
3424		}
3425	}, {
3426		.alg = "salsa20",
3427		.test = alg_test_skcipher,
3428		.suite = {
3429			.cipher = {
3430				.enc = __VECS(salsa20_stream_enc_tv_template)
3431			}
3432		}
3433	}, {
3434		.alg = "sha1",
3435		.test = alg_test_hash,
3436		.fips_allowed = 1,
3437		.suite = {
3438			.hash = __VECS(sha1_tv_template)
3439		}
3440	}, {
3441		.alg = "sha224",
3442		.test = alg_test_hash,
3443		.fips_allowed = 1,
3444		.suite = {
3445			.hash = __VECS(sha224_tv_template)
3446		}
3447	}, {
3448		.alg = "sha256",
3449		.test = alg_test_hash,
3450		.fips_allowed = 1,
3451		.suite = {
3452			.hash = __VECS(sha256_tv_template)
3453		}
3454	}, {
3455		.alg = "sha3-224",
3456		.test = alg_test_hash,
3457		.fips_allowed = 1,
3458		.suite = {
3459			.hash = __VECS(sha3_224_tv_template)
3460		}
3461	}, {
3462		.alg = "sha3-256",
3463		.test = alg_test_hash,
3464		.fips_allowed = 1,
3465		.suite = {
3466			.hash = __VECS(sha3_256_tv_template)
3467		}
3468	}, {
3469		.alg = "sha3-384",
3470		.test = alg_test_hash,
3471		.fips_allowed = 1,
3472		.suite = {
3473			.hash = __VECS(sha3_384_tv_template)
3474		}
3475	}, {
3476		.alg = "sha3-512",
3477		.test = alg_test_hash,
3478		.fips_allowed = 1,
3479		.suite = {
3480			.hash = __VECS(sha3_512_tv_template)
3481		}
3482	}, {
3483		.alg = "sha384",
3484		.test = alg_test_hash,
3485		.fips_allowed = 1,
3486		.suite = {
3487			.hash = __VECS(sha384_tv_template)
3488		}
3489	}, {
3490		.alg = "sha512",
3491		.test = alg_test_hash,
3492		.fips_allowed = 1,
3493		.suite = {
3494			.hash = __VECS(sha512_tv_template)
3495		}
3496	}, {
3497		.alg = "sm3",
3498		.test = alg_test_hash,
3499		.suite = {
3500			.hash = __VECS(sm3_tv_template)
3501		}
3502	}, {
3503		.alg = "tgr128",
3504		.test = alg_test_hash,
3505		.suite = {
3506			.hash = __VECS(tgr128_tv_template)
3507		}
3508	}, {
3509		.alg = "tgr160",
3510		.test = alg_test_hash,
3511		.suite = {
3512			.hash = __VECS(tgr160_tv_template)
3513		}
3514	}, {
3515		.alg = "tgr192",
3516		.test = alg_test_hash,
3517		.suite = {
3518			.hash = __VECS(tgr192_tv_template)
3519		}
3520	}, {
3521		.alg = "vmac(aes)",
3522		.test = alg_test_hash,
3523		.suite = {
3524			.hash = __VECS(aes_vmac128_tv_template)
3525		}
3526	}, {
3527		.alg = "wp256",
3528		.test = alg_test_hash,
3529		.suite = {
3530			.hash = __VECS(wp256_tv_template)
3531		}
3532	}, {
3533		.alg = "wp384",
3534		.test = alg_test_hash,
3535		.suite = {
3536			.hash = __VECS(wp384_tv_template)
3537		}
3538	}, {
3539		.alg = "wp512",
3540		.test = alg_test_hash,
3541		.suite = {
3542			.hash = __VECS(wp512_tv_template)
3543		}
3544	}, {
3545		.alg = "xcbc(aes)",
3546		.test = alg_test_hash,
3547		.suite = {
3548			.hash = __VECS(aes_xcbc128_tv_template)
3549		}
3550	}, {
3551		.alg = "xts(aes)",
3552		.test = alg_test_skcipher,
3553		.fips_allowed = 1,
3554		.suite = {
3555			.cipher = {
3556				.enc = __VECS(aes_xts_enc_tv_template),
3557				.dec = __VECS(aes_xts_dec_tv_template)
3558			}
3559		}
3560	}, {
3561		.alg = "xts(camellia)",
3562		.test = alg_test_skcipher,
3563		.suite = {
3564			.cipher = {
3565				.enc = __VECS(camellia_xts_enc_tv_template),
3566				.dec = __VECS(camellia_xts_dec_tv_template)
3567			}
3568		}
3569	}, {
3570		.alg = "xts(cast6)",
3571		.test = alg_test_skcipher,
3572		.suite = {
3573			.cipher = {
3574				.enc = __VECS(cast6_xts_enc_tv_template),
3575				.dec = __VECS(cast6_xts_dec_tv_template)
3576			}
3577		}
3578	}, {
3579		.alg = "xts(serpent)",
3580		.test = alg_test_skcipher,
3581		.suite = {
3582			.cipher = {
3583				.enc = __VECS(serpent_xts_enc_tv_template),
3584				.dec = __VECS(serpent_xts_dec_tv_template)
3585			}
3586		}
3587	}, {
3588		.alg = "xts(speck128)",
3589		.test = alg_test_skcipher,
3590		.suite = {
3591			.cipher = {
3592				.enc = __VECS(speck128_xts_enc_tv_template),
3593				.dec = __VECS(speck128_xts_dec_tv_template)
3594			}
3595		}
3596	}, {
3597		.alg = "xts(speck64)",
3598		.test = alg_test_skcipher,
3599		.suite = {
3600			.cipher = {
3601				.enc = __VECS(speck64_xts_enc_tv_template),
3602				.dec = __VECS(speck64_xts_dec_tv_template)
3603			}
3604		}
3605	}, {
3606		.alg = "xts(twofish)",
3607		.test = alg_test_skcipher,
3608		.suite = {
3609			.cipher = {
3610				.enc = __VECS(tf_xts_enc_tv_template),
3611				.dec = __VECS(tf_xts_dec_tv_template)
3612			}
3613		}
3614	}, {
3615		.alg = "zlib-deflate",
3616		.test = alg_test_comp,
3617		.fips_allowed = 1,
3618		.suite = {
3619			.comp = {
3620				.comp = __VECS(zlib_deflate_comp_tv_template),
3621				.decomp = __VECS(zlib_deflate_decomp_tv_template)
3622			}
3623		}
3624	}
3625};
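/*
 * A new descriptor follows the same shape and must keep the table sorted
 * by algorithm name.  Illustrative sketch only ("foo" and foo_tv_template
 * are made-up names):
 *
 *	}, {
 *		.alg = "foo",
 *		.test = alg_test_hash,
 *		.suite = {
 *			.hash = __VECS(foo_tv_template)
 *		}
 *	}, {
 */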
3626
3627static bool alg_test_descs_checked;
3628
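/*
 * One-time sanity check of alg_test_descs[]: warn if the table is not
 * strictly sorted by algorithm name, since alg_find_test() relies on that
 * order for its binary search.
 */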
3629static void alg_test_descs_check_order(void)
3630{
3631	int i;
3632
3633	/* only check once */
3634	if (alg_test_descs_checked)
3635		return;
3636
3637	alg_test_descs_checked = true;
3638
3639	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3640		int diff = strcmp(alg_test_descs[i - 1].alg,
3641				  alg_test_descs[i].alg);
3642
3643		if (WARN_ON(diff > 0)) {
3644			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3645				alg_test_descs[i - 1].alg,
3646				alg_test_descs[i].alg);
3647		}
3648
3649		if (WARN_ON(diff == 0)) {
3650			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3651				alg_test_descs[i].alg);
3652		}
3653	}
3654}
3655
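/*
 * Binary search for @alg in the sorted alg_test_descs[] table; returns the
 * matching index, or -1 if there is no entry.
 */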
3656static int alg_find_test(const char *alg)
3657{
3658	int start = 0;
3659	int end = ARRAY_SIZE(alg_test_descs);
3660
3661	while (start < end) {
3662		int i = (start + end) / 2;
3663		int diff = strcmp(alg_test_descs[i].alg, alg);
3664
3665		if (diff > 0) {
3666			end = i;
3667			continue;
3668		}
3669
3670		if (diff < 0) {
3671			start = i + 1;
3672			continue;
3673		}
3674
3675		return i;
3676	}
3677
3678	return -1;
3679}
3680
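/*
 * Top-level self-test entry point: look up test descriptors under both the
 * algorithm name and the driver name, run whichever exist, and in FIPS mode
 * refuse algorithms that are not fips_allowed and panic if a test fails.
 */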
3681int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3682{
3683	int i;
3684	int j;
3685	int rc;
3686
3687	if (!fips_enabled && notests) {
3688		printk_once(KERN_INFO "alg: self-tests disabled\n");
3689		return 0;
3690	}
3691
3692	alg_test_descs_check_order();
3693
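	/*
	 * A bare block cipher has no chaining mode of its own; run the
	 * cipher test against its "ecb(<alg>)" descriptor instead.
	 */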
3694	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3695		char nalg[CRYPTO_MAX_ALG_NAME];
3696
3697		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3698		    sizeof(nalg))
3699			return -ENAMETOOLONG;
3700
3701		i = alg_find_test(nalg);
3702		if (i < 0)
3703			goto notest;
3704
3705		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3706			goto non_fips_alg;
3707
3708		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3709		goto test_done;
3710	}
3711
3712	i = alg_find_test(alg);
3713	j = alg_find_test(driver);
3714	if (i < 0 && j < 0)
3715		goto notest;
3716
3717	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3718			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3719		goto non_fips_alg;
3720
3721	rc = 0;
3722	if (i >= 0)
3723		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3724					     type, mask);
3725	if (j >= 0 && j != i)
3726		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3727					     type, mask);
3728
3729test_done:
3730	if (fips_enabled && rc)
3731		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3732
3733	if (fips_enabled && !rc)
3734		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3735
3736	return rc;
3737
3738notest:
3739	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3740	return 0;
3741non_fips_alg:
3742	return -EINVAL;
3743}
3744
3745#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3746
3747EXPORT_SYMBOL_GPL(alg_test);