   1/*
   2 * Algorithm testing framework and tests.
   3 *
   4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
   5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
   6 * Copyright (c) 2007 Nokia Siemens Networks
   7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
   8 *
   9 * Updated RFC4106 AES-GCM testing.
  10 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  11 *             Adrian Hoban <adrian.hoban@intel.com>
  12 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
  13 *             Tadeusz Struk (tadeusz.struk@intel.com)
  14 *    Copyright (c) 2010, Intel Corporation.
  15 *
  16 * This program is free software; you can redistribute it and/or modify it
  17 * under the terms of the GNU General Public License as published by the Free
  18 * Software Foundation; either version 2 of the License, or (at your option)
  19 * any later version.
  20 *
  21 */
  22
  23#include <crypto/aead.h>
  24#include <crypto/hash.h>
  25#include <crypto/skcipher.h>
  26#include <linux/err.h>
  27#include <linux/fips.h>
  28#include <linux/module.h>
  29#include <linux/scatterlist.h>
  30#include <linux/slab.h>
  31#include <linux/string.h>
  32#include <crypto/rng.h>
  33#include <crypto/drbg.h>
  34#include <crypto/akcipher.h>
  35#include <crypto/kpp.h>
  36#include <crypto/acompress.h>
  37
  38#include "internal.h"
  39
  40static bool notests;
  41module_param(notests, bool, 0644);
  42MODULE_PARM_DESC(notests, "disable crypto self-tests");
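/*
 * When set, the self-tests are skipped (the check lives in alg_test(),
 * further down in this file).  testmgr is linked into the cryptomgr
 * module, so this is typically given on the kernel command line as
 * cryptomgr.notests=1.
 */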
  43
  44#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  45
  46/* a perfect nop */
  47int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  48{
  49	return 0;
  50}
  51
  52#else
  53
  54#include "testmgr.h"
  55
  56/*
  57 * Need slab memory for testing (size in number of pages).
  58 */
  59#define XBUFSIZE	8
  60
  61/*
  62 * Indexes into the xbuf to simulate cross-page access.
  63 */
  64#define IDX1		32
  65#define IDX2		32400
  66#define IDX3		1511
  67#define IDX4		8193
  68#define IDX5		22222
  69#define IDX6		17101
  70#define IDX7		27333
  71#define IDX8		3000
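/*
 * Each IDX value is split below into a page number (IDX[k] >> PAGE_SHIFT)
 * and an offset within that page (offset_in_page(IDX[k])), so the chunked
 * tests place every scatterlist segment at a different offset inside the
 * XBUFSIZE-page xbuf/axbuf/xoutbuf arrays and exercise buffers that cross
 * page boundaries.
 */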
  72
  73/*
   74 * Used by test_cipher()
   75 */
  76#define ENCRYPT 1
  77#define DECRYPT 0
  78
  79struct aead_test_suite {
  80	struct {
  81		const struct aead_testvec *vecs;
  82		unsigned int count;
  83	} enc, dec;
  84};
  85
  86struct cipher_test_suite {
  87	struct {
  88		const struct cipher_testvec *vecs;
  89		unsigned int count;
  90	} enc, dec;
  91};
  92
  93struct comp_test_suite {
  94	struct {
  95		const struct comp_testvec *vecs;
  96		unsigned int count;
  97	} comp, decomp;
  98};
  99
 100struct hash_test_suite {
 101	const struct hash_testvec *vecs;
 102	unsigned int count;
 103};
 104
 105struct cprng_test_suite {
 106	const struct cprng_testvec *vecs;
 107	unsigned int count;
 108};
 109
 110struct drbg_test_suite {
 111	const struct drbg_testvec *vecs;
 112	unsigned int count;
 113};
 114
 115struct akcipher_test_suite {
 116	const struct akcipher_testvec *vecs;
 117	unsigned int count;
 118};
 119
 120struct kpp_test_suite {
 121	const struct kpp_testvec *vecs;
 122	unsigned int count;
 123};
 124
 125struct alg_test_desc {
 126	const char *alg;
 127	int (*test)(const struct alg_test_desc *desc, const char *driver,
 128		    u32 type, u32 mask);
 129	int fips_allowed;	/* set if alg is allowed in fips mode */
 130
 131	union {
 132		struct aead_test_suite aead;
 133		struct cipher_test_suite cipher;
 134		struct comp_test_suite comp;
 135		struct hash_test_suite hash;
 136		struct cprng_test_suite cprng;
 137		struct drbg_test_suite drbg;
 138		struct akcipher_test_suite akcipher;
 139		struct kpp_test_suite kpp;
 140	} suite;
 141};
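/*
 * Instances of this descriptor are collected in the alg_test_descs[] table
 * further down in this file (not shown in this excerpt).  A rough,
 * illustrative sketch of what one entry looks like:
 *
 *	{
 *		.alg = "sha256",
 *		.test = alg_test_hash,
 *		.fips_allowed = 1,
 *		.suite = {
 *			.hash = __VECS(sha256_tv_template)
 *		}
 *	},
 *
 * where __VECS() fills in the vecs/count pair from a test vector array
 * defined in testmgr.h.
 */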
 142
 143static const unsigned int IDX[8] = {
 144	IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 145
 146static void hexdump(unsigned char *buf, unsigned int len)
 147{
 148	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
 149			16, 1,
 150			buf, len, false);
 151}
 152
 153static int testmgr_alloc_buf(char *buf[XBUFSIZE])
 154{
 155	int i;
 156
 157	for (i = 0; i < XBUFSIZE; i++) {
 158		buf[i] = (void *)__get_free_page(GFP_KERNEL);
 159		if (!buf[i])
 160			goto err_free_buf;
 161	}
 162
 163	return 0;
 164
 165err_free_buf:
 166	while (i-- > 0)
 167		free_page((unsigned long)buf[i]);
 168
 169	return -ENOMEM;
 170}
 171
 172static void testmgr_free_buf(char *buf[XBUFSIZE])
 173{
 174	int i;
 175
 176	for (i = 0; i < XBUFSIZE; i++)
 177		free_page((unsigned long)buf[i]);
 178}
 179
 180static int ahash_guard_result(char *result, char c, int size)
 181{
 182	int i;
 183
 184	for (i = 0; i < size; i++) {
 185		if (result[i] != c)
 186			return -EINVAL;
 187	}
 188
 189	return 0;
 190}
 191
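/*
 * Exercise export()/import() in the middle of a hash operation: export the
 * current state into a buffer with trailing guard bytes (neither the guard
 * nor req->result may be written to), free and reallocate the request,
 * import the state into the new request and continue with the next tap[]
 * chunk of the template.
 */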
 192static int ahash_partial_update(struct ahash_request **preq,
 193	struct crypto_ahash *tfm, const struct hash_testvec *template,
 194	void *hash_buff, int k, int temp, struct scatterlist *sg,
 195	const char *algo, char *result, struct crypto_wait *wait)
 196{
 197	char *state;
 198	struct ahash_request *req;
 199	int statesize, ret = -EINVAL;
 200	static const unsigned char guard[] = { 0x00, 0xba, 0xad, 0x00 };
 201	int digestsize = crypto_ahash_digestsize(tfm);
 202
 203	req = *preq;
 204	statesize = crypto_ahash_statesize(
 205			crypto_ahash_reqtfm(req));
 206	state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
 207	if (!state) {
 208		pr_err("alg: hash: Failed to alloc state for %s\n", algo);
 209		goto out_nostate;
 210	}
 211	memcpy(state + statesize, guard, sizeof(guard));
 212	memset(result, 1, digestsize);
 213	ret = crypto_ahash_export(req, state);
 214	WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
 215	if (ret) {
 216		pr_err("alg: hash: Failed to export() for %s\n", algo);
 217		goto out;
 218	}
 219	ret = ahash_guard_result(result, 1, digestsize);
 220	if (ret) {
 221		pr_err("alg: hash: Failed, export used req->result for %s\n",
 222		       algo);
 223		goto out;
 224	}
 225	ahash_request_free(req);
 226	req = ahash_request_alloc(tfm, GFP_KERNEL);
 227	if (!req) {
 228		pr_err("alg: hash: Failed to alloc request for %s\n", algo);
 229		goto out_noreq;
 230	}
 231	ahash_request_set_callback(req,
 232		CRYPTO_TFM_REQ_MAY_BACKLOG,
 233		crypto_req_done, wait);
 234
 235	memcpy(hash_buff, template->plaintext + temp,
 236		template->tap[k]);
 237	sg_init_one(&sg[0], hash_buff, template->tap[k]);
 238	ahash_request_set_crypt(req, sg, result, template->tap[k]);
 239	ret = crypto_ahash_import(req, state);
 240	if (ret) {
 241		pr_err("alg: hash: Failed to import() for %s\n", algo);
 242		goto out;
 243	}
 244	ret = ahash_guard_result(result, 1, digestsize);
 245	if (ret) {
 246		pr_err("alg: hash: Failed, import used req->result for %s\n",
 247		       algo);
 248		goto out;
 249	}
 250	ret = crypto_wait_req(crypto_ahash_update(req), wait);
 251	if (ret)
 252		goto out;
 253	*preq = req;
 254	ret = 0;
 255	goto out_noreq;
 256out:
 257	ahash_request_free(req);
 258out_noreq:
 259	kfree(state);
 260out_nostate:
 261	return ret;
 262}
 263
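/*
 * Core hash test.  Each template is run three ways: linear buffers, either
 * through a single digest() call or an explicit init()/update()/final()
 * sequence (checking that req->result is not touched prematurely); chunked
 * scatterlists laid out across pages according to tap[]; and a partial-update
 * pass that round-trips the state through export()/import() between chunks.
 * align_offset shifts the input within its page to test unaligned buffers.
 */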
 264static int __test_hash(struct crypto_ahash *tfm,
 265		       const struct hash_testvec *template, unsigned int tcount,
 266		       bool use_digest, const int align_offset)
 267{
 268	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 269	size_t digest_size = crypto_ahash_digestsize(tfm);
 270	unsigned int i, j, k, temp;
 271	struct scatterlist sg[8];
 272	char *result;
 273	char *key;
 274	struct ahash_request *req;
 275	struct crypto_wait wait;
 276	void *hash_buff;
 277	char *xbuf[XBUFSIZE];
 278	int ret = -ENOMEM;
 279
 280	result = kmalloc(digest_size, GFP_KERNEL);
 281	if (!result)
 282		return ret;
 283	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
 284	if (!key)
 285		goto out_nobuf;
 286	if (testmgr_alloc_buf(xbuf))
 287		goto out_nobuf;
 288
 289	crypto_init_wait(&wait);
 290
 291	req = ahash_request_alloc(tfm, GFP_KERNEL);
 292	if (!req) {
 293		printk(KERN_ERR "alg: hash: Failed to allocate request for "
 294		       "%s\n", algo);
 295		goto out_noreq;
 296	}
 297	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 298				   crypto_req_done, &wait);
 299
 300	j = 0;
 301	for (i = 0; i < tcount; i++) {
 302		if (template[i].np)
 303			continue;
 304
 305		ret = -EINVAL;
 306		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
 307			goto out;
 308
 309		j++;
 310		memset(result, 0, digest_size);
 311
 312		hash_buff = xbuf[0];
 313		hash_buff += align_offset;
 314
 315		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 316		sg_init_one(&sg[0], hash_buff, template[i].psize);
 317
 318		if (template[i].ksize) {
 319			crypto_ahash_clear_flags(tfm, ~0);
 320			if (template[i].ksize > MAX_KEYLEN) {
 321				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 322				       j, algo, template[i].ksize, MAX_KEYLEN);
 323				ret = -EINVAL;
 324				goto out;
 325			}
 326			memcpy(key, template[i].key, template[i].ksize);
 327			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 328			if (ret) {
 329				printk(KERN_ERR "alg: hash: setkey failed on "
 330				       "test %d for %s: ret=%d\n", j, algo,
 331				       -ret);
 332				goto out;
 333			}
 334		}
 335
 336		ahash_request_set_crypt(req, sg, result, template[i].psize);
 337		if (use_digest) {
 338			ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 339			if (ret) {
 340				pr_err("alg: hash: digest failed on test %d "
 341				       "for %s: ret=%d\n", j, algo, -ret);
 342				goto out;
 343			}
 344		} else {
 345			memset(result, 1, digest_size);
 346			ret = crypto_wait_req(crypto_ahash_init(req), &wait);
 347			if (ret) {
 348				pr_err("alg: hash: init failed on test %d "
 349				       "for %s: ret=%d\n", j, algo, -ret);
 350				goto out;
 351			}
 352			ret = ahash_guard_result(result, 1, digest_size);
 353			if (ret) {
 354				pr_err("alg: hash: init failed on test %d "
 355				       "for %s: used req->result\n", j, algo);
 356				goto out;
 357			}
 358			ret = crypto_wait_req(crypto_ahash_update(req), &wait);
 359			if (ret) {
 360				pr_err("alg: hash: update failed on test %d "
 361				       "for %s: ret=%d\n", j, algo, -ret);
 362				goto out;
 363			}
 364			ret = ahash_guard_result(result, 1, digest_size);
 365			if (ret) {
 366				pr_err("alg: hash: update failed on test %d "
 367				       "for %s: used req->result\n", j, algo);
 368				goto out;
 369			}
 370			ret = crypto_wait_req(crypto_ahash_final(req), &wait);
 371			if (ret) {
 372				pr_err("alg: hash: final failed on test %d "
 373				       "for %s: ret=%d\n", j, algo, -ret);
 374				goto out;
 375			}
 376		}
 377
 378		if (memcmp(result, template[i].digest,
 379			   crypto_ahash_digestsize(tfm))) {
 380			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
 381			       j, algo);
 382			hexdump(result, crypto_ahash_digestsize(tfm));
 383			ret = -EINVAL;
 384			goto out;
 385		}
 386	}
 387
 388	j = 0;
 389	for (i = 0; i < tcount; i++) {
  390		/* alignment tests are only done with contiguous buffers */
 391		if (align_offset != 0)
 392			break;
 393
 394		if (!template[i].np)
 395			continue;
 396
 397		j++;
 398		memset(result, 0, digest_size);
 399
 400		temp = 0;
 401		sg_init_table(sg, template[i].np);
 402		ret = -EINVAL;
 403		for (k = 0; k < template[i].np; k++) {
 404			if (WARN_ON(offset_in_page(IDX[k]) +
 405				    template[i].tap[k] > PAGE_SIZE))
 406				goto out;
 407			sg_set_buf(&sg[k],
 408				   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
 409					  offset_in_page(IDX[k]),
 410					  template[i].plaintext + temp,
 411					  template[i].tap[k]),
 412				   template[i].tap[k]);
 413			temp += template[i].tap[k];
 414		}
 415
 416		if (template[i].ksize) {
 417			if (template[i].ksize > MAX_KEYLEN) {
 418				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 419				       j, algo, template[i].ksize, MAX_KEYLEN);
 420				ret = -EINVAL;
 421				goto out;
 422			}
 423			crypto_ahash_clear_flags(tfm, ~0);
 424			memcpy(key, template[i].key, template[i].ksize);
 425			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 426
 427			if (ret) {
 428				printk(KERN_ERR "alg: hash: setkey "
 429				       "failed on chunking test %d "
 430				       "for %s: ret=%d\n", j, algo, -ret);
 431				goto out;
 432			}
 433		}
 434
 435		ahash_request_set_crypt(req, sg, result, template[i].psize);
 436		ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
 437		if (ret) {
 438			pr_err("alg: hash: digest failed on chunking test %d for %s: ret=%d\n",
 439			       j, algo, -ret);
 440			goto out;
 441		}
 442
 443		if (memcmp(result, template[i].digest,
 444			   crypto_ahash_digestsize(tfm))) {
 445			printk(KERN_ERR "alg: hash: Chunking test %d "
 446			       "failed for %s\n", j, algo);
 447			hexdump(result, crypto_ahash_digestsize(tfm));
 448			ret = -EINVAL;
 449			goto out;
 450		}
 451	}
 452
 453	/* partial update exercise */
 454	j = 0;
 455	for (i = 0; i < tcount; i++) {
  456		/* alignment tests are only done with contiguous buffers */
 457		if (align_offset != 0)
 458			break;
 459
 460		if (template[i].np < 2)
 461			continue;
 462
 463		j++;
 464		memset(result, 0, digest_size);
 465
 466		ret = -EINVAL;
 467		hash_buff = xbuf[0];
 468		memcpy(hash_buff, template[i].plaintext,
 469			template[i].tap[0]);
 470		sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
 471
 472		if (template[i].ksize) {
 473			crypto_ahash_clear_flags(tfm, ~0);
 474			if (template[i].ksize > MAX_KEYLEN) {
 475				pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
 476					j, algo, template[i].ksize, MAX_KEYLEN);
 477				ret = -EINVAL;
 478				goto out;
 479			}
 480			memcpy(key, template[i].key, template[i].ksize);
 481			ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
 482			if (ret) {
 483				pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
 484					j, algo, -ret);
 485				goto out;
 486			}
 487		}
 488
 489		ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
 490		ret = crypto_wait_req(crypto_ahash_init(req), &wait);
 491		if (ret) {
 492			pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
 493				j, algo, -ret);
 494			goto out;
 495		}
 496		ret = crypto_wait_req(crypto_ahash_update(req), &wait);
 497		if (ret) {
 498			pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
 499				j, algo, -ret);
 500			goto out;
 501		}
 502
 503		temp = template[i].tap[0];
 504		for (k = 1; k < template[i].np; k++) {
 505			ret = ahash_partial_update(&req, tfm, &template[i],
 506				hash_buff, k, temp, &sg[0], algo, result,
 507				&wait);
 508			if (ret) {
 509				pr_err("alg: hash: partial update failed on test %d for %s: ret=%d\n",
 510					j, algo, -ret);
 511				goto out_noreq;
 512			}
 513			temp += template[i].tap[k];
 514		}
 515		ret = crypto_wait_req(crypto_ahash_final(req), &wait);
 516		if (ret) {
 517			pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
 518				j, algo, -ret);
 519			goto out;
 520		}
 521		if (memcmp(result, template[i].digest,
 522			   crypto_ahash_digestsize(tfm))) {
 523			pr_err("alg: hash: Partial Test %d failed for %s\n",
 524			       j, algo);
 525			hexdump(result, crypto_ahash_digestsize(tfm));
 526			ret = -EINVAL;
 527			goto out;
 528		}
 529	}
 530
 531	ret = 0;
 532
 533out:
 534	ahash_request_free(req);
 535out_noreq:
 536	testmgr_free_buf(xbuf);
 537out_nobuf:
 538	kfree(key);
 539	kfree(result);
 540	return ret;
 541}
 542
 543static int test_hash(struct crypto_ahash *tfm,
 544		     const struct hash_testvec *template,
 545		     unsigned int tcount, bool use_digest)
 546{
 547	unsigned int alignmask;
 548	int ret;
 549
 550	ret = __test_hash(tfm, template, tcount, use_digest, 0);
 551	if (ret)
 552		return ret;
 553
 554	/* test unaligned buffers, check with one byte offset */
 555	ret = __test_hash(tfm, template, tcount, use_digest, 1);
 556	if (ret)
 557		return ret;
 558
 559	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 560	if (alignmask) {
 561		/* Check if alignment mask for tfm is correctly set. */
 562		ret = __test_hash(tfm, template, tcount, use_digest,
 563				  alignmask + 1);
 564		if (ret)
 565			return ret;
 566	}
 567
 568	return 0;
 569}
 570
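/*
 * Core AEAD test.  Vectors are run first with linear buffers and then, for
 * templates with np set, with associated data and payload scattered across
 * pages via atap[]/tap[].  For each vector the key is set (honouring the
 * expected-failure 'fail' flag), the tag length is derived from
 * |rlen - ilen|, and encryption or decryption is awaited synchronously;
 * vectors marked 'novrfy' must fail with -EBADMSG.  With diff_dst the output
 * goes to a separate scatterlist, and guard bytes after each chunk are
 * checked for corruption.
 */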
 571static int __test_aead(struct crypto_aead *tfm, int enc,
 572		       const struct aead_testvec *template, unsigned int tcount,
 573		       const bool diff_dst, const int align_offset)
 574{
 575	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
 576	unsigned int i, j, k, n, temp;
 577	int ret = -ENOMEM;
 578	char *q;
 579	char *key;
 580	struct aead_request *req;
 581	struct scatterlist *sg;
 582	struct scatterlist *sgout;
 583	const char *e, *d;
 584	struct crypto_wait wait;
 585	unsigned int authsize, iv_len;
 586	void *input;
 587	void *output;
 588	void *assoc;
 589	char *iv;
 590	char *xbuf[XBUFSIZE];
 591	char *xoutbuf[XBUFSIZE];
 592	char *axbuf[XBUFSIZE];
 593
 594	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
 595	if (!iv)
 596		return ret;
 597	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
 598	if (!key)
 599		goto out_noxbuf;
 600	if (testmgr_alloc_buf(xbuf))
 601		goto out_noxbuf;
 602	if (testmgr_alloc_buf(axbuf))
 603		goto out_noaxbuf;
 604	if (diff_dst && testmgr_alloc_buf(xoutbuf))
 605		goto out_nooutbuf;
 606
 607	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
 608	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
 609	if (!sg)
 610		goto out_nosg;
 611	sgout = &sg[16];
 612
 613	if (diff_dst)
 614		d = "-ddst";
 615	else
 616		d = "";
 617
 618	if (enc == ENCRYPT)
 619		e = "encryption";
 620	else
 621		e = "decryption";
 622
 623	crypto_init_wait(&wait);
 624
 625	req = aead_request_alloc(tfm, GFP_KERNEL);
 626	if (!req) {
 627		pr_err("alg: aead%s: Failed to allocate request for %s\n",
 628		       d, algo);
 629		goto out;
 630	}
 631
 632	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 633				  crypto_req_done, &wait);
 634
 635	iv_len = crypto_aead_ivsize(tfm);
 636
 637	for (i = 0, j = 0; i < tcount; i++) {
 638		if (template[i].np)
 639			continue;
 640
 641		j++;
 642
  643		/* some templates have no input data but they still
  644		 * touch the input buffer
  645		 */
 646		input = xbuf[0];
 647		input += align_offset;
 648		assoc = axbuf[0];
 649
 650		ret = -EINVAL;
 651		if (WARN_ON(align_offset + template[i].ilen >
 652			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
 653			goto out;
 654
 655		memcpy(input, template[i].input, template[i].ilen);
 656		memcpy(assoc, template[i].assoc, template[i].alen);
 657		if (template[i].iv)
 658			memcpy(iv, template[i].iv, iv_len);
 659		else
 660			memset(iv, 0, iv_len);
 661
 662		crypto_aead_clear_flags(tfm, ~0);
 663		if (template[i].wk)
 664			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 665
 666		if (template[i].klen > MAX_KEYLEN) {
 667			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
 668			       d, j, algo, template[i].klen,
 669			       MAX_KEYLEN);
 670			ret = -EINVAL;
 671			goto out;
 672		}
 673		memcpy(key, template[i].key, template[i].klen);
 674
 675		ret = crypto_aead_setkey(tfm, key, template[i].klen);
 676		if (template[i].fail == !ret) {
 677			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
 678			       d, j, algo, crypto_aead_get_flags(tfm));
 679			goto out;
 680		} else if (ret)
 681			continue;
 682
 683		authsize = abs(template[i].rlen - template[i].ilen);
 684		ret = crypto_aead_setauthsize(tfm, authsize);
 685		if (ret) {
 686			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
 687			       d, authsize, j, algo);
 688			goto out;
 689		}
 690
 691		k = !!template[i].alen;
 692		sg_init_table(sg, k + 1);
 693		sg_set_buf(&sg[0], assoc, template[i].alen);
 694		sg_set_buf(&sg[k], input,
 695			   template[i].ilen + (enc ? authsize : 0));
 696		output = input;
 697
 698		if (diff_dst) {
 699			sg_init_table(sgout, k + 1);
 700			sg_set_buf(&sgout[0], assoc, template[i].alen);
 701
 702			output = xoutbuf[0];
 703			output += align_offset;
 704			sg_set_buf(&sgout[k], output,
 705				   template[i].rlen + (enc ? 0 : authsize));
 706		}
 707
 708		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 709				       template[i].ilen, iv);
 710
 711		aead_request_set_ad(req, template[i].alen);
 712
 713		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
 714				      : crypto_aead_decrypt(req), &wait);
 715
 716		switch (ret) {
 717		case 0:
 718			if (template[i].novrfy) {
 719				/* verification was supposed to fail */
 720				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
 721				       d, e, j, algo);
 722				/* so really, we got a bad message */
 723				ret = -EBADMSG;
 724				goto out;
 725			}
 726			break;
 727		case -EBADMSG:
 728			if (template[i].novrfy)
 729				/* verification failure was expected */
 730				continue;
 731			/* fall through */
 732		default:
 733			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
 734			       d, e, j, algo, -ret);
 735			goto out;
 736		}
 737
 738		q = output;
 739		if (memcmp(q, template[i].result, template[i].rlen)) {
 740			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
 741			       d, j, e, algo);
 742			hexdump(q, template[i].rlen);
 743			ret = -EINVAL;
 744			goto out;
 745		}
 746	}
 747
 748	for (i = 0, j = 0; i < tcount; i++) {
  749		/* alignment tests are only done with contiguous buffers */
 750		if (align_offset != 0)
 751			break;
 752
 753		if (!template[i].np)
 754			continue;
 755
 756		j++;
 757
 758		if (template[i].iv)
 759			memcpy(iv, template[i].iv, iv_len);
 760		else
 761			memset(iv, 0, MAX_IVLEN);
 762
 763		crypto_aead_clear_flags(tfm, ~0);
 764		if (template[i].wk)
 765			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 766		if (template[i].klen > MAX_KEYLEN) {
 767			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
 768			       d, j, algo, template[i].klen, MAX_KEYLEN);
 769			ret = -EINVAL;
 770			goto out;
 771		}
 772		memcpy(key, template[i].key, template[i].klen);
 773
 774		ret = crypto_aead_setkey(tfm, key, template[i].klen);
 775		if (template[i].fail == !ret) {
 776			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
 777			       d, j, algo, crypto_aead_get_flags(tfm));
 778			goto out;
 779		} else if (ret)
 780			continue;
 781
 782		authsize = abs(template[i].rlen - template[i].ilen);
 783
 784		ret = -EINVAL;
 785		sg_init_table(sg, template[i].anp + template[i].np);
 786		if (diff_dst)
 787			sg_init_table(sgout, template[i].anp + template[i].np);
 788
 789		ret = -EINVAL;
 790		for (k = 0, temp = 0; k < template[i].anp; k++) {
 791			if (WARN_ON(offset_in_page(IDX[k]) +
 792				    template[i].atap[k] > PAGE_SIZE))
 793				goto out;
 794			sg_set_buf(&sg[k],
 795				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
 796					  offset_in_page(IDX[k]),
 797					  template[i].assoc + temp,
 798					  template[i].atap[k]),
 799				   template[i].atap[k]);
 800			if (diff_dst)
 801				sg_set_buf(&sgout[k],
 802					   axbuf[IDX[k] >> PAGE_SHIFT] +
 803					   offset_in_page(IDX[k]),
 804					   template[i].atap[k]);
 805			temp += template[i].atap[k];
 806		}
 807
 808		for (k = 0, temp = 0; k < template[i].np; k++) {
 809			if (WARN_ON(offset_in_page(IDX[k]) +
 810				    template[i].tap[k] > PAGE_SIZE))
 811				goto out;
 812
 813			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
 814			memcpy(q, template[i].input + temp, template[i].tap[k]);
 815			sg_set_buf(&sg[template[i].anp + k],
 816				   q, template[i].tap[k]);
 817
 818			if (diff_dst) {
 819				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 820				    offset_in_page(IDX[k]);
 821
 822				memset(q, 0, template[i].tap[k]);
 823
 824				sg_set_buf(&sgout[template[i].anp + k],
 825					   q, template[i].tap[k]);
 826			}
 827
 828			n = template[i].tap[k];
 829			if (k == template[i].np - 1 && enc)
 830				n += authsize;
 831			if (offset_in_page(q) + n < PAGE_SIZE)
 832				q[n] = 0;
 833
 834			temp += template[i].tap[k];
 835		}
 836
 837		ret = crypto_aead_setauthsize(tfm, authsize);
 838		if (ret) {
 839			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
 840			       d, authsize, j, algo);
 841			goto out;
 842		}
 843
 844		if (enc) {
 845			if (WARN_ON(sg[template[i].anp + k - 1].offset +
 846				    sg[template[i].anp + k - 1].length +
 847				    authsize > PAGE_SIZE)) {
 848				ret = -EINVAL;
 849				goto out;
 850			}
 851
 852			if (diff_dst)
 853				sgout[template[i].anp + k - 1].length +=
 854					authsize;
 855			sg[template[i].anp + k - 1].length += authsize;
 856		}
 857
 858		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 859				       template[i].ilen,
 860				       iv);
 861
 862		aead_request_set_ad(req, template[i].alen);
 863
 864		ret = crypto_wait_req(enc ? crypto_aead_encrypt(req)
 865				      : crypto_aead_decrypt(req), &wait);
 866
 867		switch (ret) {
 868		case 0:
 869			if (template[i].novrfy) {
 870				/* verification was supposed to fail */
 871				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
 872				       d, e, j, algo);
 873				/* so really, we got a bad message */
 874				ret = -EBADMSG;
 875				goto out;
 876			}
 877			break;
 878		case -EBADMSG:
 879			if (template[i].novrfy)
 880				/* verification failure was expected */
 881				continue;
 882			/* fall through */
 883		default:
 884			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
 885			       d, e, j, algo, -ret);
 886			goto out;
 887		}
 888
 889		ret = -EINVAL;
 890		for (k = 0, temp = 0; k < template[i].np; k++) {
 891			if (diff_dst)
 892				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 893				    offset_in_page(IDX[k]);
 894			else
 895				q = xbuf[IDX[k] >> PAGE_SHIFT] +
 896				    offset_in_page(IDX[k]);
 897
 898			n = template[i].tap[k];
 899			if (k == template[i].np - 1)
 900				n += enc ? authsize : -authsize;
 901
 902			if (memcmp(q, template[i].result + temp, n)) {
 903				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
 904				       d, j, e, k, algo);
 905				hexdump(q, n);
 906				goto out;
 907			}
 908
 909			q += n;
 910			if (k == template[i].np - 1 && !enc) {
 911				if (!diff_dst &&
 912					memcmp(q, template[i].input +
 913					      temp + n, authsize))
 914					n = authsize;
 915				else
 916					n = 0;
 917			} else {
 918				for (n = 0; offset_in_page(q + n) && q[n]; n++)
 919					;
 920			}
 921			if (n) {
 922				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
 923				       d, j, e, k, algo, n);
 924				hexdump(q, n);
 925				goto out;
 926			}
 927
 928			temp += template[i].tap[k];
 929		}
 930	}
 931
 932	ret = 0;
 933
 934out:
 935	aead_request_free(req);
 936	kfree(sg);
 937out_nosg:
 938	if (diff_dst)
 939		testmgr_free_buf(xoutbuf);
 940out_nooutbuf:
 941	testmgr_free_buf(axbuf);
 942out_noaxbuf:
 943	testmgr_free_buf(xbuf);
 944out_noxbuf:
 945	kfree(key);
 946	kfree(iv);
 947	return ret;
 948}
 949
 950static int test_aead(struct crypto_aead *tfm, int enc,
 951		     const struct aead_testvec *template, unsigned int tcount)
 952{
 953	unsigned int alignmask;
 954	int ret;
 955
 956	/* test 'dst == src' case */
 957	ret = __test_aead(tfm, enc, template, tcount, false, 0);
 958	if (ret)
 959		return ret;
 960
 961	/* test 'dst != src' case */
 962	ret = __test_aead(tfm, enc, template, tcount, true, 0);
 963	if (ret)
 964		return ret;
 965
 966	/* test unaligned buffers, check with one byte offset */
 967	ret = __test_aead(tfm, enc, template, tcount, true, 1);
 968	if (ret)
 969		return ret;
 970
 971	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 972	if (alignmask) {
 973		/* Check if alignment mask for tfm is correctly set. */
 974		ret = __test_aead(tfm, enc, template, tcount, true,
 975				  alignmask + 1);
 976		if (ret)
 977			return ret;
 978	}
 979
 980	return 0;
 981}
 982
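/*
 * Simple single-block cipher test: each vector is transformed in place, one
 * crypto_cipher_blocksize() chunk at a time, and compared with the expected
 * result.  Chunked (np) vectors are skipped here; they are covered by the
 * skcipher path instead.
 */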
 983static int test_cipher(struct crypto_cipher *tfm, int enc,
 984		       const struct cipher_testvec *template,
 985		       unsigned int tcount)
 986{
 987	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
 988	unsigned int i, j, k;
 989	char *q;
 990	const char *e;
 991	void *data;
 992	char *xbuf[XBUFSIZE];
 993	int ret = -ENOMEM;
 994
 995	if (testmgr_alloc_buf(xbuf))
 996		goto out_nobuf;
 997
 998	if (enc == ENCRYPT)
  999		e = "encryption";
1000	else
1001		e = "decryption";
1002
1003	j = 0;
1004	for (i = 0; i < tcount; i++) {
1005		if (template[i].np)
1006			continue;
1007
1008		if (fips_enabled && template[i].fips_skip)
1009			continue;
1010
1011		j++;
1012
1013		ret = -EINVAL;
1014		if (WARN_ON(template[i].ilen > PAGE_SIZE))
1015			goto out;
1016
1017		data = xbuf[0];
1018		memcpy(data, template[i].input, template[i].ilen);
1019
1020		crypto_cipher_clear_flags(tfm, ~0);
1021		if (template[i].wk)
1022			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1023
1024		ret = crypto_cipher_setkey(tfm, template[i].key,
1025					   template[i].klen);
1026		if (template[i].fail == !ret) {
1027			printk(KERN_ERR "alg: cipher: setkey failed "
1028			       "on test %d for %s: flags=%x\n", j,
1029			       algo, crypto_cipher_get_flags(tfm));
1030			goto out;
1031		} else if (ret)
1032			continue;
1033
1034		for (k = 0; k < template[i].ilen;
1035		     k += crypto_cipher_blocksize(tfm)) {
1036			if (enc)
1037				crypto_cipher_encrypt_one(tfm, data + k,
1038							  data + k);
1039			else
1040				crypto_cipher_decrypt_one(tfm, data + k,
1041							  data + k);
1042		}
1043
1044		q = data;
1045		if (memcmp(q, template[i].result, template[i].rlen)) {
1046			printk(KERN_ERR "alg: cipher: Test %d failed "
1047			       "on %s for %s\n", j, e, algo);
1048			hexdump(q, template[i].rlen);
1049			ret = -EINVAL;
1050			goto out;
1051		}
1052	}
1053
1054	ret = 0;
1055
1056out:
1057	testmgr_free_buf(xbuf);
1058out_nobuf:
1059	return ret;
1060}
1061
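/*
 * Core skcipher test.  Linear vectors are run first (optionally shifted by
 * align_offset, with the output IV compared against iv_out when the template
 * provides one); chunked vectors are then scattered across pages according
 * to tap[], with a guard byte after each chunk to catch writes past the end.
 */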
1062static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1063			   const struct cipher_testvec *template,
1064			   unsigned int tcount,
1065			   const bool diff_dst, const int align_offset)
1066{
1067	const char *algo =
1068		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1069	unsigned int i, j, k, n, temp;
1070	char *q;
1071	struct skcipher_request *req;
1072	struct scatterlist sg[8];
1073	struct scatterlist sgout[8];
1074	const char *e, *d;
1075	struct crypto_wait wait;
1076	void *data;
1077	char iv[MAX_IVLEN];
1078	char *xbuf[XBUFSIZE];
1079	char *xoutbuf[XBUFSIZE];
1080	int ret = -ENOMEM;
1081	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1082
1083	if (testmgr_alloc_buf(xbuf))
1084		goto out_nobuf;
1085
1086	if (diff_dst && testmgr_alloc_buf(xoutbuf))
1087		goto out_nooutbuf;
1088
1089	if (diff_dst)
1090		d = "-ddst";
1091	else
1092		d = "";
1093
1094	if (enc == ENCRYPT)
 1095		e = "encryption";
1096	else
1097		e = "decryption";
1098
1099	crypto_init_wait(&wait);
1100
1101	req = skcipher_request_alloc(tfm, GFP_KERNEL);
1102	if (!req) {
1103		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1104		       d, algo);
1105		goto out;
1106	}
1107
1108	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1109				      crypto_req_done, &wait);
1110
1111	j = 0;
1112	for (i = 0; i < tcount; i++) {
1113		if (template[i].np && !template[i].also_non_np)
1114			continue;
1115
1116		if (fips_enabled && template[i].fips_skip)
1117			continue;
1118
1119		if (template[i].iv)
1120			memcpy(iv, template[i].iv, ivsize);
1121		else
1122			memset(iv, 0, MAX_IVLEN);
1123
1124		j++;
1125		ret = -EINVAL;
1126		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1127			goto out;
1128
1129		data = xbuf[0];
1130		data += align_offset;
1131		memcpy(data, template[i].input, template[i].ilen);
1132
1133		crypto_skcipher_clear_flags(tfm, ~0);
1134		if (template[i].wk)
1135			crypto_skcipher_set_flags(tfm,
1136						  CRYPTO_TFM_REQ_WEAK_KEY);
1137
1138		ret = crypto_skcipher_setkey(tfm, template[i].key,
1139					     template[i].klen);
1140		if (template[i].fail == !ret) {
1141			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1142			       d, j, algo, crypto_skcipher_get_flags(tfm));
1143			goto out;
1144		} else if (ret)
1145			continue;
1146
1147		sg_init_one(&sg[0], data, template[i].ilen);
1148		if (diff_dst) {
1149			data = xoutbuf[0];
1150			data += align_offset;
1151			sg_init_one(&sgout[0], data, template[i].ilen);
1152		}
1153
1154		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1155					   template[i].ilen, iv);
1156		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1157				      crypto_skcipher_decrypt(req), &wait);
1158
1159		if (ret) {
1160			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1161			       d, e, j, algo, -ret);
1162			goto out;
1163		}
1164
1165		q = data;
1166		if (memcmp(q, template[i].result, template[i].rlen)) {
1167			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1168			       d, j, e, algo);
1169			hexdump(q, template[i].rlen);
1170			ret = -EINVAL;
1171			goto out;
1172		}
1173
1174		if (template[i].iv_out &&
1175		    memcmp(iv, template[i].iv_out,
1176			   crypto_skcipher_ivsize(tfm))) {
1177			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1178			       d, j, e, algo);
1179			hexdump(iv, crypto_skcipher_ivsize(tfm));
1180			ret = -EINVAL;
1181			goto out;
1182		}
1183	}
1184
1185	j = 0;
1186	for (i = 0; i < tcount; i++) {
 1187		/* alignment tests are only done with contiguous buffers */
1188		if (align_offset != 0)
1189			break;
1190
1191		if (!template[i].np)
1192			continue;
1193
1194		if (fips_enabled && template[i].fips_skip)
1195			continue;
1196
1197		if (template[i].iv)
1198			memcpy(iv, template[i].iv, ivsize);
1199		else
1200			memset(iv, 0, MAX_IVLEN);
1201
1202		j++;
1203		crypto_skcipher_clear_flags(tfm, ~0);
1204		if (template[i].wk)
1205			crypto_skcipher_set_flags(tfm,
1206						  CRYPTO_TFM_REQ_WEAK_KEY);
1207
1208		ret = crypto_skcipher_setkey(tfm, template[i].key,
1209					     template[i].klen);
1210		if (template[i].fail == !ret) {
1211			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1212			       d, j, algo, crypto_skcipher_get_flags(tfm));
1213			goto out;
1214		} else if (ret)
1215			continue;
1216
1217		temp = 0;
1218		ret = -EINVAL;
1219		sg_init_table(sg, template[i].np);
1220		if (diff_dst)
1221			sg_init_table(sgout, template[i].np);
1222		for (k = 0; k < template[i].np; k++) {
1223			if (WARN_ON(offset_in_page(IDX[k]) +
1224				    template[i].tap[k] > PAGE_SIZE))
1225				goto out;
1226
1227			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1228
1229			memcpy(q, template[i].input + temp, template[i].tap[k]);
1230
1231			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1232				q[template[i].tap[k]] = 0;
1233
1234			sg_set_buf(&sg[k], q, template[i].tap[k]);
1235			if (diff_dst) {
1236				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1237				    offset_in_page(IDX[k]);
1238
1239				sg_set_buf(&sgout[k], q, template[i].tap[k]);
1240
1241				memset(q, 0, template[i].tap[k]);
1242				if (offset_in_page(q) +
1243				    template[i].tap[k] < PAGE_SIZE)
1244					q[template[i].tap[k]] = 0;
1245			}
1246
1247			temp += template[i].tap[k];
1248		}
1249
1250		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1251					   template[i].ilen, iv);
1252
1253		ret = crypto_wait_req(enc ? crypto_skcipher_encrypt(req) :
1254				      crypto_skcipher_decrypt(req), &wait);
1255
1256		if (ret) {
1257			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1258			       d, e, j, algo, -ret);
1259			goto out;
1260		}
1261
1262		temp = 0;
1263		ret = -EINVAL;
1264		for (k = 0; k < template[i].np; k++) {
1265			if (diff_dst)
1266				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1267				    offset_in_page(IDX[k]);
1268			else
1269				q = xbuf[IDX[k] >> PAGE_SHIFT] +
1270				    offset_in_page(IDX[k]);
1271
1272			if (memcmp(q, template[i].result + temp,
1273				   template[i].tap[k])) {
1274				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1275				       d, j, e, k, algo);
1276				hexdump(q, template[i].tap[k]);
1277				goto out;
1278			}
1279
1280			q += template[i].tap[k];
1281			for (n = 0; offset_in_page(q + n) && q[n]; n++)
1282				;
1283			if (n) {
1284				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1285				       d, j, e, k, algo, n);
1286				hexdump(q, n);
1287				goto out;
1288			}
1289			temp += template[i].tap[k];
1290		}
1291	}
1292
1293	ret = 0;
1294
1295out:
1296	skcipher_request_free(req);
1297	if (diff_dst)
1298		testmgr_free_buf(xoutbuf);
1299out_nooutbuf:
1300	testmgr_free_buf(xbuf);
1301out_nobuf:
1302	return ret;
1303}
1304
1305static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1306			 const struct cipher_testvec *template,
1307			 unsigned int tcount)
1308{
1309	unsigned int alignmask;
1310	int ret;
1311
1312	/* test 'dst == src' case */
1313	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1314	if (ret)
1315		return ret;
1316
1317	/* test 'dst != src' case */
1318	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1319	if (ret)
1320		return ret;
1321
1322	/* test unaligned buffers, check with one byte offset */
1323	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1324	if (ret)
1325		return ret;
1326
1327	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1328	if (alignmask) {
1329		/* Check if alignment mask for tfm is correctly set. */
1330		ret = __test_skcipher(tfm, enc, template, tcount, true,
1331				      alignmask + 1);
1332		if (ret)
1333			return ret;
1334	}
1335
1336	return 0;
1337}
1338
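/*
 * Test the legacy synchronous compression interface: compress and decompress
 * fixed vectors into an on-stack COMP_BUF_SIZE buffer and compare both the
 * output length and the payload.
 */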
1339static int test_comp(struct crypto_comp *tfm,
1340		     const struct comp_testvec *ctemplate,
1341		     const struct comp_testvec *dtemplate,
1342		     int ctcount, int dtcount)
1343{
1344	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1345	unsigned int i;
1346	char result[COMP_BUF_SIZE];
1347	int ret;
1348
1349	for (i = 0; i < ctcount; i++) {
1350		int ilen;
1351		unsigned int dlen = COMP_BUF_SIZE;
1352
1353		memset(result, 0, sizeof (result));
1354
1355		ilen = ctemplate[i].inlen;
1356		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1357		                           ilen, result, &dlen);
1358		if (ret) {
1359			printk(KERN_ERR "alg: comp: compression failed "
1360			       "on test %d for %s: ret=%d\n", i + 1, algo,
1361			       -ret);
1362			goto out;
1363		}
1364
1365		if (dlen != ctemplate[i].outlen) {
1366			printk(KERN_ERR "alg: comp: Compression test %d "
1367			       "failed for %s: output len = %d\n", i + 1, algo,
1368			       dlen);
1369			ret = -EINVAL;
1370			goto out;
1371		}
1372
1373		if (memcmp(result, ctemplate[i].output, dlen)) {
1374			printk(KERN_ERR "alg: comp: Compression test %d "
1375			       "failed for %s\n", i + 1, algo);
1376			hexdump(result, dlen);
1377			ret = -EINVAL;
1378			goto out;
1379		}
1380	}
1381
1382	for (i = 0; i < dtcount; i++) {
1383		int ilen;
1384		unsigned int dlen = COMP_BUF_SIZE;
1385
1386		memset(result, 0, sizeof (result));
1387
1388		ilen = dtemplate[i].inlen;
1389		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1390		                             ilen, result, &dlen);
1391		if (ret) {
1392			printk(KERN_ERR "alg: comp: decompression failed "
1393			       "on test %d for %s: ret=%d\n", i + 1, algo,
1394			       -ret);
1395			goto out;
1396		}
1397
1398		if (dlen != dtemplate[i].outlen) {
1399			printk(KERN_ERR "alg: comp: Decompression test %d "
1400			       "failed for %s: output len = %d\n", i + 1, algo,
1401			       dlen);
1402			ret = -EINVAL;
1403			goto out;
1404		}
1405
1406		if (memcmp(result, dtemplate[i].output, dlen)) {
1407			printk(KERN_ERR "alg: comp: Decompression test %d "
1408			       "failed for %s\n", i + 1, algo);
1409			hexdump(result, dlen);
1410			ret = -EINVAL;
1411			goto out;
1412		}
1413	}
1414
1415	ret = 0;
1416
1417out:
1418	return ret;
1419}
1420
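/*
 * Test the asynchronous compression (acomp) interface.  Every compression
 * vector is compressed and then decompressed again, and the round trip must
 * reproduce the original input; decompression vectors are checked against
 * their expected output.  All data is passed through scatterlists and the
 * requests are awaited with crypto_wait_req().
 */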
1421static int test_acomp(struct crypto_acomp *tfm,
1422		      const struct comp_testvec *ctemplate,
1423		      const struct comp_testvec *dtemplate,
1424		      int ctcount, int dtcount)
1425{
1426	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1427	unsigned int i;
1428	char *output, *decomp_out;
1429	int ret;
1430	struct scatterlist src, dst;
1431	struct acomp_req *req;
1432	struct crypto_wait wait;
1433
1434	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1435	if (!output)
1436		return -ENOMEM;
1437
1438	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1439	if (!decomp_out) {
1440		kfree(output);
1441		return -ENOMEM;
1442	}
1443
1444	for (i = 0; i < ctcount; i++) {
1445		unsigned int dlen = COMP_BUF_SIZE;
1446		int ilen = ctemplate[i].inlen;
1447		void *input_vec;
1448
1449		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1450		if (!input_vec) {
1451			ret = -ENOMEM;
1452			goto out;
1453		}
1454
1455		memset(output, 0, dlen);
1456		crypto_init_wait(&wait);
1457		sg_init_one(&src, input_vec, ilen);
1458		sg_init_one(&dst, output, dlen);
1459
1460		req = acomp_request_alloc(tfm);
1461		if (!req) {
1462			pr_err("alg: acomp: request alloc failed for %s\n",
1463			       algo);
1464			kfree(input_vec);
1465			ret = -ENOMEM;
1466			goto out;
1467		}
1468
1469		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1470		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1471					   crypto_req_done, &wait);
1472
1473		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
1474		if (ret) {
1475			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1476			       i + 1, algo, -ret);
1477			kfree(input_vec);
1478			acomp_request_free(req);
1479			goto out;
1480		}
1481
1482		ilen = req->dlen;
1483		dlen = COMP_BUF_SIZE;
1484		sg_init_one(&src, output, ilen);
1485		sg_init_one(&dst, decomp_out, dlen);
1486		crypto_init_wait(&wait);
1487		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1488
1489		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1490		if (ret) {
 1491			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1492			       i + 1, algo, -ret);
1493			kfree(input_vec);
1494			acomp_request_free(req);
1495			goto out;
1496		}
1497
1498		if (req->dlen != ctemplate[i].inlen) {
1499			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1500			       i + 1, algo, req->dlen);
1501			ret = -EINVAL;
1502			kfree(input_vec);
1503			acomp_request_free(req);
1504			goto out;
1505		}
1506
1507		if (memcmp(input_vec, decomp_out, req->dlen)) {
1508			pr_err("alg: acomp: Compression test %d failed for %s\n",
1509			       i + 1, algo);
1510			hexdump(output, req->dlen);
1511			ret = -EINVAL;
1512			kfree(input_vec);
1513			acomp_request_free(req);
1514			goto out;
1515		}
1516
1517		kfree(input_vec);
1518		acomp_request_free(req);
1519	}
1520
1521	for (i = 0; i < dtcount; i++) {
1522		unsigned int dlen = COMP_BUF_SIZE;
1523		int ilen = dtemplate[i].inlen;
1524		void *input_vec;
1525
1526		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
1527		if (!input_vec) {
1528			ret = -ENOMEM;
1529			goto out;
1530		}
1531
1532		memset(output, 0, dlen);
1533		crypto_init_wait(&wait);
1534		sg_init_one(&src, input_vec, ilen);
1535		sg_init_one(&dst, output, dlen);
1536
1537		req = acomp_request_alloc(tfm);
1538		if (!req) {
1539			pr_err("alg: acomp: request alloc failed for %s\n",
1540			       algo);
1541			kfree(input_vec);
1542			ret = -ENOMEM;
1543			goto out;
1544		}
1545
1546		acomp_request_set_params(req, &src, &dst, ilen, dlen);
1547		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1548					   crypto_req_done, &wait);
1549
1550		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
1551		if (ret) {
1552			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1553			       i + 1, algo, -ret);
1554			kfree(input_vec);
1555			acomp_request_free(req);
1556			goto out;
1557		}
1558
1559		if (req->dlen != dtemplate[i].outlen) {
1560			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1561			       i + 1, algo, req->dlen);
1562			ret = -EINVAL;
1563			kfree(input_vec);
1564			acomp_request_free(req);
1565			goto out;
1566		}
1567
1568		if (memcmp(output, dtemplate[i].output, req->dlen)) {
1569			pr_err("alg: acomp: Decompression test %d failed for %s\n",
1570			       i + 1, algo);
1571			hexdump(output, req->dlen);
1572			ret = -EINVAL;
1573			kfree(input_vec);
1574			acomp_request_free(req);
1575			goto out;
1576		}
1577
1578		kfree(input_vec);
1579		acomp_request_free(req);
1580	}
1581
1582	ret = 0;
1583
1584out:
1585	kfree(decomp_out);
1586	kfree(output);
1587	return ret;
1588}
1589
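/*
 * CPRNG known-answer test: the seed is assembled as V || key || DT from the
 * template, the RNG is reset with it, and crypto_rng_get_bytes() is called
 * 'loops' times; the bytes produced by the final call are compared with the
 * expected output.
 */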
1590static int test_cprng(struct crypto_rng *tfm,
1591		      const struct cprng_testvec *template,
1592		      unsigned int tcount)
1593{
1594	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1595	int err = 0, i, j, seedsize;
1596	u8 *seed;
1597	char result[32];
1598
1599	seedsize = crypto_rng_seedsize(tfm);
1600
1601	seed = kmalloc(seedsize, GFP_KERNEL);
1602	if (!seed) {
1603		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1604		       "for %s\n", algo);
1605		return -ENOMEM;
1606	}
1607
1608	for (i = 0; i < tcount; i++) {
1609		memset(result, 0, 32);
1610
1611		memcpy(seed, template[i].v, template[i].vlen);
1612		memcpy(seed + template[i].vlen, template[i].key,
1613		       template[i].klen);
1614		memcpy(seed + template[i].vlen + template[i].klen,
1615		       template[i].dt, template[i].dtlen);
1616
1617		err = crypto_rng_reset(tfm, seed, seedsize);
1618		if (err) {
1619			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1620			       "for %s\n", algo);
1621			goto out;
1622		}
1623
1624		for (j = 0; j < template[i].loops; j++) {
1625			err = crypto_rng_get_bytes(tfm, result,
1626						   template[i].rlen);
1627			if (err < 0) {
1628				printk(KERN_ERR "alg: cprng: Failed to obtain "
1629				       "the correct amount of random data for "
1630				       "%s (requested %d)\n", algo,
1631				       template[i].rlen);
1632				goto out;
1633			}
1634		}
1635
1636		err = memcmp(result, template[i].result,
1637			     template[i].rlen);
1638		if (err) {
1639			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1640			       i, algo);
1641			hexdump(result, template[i].rlen);
1642			err = -EINVAL;
1643			goto out;
1644		}
1645	}
1646
1647out:
1648	kfree(seed);
1649	return err;
1650}
1651
1652static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1653			 u32 type, u32 mask)
1654{
1655	struct crypto_aead *tfm;
1656	int err = 0;
1657
1658	tfm = crypto_alloc_aead(driver, type, mask);
1659	if (IS_ERR(tfm)) {
1660		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1661		       "%ld\n", driver, PTR_ERR(tfm));
1662		return PTR_ERR(tfm);
1663	}
1664
1665	if (desc->suite.aead.enc.vecs) {
1666		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1667				desc->suite.aead.enc.count);
1668		if (err)
1669			goto out;
1670	}
1671
1672	if (!err && desc->suite.aead.dec.vecs)
1673		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1674				desc->suite.aead.dec.count);
1675
1676out:
1677	crypto_free_aead(tfm);
1678	return err;
1679}
1680
1681static int alg_test_cipher(const struct alg_test_desc *desc,
1682			   const char *driver, u32 type, u32 mask)
1683{
1684	struct crypto_cipher *tfm;
1685	int err = 0;
1686
1687	tfm = crypto_alloc_cipher(driver, type, mask);
1688	if (IS_ERR(tfm)) {
1689		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1690		       "%s: %ld\n", driver, PTR_ERR(tfm));
1691		return PTR_ERR(tfm);
1692	}
1693
1694	if (desc->suite.cipher.enc.vecs) {
1695		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1696				  desc->suite.cipher.enc.count);
1697		if (err)
1698			goto out;
1699	}
1700
1701	if (desc->suite.cipher.dec.vecs)
1702		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1703				  desc->suite.cipher.dec.count);
1704
1705out:
1706	crypto_free_cipher(tfm);
1707	return err;
1708}
1709
1710static int alg_test_skcipher(const struct alg_test_desc *desc,
1711			     const char *driver, u32 type, u32 mask)
1712{
1713	struct crypto_skcipher *tfm;
1714	int err = 0;
1715
1716	tfm = crypto_alloc_skcipher(driver, type, mask);
1717	if (IS_ERR(tfm)) {
1718		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1719		       "%s: %ld\n", driver, PTR_ERR(tfm));
1720		return PTR_ERR(tfm);
1721	}
1722
1723	if (desc->suite.cipher.enc.vecs) {
1724		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1725				    desc->suite.cipher.enc.count);
1726		if (err)
1727			goto out;
1728	}
1729
1730	if (desc->suite.cipher.dec.vecs)
1731		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1732				    desc->suite.cipher.dec.count);
1733
1734out:
1735	crypto_free_skcipher(tfm);
1736	return err;
1737}
1738
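/*
 * Compression algorithms are dispatched on their type: acomp implementations
 * go through test_acomp(), everything else through the legacy test_comp()
 * path.
 */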
1739static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1740			 u32 type, u32 mask)
1741{
1742	struct crypto_comp *comp;
1743	struct crypto_acomp *acomp;
1744	int err;
1745	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1746
1747	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1748		acomp = crypto_alloc_acomp(driver, type, mask);
1749		if (IS_ERR(acomp)) {
1750			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1751			       driver, PTR_ERR(acomp));
1752			return PTR_ERR(acomp);
1753		}
1754		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1755				 desc->suite.comp.decomp.vecs,
1756				 desc->suite.comp.comp.count,
1757				 desc->suite.comp.decomp.count);
1758		crypto_free_acomp(acomp);
1759	} else {
1760		comp = crypto_alloc_comp(driver, type, mask);
1761		if (IS_ERR(comp)) {
1762			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1763			       driver, PTR_ERR(comp));
1764			return PTR_ERR(comp);
1765		}
1766
1767		err = test_comp(comp, desc->suite.comp.comp.vecs,
1768				desc->suite.comp.decomp.vecs,
1769				desc->suite.comp.comp.count,
1770				desc->suite.comp.decomp.count);
1771
1772		crypto_free_comp(comp);
1773	}
1774	return err;
1775}
1776
1777static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1778			 u32 type, u32 mask)
1779{
1780	struct crypto_ahash *tfm;
1781	int err;
1782
1783	tfm = crypto_alloc_ahash(driver, type, mask);
1784	if (IS_ERR(tfm)) {
1785		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1786		       "%ld\n", driver, PTR_ERR(tfm));
1787		return PTR_ERR(tfm);
1788	}
1789
1790	err = test_hash(tfm, desc->suite.hash.vecs,
1791			desc->suite.hash.count, true);
1792	if (!err)
1793		err = test_hash(tfm, desc->suite.hash.vecs,
1794				desc->suite.hash.count, false);
1795
1796	crypto_free_ahash(tfm);
1797	return err;
1798}
1799
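/*
 * In addition to the generic hash tests, sanity-check the crc32c shash
 * convention: a known 32-bit state is written straight into the descriptor
 * context and final() must return its bitwise complement.
 */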
1800static int alg_test_crc32c(const struct alg_test_desc *desc,
1801			   const char *driver, u32 type, u32 mask)
1802{
1803	struct crypto_shash *tfm;
1804	u32 val;
1805	int err;
1806
1807	err = alg_test_hash(desc, driver, type, mask);
1808	if (err)
1809		goto out;
1810
1811	tfm = crypto_alloc_shash(driver, type, mask);
1812	if (IS_ERR(tfm)) {
1813		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1814		       "%ld\n", driver, PTR_ERR(tfm));
1815		err = PTR_ERR(tfm);
1816		goto out;
1817	}
1818
1819	do {
1820		SHASH_DESC_ON_STACK(shash, tfm);
1821		u32 *ctx = (u32 *)shash_desc_ctx(shash);
1822
1823		shash->tfm = tfm;
1824		shash->flags = 0;
1825
1826		*ctx = le32_to_cpu(420553207);
1827		err = crypto_shash_final(shash, (u8 *)&val);
1828		if (err) {
1829			printk(KERN_ERR "alg: crc32c: Operation failed for "
1830			       "%s: %d\n", driver, err);
1831			break;
1832		}
1833
1834		if (val != ~420553207) {
1835			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1836			       "%d\n", driver, val);
1837			err = -EINVAL;
1838		}
1839	} while (0);
1840
1841	crypto_free_shash(tfm);
1842
1843out:
1844	return err;
1845}
1846
1847static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1848			  u32 type, u32 mask)
1849{
1850	struct crypto_rng *rng;
1851	int err;
1852
1853	rng = crypto_alloc_rng(driver, type, mask);
1854	if (IS_ERR(rng)) {
1855		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1856		       "%ld\n", driver, PTR_ERR(rng));
1857		return PTR_ERR(rng);
1858	}
1859
1860	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1861
1862	crypto_free_rng(rng);
1863
1864	return err;
1865}
1866
1867
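/*
 * Run one CAVS-style DRBG known-answer test: instantiate with the test
 * entropy and personalization string, perform two generate calls with
 * additional input (re-seeding from test entropy when prediction resistance
 * is requested), and compare the second output block with the expected data.
 */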
1868static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
1869			  const char *driver, u32 type, u32 mask)
1870{
1871	int ret = -EAGAIN;
1872	struct crypto_rng *drng;
1873	struct drbg_test_data test_data;
1874	struct drbg_string addtl, pers, testentropy;
1875	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1876
1877	if (!buf)
1878		return -ENOMEM;
1879
1880	drng = crypto_alloc_rng(driver, type, mask);
1881	if (IS_ERR(drng)) {
1882		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1883		       "%s\n", driver);
1884		kzfree(buf);
1885		return -ENOMEM;
1886	}
1887
1888	test_data.testentropy = &testentropy;
1889	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1890	drbg_string_fill(&pers, test->pers, test->perslen);
1891	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1892	if (ret) {
1893		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1894		goto outbuf;
1895	}
1896
1897	drbg_string_fill(&addtl, test->addtla, test->addtllen);
1898	if (pr) {
1899		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1900		ret = crypto_drbg_get_bytes_addtl_test(drng,
1901			buf, test->expectedlen, &addtl,	&test_data);
1902	} else {
1903		ret = crypto_drbg_get_bytes_addtl(drng,
1904			buf, test->expectedlen, &addtl);
1905	}
1906	if (ret < 0) {
1907		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1908		       "driver %s\n", driver);
1909		goto outbuf;
1910	}
1911
1912	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1913	if (pr) {
1914		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1915		ret = crypto_drbg_get_bytes_addtl_test(drng,
1916			buf, test->expectedlen, &addtl, &test_data);
1917	} else {
1918		ret = crypto_drbg_get_bytes_addtl(drng,
1919			buf, test->expectedlen, &addtl);
1920	}
1921	if (ret < 0) {
1922		printk(KERN_ERR "alg: drbg: could not obtain random data for "
1923		       "driver %s\n", driver);
1924		goto outbuf;
1925	}
1926
1927	ret = memcmp(test->expected, buf, test->expectedlen);
1928
1929outbuf:
1930	crypto_free_rng(drng);
1931	kzfree(buf);
1932	return ret;
1933}
1934
1935
1936static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1937			 u32 type, u32 mask)
1938{
1939	int err = 0;
1940	int pr = 0;
1941	int i = 0;
1942	const struct drbg_testvec *template = desc->suite.drbg.vecs;
1943	unsigned int tcount = desc->suite.drbg.count;
1944
1945	if (0 == memcmp(driver, "drbg_pr_", 8))
1946		pr = 1;
1947
1948	for (i = 0; i < tcount; i++) {
1949		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1950		if (err) {
1951			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1952			       i, driver);
1953			err = -EINVAL;
1954			break;
1955		}
1956	}
1957	return err;
1958
1959}
1960
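/*
 * Key-agreement (kpp) known-answer test: load party A's secret, generate A's
 * public key (compared against the template unless the implementation
 * generates its own key pair), then compute the shared secret from party B's
 * public key.  For generated keys the shared secret is recomputed from B's
 * side using A's public key and the two results must match; otherwise the
 * result is compared with the expected shared secret.
 */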
1961static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
1962		       const char *alg)
1963{
1964	struct kpp_request *req;
1965	void *input_buf = NULL;
1966	void *output_buf = NULL;
1967	void *a_public = NULL;
1968	void *a_ss = NULL;
1969	void *shared_secret = NULL;
1970	struct crypto_wait wait;
1971	unsigned int out_len_max;
1972	int err = -ENOMEM;
1973	struct scatterlist src, dst;
1974
1975	req = kpp_request_alloc(tfm, GFP_KERNEL);
1976	if (!req)
1977		return err;
1978
1979	crypto_init_wait(&wait);
1980
1981	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1982	if (err < 0)
1983		goto free_req;
1984
1985	out_len_max = crypto_kpp_maxsize(tfm);
1986	output_buf = kzalloc(out_len_max, GFP_KERNEL);
1987	if (!output_buf) {
1988		err = -ENOMEM;
1989		goto free_req;
1990	}
1991
1992	/* Use appropriate parameter as base */
1993	kpp_request_set_input(req, NULL, 0);
1994	sg_init_one(&dst, output_buf, out_len_max);
1995	kpp_request_set_output(req, &dst, out_len_max);
1996	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1997				 crypto_req_done, &wait);
1998
1999	/* Compute party A's public key */
2000	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
2001	if (err) {
2002		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
2003		       alg, err);
2004		goto free_output;
2005	}
2006
2007	if (vec->genkey) {
2008		/* Save party A's public key */
2009		a_public = kzalloc(out_len_max, GFP_KERNEL);
2010		if (!a_public) {
2011			err = -ENOMEM;
2012			goto free_output;
2013		}
2014		memcpy(a_public, sg_virt(req->dst), out_len_max);
2015	} else {
2016		/* Verify calculated public key */
2017		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
2018			   vec->expected_a_public_size)) {
2019			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
2020			       alg);
2021			err = -EINVAL;
2022			goto free_output;
2023		}
2024	}
2025
2026	/* Calculate shared secret key by using counter part (b) public key. */
2027	input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
2028	if (!input_buf) {
2029		err = -ENOMEM;
2030		goto free_output;
2031	}
2032
2033	memcpy(input_buf, vec->b_public, vec->b_public_size);
2034	sg_init_one(&src, input_buf, vec->b_public_size);
2035	sg_init_one(&dst, output_buf, out_len_max);
2036	kpp_request_set_input(req, &src, vec->b_public_size);
2037	kpp_request_set_output(req, &dst, out_len_max);
2038	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2039				 crypto_req_done, &wait);
2040	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
2041	if (err) {
2042		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
2043		       alg, err);
2044		goto free_all;
2045	}
2046
2047	if (vec->genkey) {
2048		/* Save the shared secret obtained by party A */
2049		a_ss = kzalloc(vec->expected_ss_size, GFP_KERNEL);
2050		if (!a_ss) {
2051			err = -ENOMEM;
2052			goto free_all;
2053		}
2054		memcpy(a_ss, sg_virt(req->dst), vec->expected_ss_size);
2055
2056		/*
2057		 * Calculate party B's shared secret by using party A's
2058		 * public key.
2059		 */
2060		err = crypto_kpp_set_secret(tfm, vec->b_secret,
2061					    vec->b_secret_size);
2062		if (err < 0)
2063			goto free_all;
2064
2065		sg_init_one(&src, a_public, vec->expected_a_public_size);
2066		sg_init_one(&dst, output_buf, out_len_max);
2067		kpp_request_set_input(req, &src, vec->expected_a_public_size);
2068		kpp_request_set_output(req, &dst, out_len_max);
2069		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2070					 crypto_req_done, &wait);
2071		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
2072				      &wait);
2073		if (err) {
2074			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
2075			       alg, err);
2076			goto free_all;
2077		}
2078
2079		shared_secret = a_ss;
2080	} else {
2081		shared_secret = (void *)vec->expected_ss;
2082	}
2083
2084	/*
2085	 * verify shared secret from which the user will derive
2086	 * secret key by executing whatever hash it has chosen
2087	 */
2088	if (memcmp(shared_secret, sg_virt(req->dst),
2089		   vec->expected_ss_size)) {
2090		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2091		       alg);
2092		err = -EINVAL;
2093	}
2094
2095free_all:
2096	kfree(a_ss);
2097	kfree(input_buf);
2098free_output:
2099	kfree(a_public);
2100	kfree(output_buf);
2101free_req:
2102	kpp_request_free(req);
2103	return err;
2104}
2105
2106static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2107		    const struct kpp_testvec *vecs, unsigned int tcount)
2108{
2109	int ret, i;
2110
2111	for (i = 0; i < tcount; i++) {
2112		ret = do_test_kpp(tfm, vecs++, alg);
2113		if (ret) {
2114			pr_err("alg: %s: test failed on vector %d, err=%d\n",
2115			       alg, i + 1, ret);
2116			return ret;
2117		}
2118	}
2119	return 0;
2120}
2121
2122static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2123			u32 type, u32 mask)
2124{
2125	struct crypto_kpp *tfm;
2126	int err = 0;
2127
2128	tfm = crypto_alloc_kpp(driver, type, mask);
2129	if (IS_ERR(tfm)) {
2130		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2131		       driver, PTR_ERR(tfm));
2132		return PTR_ERR(tfm);
2133	}
2134	if (desc->suite.kpp.vecs)
2135		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2136			       desc->suite.kpp.count);
2137
2138	crypto_free_kpp(tfm);
2139	return err;
2140}
2141
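/*
 * Run one akcipher test vector: set the public or private key, encrypt
 * (or sign, for ->siggen_sigver_test vectors) the message and compare
 * the result with the expected ciphertext, then, for private-key
 * vectors, decrypt (or verify) it again and check that the recovered
 * message matches, permitting only leading zero bytes before it.
 */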
2142static int test_akcipher_one(struct crypto_akcipher *tfm,
2143			     const struct akcipher_testvec *vecs)
2144{
2145	char *xbuf[XBUFSIZE];
2146	struct akcipher_request *req;
2147	void *outbuf_enc = NULL;
2148	void *outbuf_dec = NULL;
2149	struct crypto_wait wait;
2150	unsigned int out_len_max, out_len = 0;
2151	int err = -ENOMEM;
2152	struct scatterlist src, dst, src_tab[2];
2153
2154	if (testmgr_alloc_buf(xbuf))
2155		return err;
2156
2157	req = akcipher_request_alloc(tfm, GFP_KERNEL);
2158	if (!req)
2159		goto free_xbuf;
2160
2161	crypto_init_wait(&wait);
2162
2163	if (vecs->public_key_vec)
2164		err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2165						  vecs->key_len);
2166	else
2167		err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2168						   vecs->key_len);
2169	if (err)
2170		goto free_req;
2171
2172	err = -ENOMEM;
2173	out_len_max = crypto_akcipher_maxsize(tfm);
2174	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2175	if (!outbuf_enc)
2176		goto free_req;
2177
2178	if (WARN_ON(vecs->m_size > PAGE_SIZE))
2179		goto free_all;
2180
2181	memcpy(xbuf[0], vecs->m, vecs->m_size);
2182
2183	sg_init_table(src_tab, 2);
2184	sg_set_buf(&src_tab[0], xbuf[0], 8);
2185	sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
2186	sg_init_one(&dst, outbuf_enc, out_len_max);
2187	akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2188				   out_len_max);
2189	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2190				      crypto_req_done, &wait);
2191
2192	err = crypto_wait_req(vecs->siggen_sigver_test ?
2193			      /* Run asymmetric signature generation */
2194			      crypto_akcipher_sign(req) :
2195			      /* Run asymmetric encrypt */
2196			      crypto_akcipher_encrypt(req), &wait);
2197	if (err) {
2198		pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
2199		goto free_all;
2200	}
2201	if (req->dst_len != vecs->c_size) {
2202		pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
2203		err = -EINVAL;
2204		goto free_all;
2205	}
2206	/* verify that encrypted message is equal to expected */
2207	if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
2208		pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2209		hexdump(outbuf_enc, vecs->c_size);
2210		err = -EINVAL;
2211		goto free_all;
2212	}
2213	/* Don't invoke decrypt for vectors with public key */
2214	if (vecs->public_key_vec) {
2215		err = 0;
2216		goto free_all;
2217	}
2218	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2219	if (!outbuf_dec) {
2220		err = -ENOMEM;
2221		goto free_all;
2222	}
2223
2224	if (WARN_ON(vecs->c_size > PAGE_SIZE))
2225		goto free_all;
2226
2227	memcpy(xbuf[0], vecs->c, vecs->c_size);
2228
2229	sg_init_one(&src, xbuf[0], vecs->c_size);
2230	sg_init_one(&dst, outbuf_dec, out_len_max);
2231	crypto_init_wait(&wait);
2232	akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2233
2234	err = crypto_wait_req(vecs->siggen_sigver_test ?
2235			      /* Run asymmetric signature verification */
2236			      crypto_akcipher_verify(req) :
2237			      /* Run asymmetric decrypt */
2238			      crypto_akcipher_decrypt(req), &wait);
2239	if (err) {
2240		pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2241		goto free_all;
2242	}
2243	out_len = req->dst_len;
2244	if (out_len < vecs->m_size) {
2245		pr_err("alg: akcipher: decrypt test failed. "
2246		       "Invalid output len %u\n", out_len);
2247		err = -EINVAL;
2248		goto free_all;
2249	}
2250	/* verify that decrypted message is equal to the original msg */
2251	if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2252	    memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2253		   vecs->m_size)) {
2254		pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2255		hexdump(outbuf_dec, out_len);
2256		err = -EINVAL;
2257	}
2258free_all:
2259	kfree(outbuf_dec);
2260	kfree(outbuf_enc);
2261free_req:
2262	akcipher_request_free(req);
2263free_xbuf:
2264	testmgr_free_buf(xbuf);
2265	return err;
2266}
2267
2268static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2269			 const struct akcipher_testvec *vecs,
2270			 unsigned int tcount)
2271{
2272	const char *algo =
2273		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2274	int ret, i;
2275
2276	for (i = 0; i < tcount; i++) {
2277		ret = test_akcipher_one(tfm, vecs++);
2278		if (!ret)
2279			continue;
2280
2281		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2282		       i + 1, algo, ret);
2283		return ret;
2284	}
2285	return 0;
2286}
2287
2288static int alg_test_akcipher(const struct alg_test_desc *desc,
2289			     const char *driver, u32 type, u32 mask)
2290{
2291	struct crypto_akcipher *tfm;
2292	int err = 0;
2293
2294	tfm = crypto_alloc_akcipher(driver, type, mask);
2295	if (IS_ERR(tfm)) {
2296		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2297		       driver, PTR_ERR(tfm));
2298		return PTR_ERR(tfm);
2299	}
2300	if (desc->suite.akcipher.vecs)
2301		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2302				    desc->suite.akcipher.count);
2303
2304	crypto_free_akcipher(tfm);
2305	return err;
2306}
2307
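/* No-op test for algorithms covered by another entry or needing no KAT. */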
2308static int alg_test_null(const struct alg_test_desc *desc,
2309			     const char *driver, u32 type, u32 mask)
2310{
2311	return 0;
2312}
2313
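/* Build a { vecs, count } test suite entry from a static vector array. */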
2314#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }
2315
2316/* Please keep this list sorted by algorithm name. */
2317static const struct alg_test_desc alg_test_descs[] = {
2318	{
2319		.alg = "ansi_cprng",
2320		.test = alg_test_cprng,
2321		.suite = {
2322			.cprng = __VECS(ansi_cprng_aes_tv_template)
2323		}
2324	}, {
2325		.alg = "authenc(hmac(md5),ecb(cipher_null))",
2326		.test = alg_test_aead,
2327		.suite = {
2328			.aead = {
2329				.enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
2330				.dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
2331			}
2332		}
2333	}, {
2334		.alg = "authenc(hmac(sha1),cbc(aes))",
2335		.test = alg_test_aead,
2336		.fips_allowed = 1,
2337		.suite = {
2338			.aead = {
2339				.enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
2340			}
2341		}
2342	}, {
2343		.alg = "authenc(hmac(sha1),cbc(des))",
2344		.test = alg_test_aead,
2345		.suite = {
2346			.aead = {
2347				.enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
2348			}
2349		}
2350	}, {
2351		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
2352		.test = alg_test_aead,
2353		.fips_allowed = 1,
2354		.suite = {
2355			.aead = {
2356				.enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
2357			}
2358		}
2359	}, {
2360		.alg = "authenc(hmac(sha1),ctr(aes))",
2361		.test = alg_test_null,
2362		.fips_allowed = 1,
2363	}, {
2364		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
2365		.test = alg_test_aead,
2366		.suite = {
2367			.aead = {
2368				.enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
2369				.dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
2370			}
2371		}
2372	}, {
2373		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2374		.test = alg_test_null,
2375		.fips_allowed = 1,
2376	}, {
2377		.alg = "authenc(hmac(sha224),cbc(des))",
2378		.test = alg_test_aead,
2379		.suite = {
2380			.aead = {
2381				.enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
2382			}
2383		}
2384	}, {
2385		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
2386		.test = alg_test_aead,
2387		.fips_allowed = 1,
2388		.suite = {
2389			.aead = {
2390				.enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
2391			}
2392		}
2393	}, {
2394		.alg = "authenc(hmac(sha256),cbc(aes))",
2395		.test = alg_test_aead,
2396		.fips_allowed = 1,
2397		.suite = {
2398			.aead = {
2399				.enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
2400			}
2401		}
2402	}, {
2403		.alg = "authenc(hmac(sha256),cbc(des))",
2404		.test = alg_test_aead,
2405		.suite = {
2406			.aead = {
2407				.enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
2408			}
2409		}
2410	}, {
2411		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
2412		.test = alg_test_aead,
2413		.fips_allowed = 1,
2414		.suite = {
2415			.aead = {
2416				.enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
2417			}
2418		}
2419	}, {
2420		.alg = "authenc(hmac(sha256),ctr(aes))",
2421		.test = alg_test_null,
2422		.fips_allowed = 1,
2423	}, {
2424		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2425		.test = alg_test_null,
2426		.fips_allowed = 1,
2427	}, {
2428		.alg = "authenc(hmac(sha384),cbc(des))",
2429		.test = alg_test_aead,
2430		.suite = {
2431			.aead = {
2432				.enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
2433			}
2434		}
2435	}, {
2436		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
2437		.test = alg_test_aead,
2438		.fips_allowed = 1,
2439		.suite = {
2440			.aead = {
2441				.enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
2442			}
2443		}
2444	}, {
2445		.alg = "authenc(hmac(sha384),ctr(aes))",
2446		.test = alg_test_null,
2447		.fips_allowed = 1,
2448	}, {
2449		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2450		.test = alg_test_null,
2451		.fips_allowed = 1,
2452	}, {
2453		.alg = "authenc(hmac(sha512),cbc(aes))",
2454		.fips_allowed = 1,
2455		.test = alg_test_aead,
2456		.suite = {
2457			.aead = {
2458				.enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
2459			}
2460		}
2461	}, {
2462		.alg = "authenc(hmac(sha512),cbc(des))",
2463		.test = alg_test_aead,
2464		.suite = {
2465			.aead = {
2466				.enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
2467			}
2468		}
2469	}, {
2470		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
2471		.test = alg_test_aead,
2472		.fips_allowed = 1,
2473		.suite = {
2474			.aead = {
2475				.enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
2476			}
2477		}
2478	}, {
2479		.alg = "authenc(hmac(sha512),ctr(aes))",
2480		.test = alg_test_null,
2481		.fips_allowed = 1,
2482	}, {
2483		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2484		.test = alg_test_null,
2485		.fips_allowed = 1,
2486	}, {
2487		.alg = "cbc(aes)",
2488		.test = alg_test_skcipher,
2489		.fips_allowed = 1,
2490		.suite = {
2491			.cipher = {
2492				.enc = __VECS(aes_cbc_enc_tv_template),
2493				.dec = __VECS(aes_cbc_dec_tv_template)
2494			}
2495		}
2496	}, {
2497		.alg = "cbc(anubis)",
2498		.test = alg_test_skcipher,
2499		.suite = {
2500			.cipher = {
2501				.enc = __VECS(anubis_cbc_enc_tv_template),
2502				.dec = __VECS(anubis_cbc_dec_tv_template)
2503			}
2504		}
2505	}, {
2506		.alg = "cbc(blowfish)",
2507		.test = alg_test_skcipher,
2508		.suite = {
2509			.cipher = {
2510				.enc = __VECS(bf_cbc_enc_tv_template),
2511				.dec = __VECS(bf_cbc_dec_tv_template)
2512			}
2513		}
2514	}, {
2515		.alg = "cbc(camellia)",
2516		.test = alg_test_skcipher,
2517		.suite = {
2518			.cipher = {
2519				.enc = __VECS(camellia_cbc_enc_tv_template),
2520				.dec = __VECS(camellia_cbc_dec_tv_template)
2521			}
2522		}
2523	}, {
2524		.alg = "cbc(cast5)",
2525		.test = alg_test_skcipher,
2526		.suite = {
2527			.cipher = {
2528				.enc = __VECS(cast5_cbc_enc_tv_template),
2529				.dec = __VECS(cast5_cbc_dec_tv_template)
2530			}
2531		}
2532	}, {
2533		.alg = "cbc(cast6)",
2534		.test = alg_test_skcipher,
2535		.suite = {
2536			.cipher = {
2537				.enc = __VECS(cast6_cbc_enc_tv_template),
2538				.dec = __VECS(cast6_cbc_dec_tv_template)
2539			}
2540		}
2541	}, {
2542		.alg = "cbc(des)",
2543		.test = alg_test_skcipher,
2544		.suite = {
2545			.cipher = {
2546				.enc = __VECS(des_cbc_enc_tv_template),
2547				.dec = __VECS(des_cbc_dec_tv_template)
2548			}
2549		}
2550	}, {
2551		.alg = "cbc(des3_ede)",
2552		.test = alg_test_skcipher,
2553		.fips_allowed = 1,
2554		.suite = {
2555			.cipher = {
2556				.enc = __VECS(des3_ede_cbc_enc_tv_template),
2557				.dec = __VECS(des3_ede_cbc_dec_tv_template)
2558			}
2559		}
2560	}, {
2561		.alg = "cbc(serpent)",
2562		.test = alg_test_skcipher,
2563		.suite = {
2564			.cipher = {
2565				.enc = __VECS(serpent_cbc_enc_tv_template),
2566				.dec = __VECS(serpent_cbc_dec_tv_template)
2567			}
2568		}
2569	}, {
2570		.alg = "cbc(twofish)",
2571		.test = alg_test_skcipher,
2572		.suite = {
2573			.cipher = {
2574				.enc = __VECS(tf_cbc_enc_tv_template),
2575				.dec = __VECS(tf_cbc_dec_tv_template)
2576			}
2577		}
2578	}, {
2579		.alg = "cbcmac(aes)",
2580		.fips_allowed = 1,
2581		.test = alg_test_hash,
2582		.suite = {
2583			.hash = __VECS(aes_cbcmac_tv_template)
2584		}
2585	}, {
2586		.alg = "ccm(aes)",
2587		.test = alg_test_aead,
2588		.fips_allowed = 1,
2589		.suite = {
2590			.aead = {
2591				.enc = __VECS(aes_ccm_enc_tv_template),
2592				.dec = __VECS(aes_ccm_dec_tv_template)
2593			}
2594		}
2595	}, {
2596		.alg = "chacha20",
2597		.test = alg_test_skcipher,
2598		.suite = {
2599			.cipher = {
2600				.enc = __VECS(chacha20_enc_tv_template),
2601				.dec = __VECS(chacha20_enc_tv_template),
2602			}
2603		}
2604	}, {
2605		.alg = "cmac(aes)",
2606		.fips_allowed = 1,
2607		.test = alg_test_hash,
2608		.suite = {
2609			.hash = __VECS(aes_cmac128_tv_template)
2610		}
2611	}, {
2612		.alg = "cmac(des3_ede)",
2613		.fips_allowed = 1,
2614		.test = alg_test_hash,
2615		.suite = {
2616			.hash = __VECS(des3_ede_cmac64_tv_template)
2617		}
2618	}, {
2619		.alg = "compress_null",
2620		.test = alg_test_null,
2621	}, {
2622		.alg = "crc32",
2623		.test = alg_test_hash,
2624		.suite = {
2625			.hash = __VECS(crc32_tv_template)
2626		}
2627	}, {
2628		.alg = "crc32c",
2629		.test = alg_test_crc32c,
2630		.fips_allowed = 1,
2631		.suite = {
2632			.hash = __VECS(crc32c_tv_template)
2633		}
2634	}, {
2635		.alg = "crct10dif",
2636		.test = alg_test_hash,
2637		.fips_allowed = 1,
2638		.suite = {
2639			.hash = __VECS(crct10dif_tv_template)
2640		}
2641	}, {
2642		.alg = "ctr(aes)",
2643		.test = alg_test_skcipher,
2644		.fips_allowed = 1,
2645		.suite = {
2646			.cipher = {
2647				.enc = __VECS(aes_ctr_enc_tv_template),
2648				.dec = __VECS(aes_ctr_dec_tv_template)
2649			}
2650		}
2651	}, {
2652		.alg = "ctr(blowfish)",
2653		.test = alg_test_skcipher,
2654		.suite = {
2655			.cipher = {
2656				.enc = __VECS(bf_ctr_enc_tv_template),
2657				.dec = __VECS(bf_ctr_dec_tv_template)
2658			}
2659		}
2660	}, {
2661		.alg = "ctr(camellia)",
2662		.test = alg_test_skcipher,
2663		.suite = {
2664			.cipher = {
2665				.enc = __VECS(camellia_ctr_enc_tv_template),
2666				.dec = __VECS(camellia_ctr_dec_tv_template)
2667			}
2668		}
2669	}, {
2670		.alg = "ctr(cast5)",
2671		.test = alg_test_skcipher,
2672		.suite = {
2673			.cipher = {
2674				.enc = __VECS(cast5_ctr_enc_tv_template),
2675				.dec = __VECS(cast5_ctr_dec_tv_template)
2676			}
2677		}
2678	}, {
2679		.alg = "ctr(cast6)",
2680		.test = alg_test_skcipher,
2681		.suite = {
2682			.cipher = {
2683				.enc = __VECS(cast6_ctr_enc_tv_template),
2684				.dec = __VECS(cast6_ctr_dec_tv_template)
2685			}
2686		}
2687	}, {
2688		.alg = "ctr(des)",
2689		.test = alg_test_skcipher,
2690		.suite = {
2691			.cipher = {
2692				.enc = __VECS(des_ctr_enc_tv_template),
2693				.dec = __VECS(des_ctr_dec_tv_template)
2694			}
2695		}
2696	}, {
2697		.alg = "ctr(des3_ede)",
2698		.test = alg_test_skcipher,
2699		.fips_allowed = 1,
2700		.suite = {
2701			.cipher = {
2702				.enc = __VECS(des3_ede_ctr_enc_tv_template),
2703				.dec = __VECS(des3_ede_ctr_dec_tv_template)
2704			}
2705		}
2706	}, {
2707		.alg = "ctr(serpent)",
2708		.test = alg_test_skcipher,
2709		.suite = {
2710			.cipher = {
2711				.enc = __VECS(serpent_ctr_enc_tv_template),
2712				.dec = __VECS(serpent_ctr_dec_tv_template)
2713			}
2714		}
2715	}, {
2716		.alg = "ctr(twofish)",
2717		.test = alg_test_skcipher,
2718		.suite = {
2719			.cipher = {
2720				.enc = __VECS(tf_ctr_enc_tv_template),
2721				.dec = __VECS(tf_ctr_dec_tv_template)
2722			}
2723		}
2724	}, {
2725		.alg = "cts(cbc(aes))",
2726		.test = alg_test_skcipher,
2727		.suite = {
2728			.cipher = {
2729				.enc = __VECS(cts_mode_enc_tv_template),
2730				.dec = __VECS(cts_mode_dec_tv_template)
2731			}
2732		}
2733	}, {
2734		.alg = "deflate",
2735		.test = alg_test_comp,
2736		.fips_allowed = 1,
2737		.suite = {
2738			.comp = {
2739				.comp = __VECS(deflate_comp_tv_template),
2740				.decomp = __VECS(deflate_decomp_tv_template)
2741			}
2742		}
2743	}, {
2744		.alg = "dh",
2745		.test = alg_test_kpp,
2746		.fips_allowed = 1,
2747		.suite = {
2748			.kpp = __VECS(dh_tv_template)
2749		}
2750	}, {
2751		.alg = "digest_null",
2752		.test = alg_test_null,
2753	}, {
2754		.alg = "drbg_nopr_ctr_aes128",
2755		.test = alg_test_drbg,
2756		.fips_allowed = 1,
2757		.suite = {
2758			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
2759		}
2760	}, {
2761		.alg = "drbg_nopr_ctr_aes192",
2762		.test = alg_test_drbg,
2763		.fips_allowed = 1,
2764		.suite = {
2765			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
2766		}
2767	}, {
2768		.alg = "drbg_nopr_ctr_aes256",
2769		.test = alg_test_drbg,
2770		.fips_allowed = 1,
2771		.suite = {
2772			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
2773		}
2774	}, {
2775		/*
2776		 * There is no need to specifically test the DRBG with every
2777		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2778		 */
2779		.alg = "drbg_nopr_hmac_sha1",
2780		.fips_allowed = 1,
2781		.test = alg_test_null,
2782	}, {
2783		.alg = "drbg_nopr_hmac_sha256",
2784		.test = alg_test_drbg,
2785		.fips_allowed = 1,
2786		.suite = {
2787			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
2788		}
2789	}, {
2790		/* covered by drbg_nopr_hmac_sha256 test */
2791		.alg = "drbg_nopr_hmac_sha384",
2792		.fips_allowed = 1,
2793		.test = alg_test_null,
2794	}, {
2795		.alg = "drbg_nopr_hmac_sha512",
2796		.test = alg_test_null,
2797		.fips_allowed = 1,
2798	}, {
2799		.alg = "drbg_nopr_sha1",
2800		.fips_allowed = 1,
2801		.test = alg_test_null,
2802	}, {
2803		.alg = "drbg_nopr_sha256",
2804		.test = alg_test_drbg,
2805		.fips_allowed = 1,
2806		.suite = {
2807			.drbg = __VECS(drbg_nopr_sha256_tv_template)
2808		}
2809	}, {
2810		/* covered by drbg_nopr_sha256 test */
2811		.alg = "drbg_nopr_sha384",
2812		.fips_allowed = 1,
2813		.test = alg_test_null,
2814	}, {
2815		.alg = "drbg_nopr_sha512",
2816		.fips_allowed = 1,
2817		.test = alg_test_null,
2818	}, {
2819		.alg = "drbg_pr_ctr_aes128",
2820		.test = alg_test_drbg,
2821		.fips_allowed = 1,
2822		.suite = {
2823			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
2824		}
2825	}, {
2826		/* covered by drbg_pr_ctr_aes128 test */
2827		.alg = "drbg_pr_ctr_aes192",
2828		.fips_allowed = 1,
2829		.test = alg_test_null,
2830	}, {
2831		.alg = "drbg_pr_ctr_aes256",
2832		.fips_allowed = 1,
2833		.test = alg_test_null,
2834	}, {
2835		.alg = "drbg_pr_hmac_sha1",
2836		.fips_allowed = 1,
2837		.test = alg_test_null,
2838	}, {
2839		.alg = "drbg_pr_hmac_sha256",
2840		.test = alg_test_drbg,
2841		.fips_allowed = 1,
2842		.suite = {
2843			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
2844		}
2845	}, {
2846		/* covered by drbg_pr_hmac_sha256 test */
2847		.alg = "drbg_pr_hmac_sha384",
2848		.fips_allowed = 1,
2849		.test = alg_test_null,
2850	}, {
2851		.alg = "drbg_pr_hmac_sha512",
2852		.test = alg_test_null,
2853		.fips_allowed = 1,
2854	}, {
2855		.alg = "drbg_pr_sha1",
2856		.fips_allowed = 1,
2857		.test = alg_test_null,
2858	}, {
2859		.alg = "drbg_pr_sha256",
2860		.test = alg_test_drbg,
2861		.fips_allowed = 1,
2862		.suite = {
2863			.drbg = __VECS(drbg_pr_sha256_tv_template)
2864		}
2865	}, {
2866		/* covered by drbg_pr_sha256 test */
2867		.alg = "drbg_pr_sha384",
2868		.fips_allowed = 1,
2869		.test = alg_test_null,
2870	}, {
2871		.alg = "drbg_pr_sha512",
2872		.fips_allowed = 1,
2873		.test = alg_test_null,
2874	}, {
2875		.alg = "ecb(aes)",
2876		.test = alg_test_skcipher,
2877		.fips_allowed = 1,
2878		.suite = {
2879			.cipher = {
2880				.enc = __VECS(aes_enc_tv_template),
2881				.dec = __VECS(aes_dec_tv_template)
2882			}
2883		}
2884	}, {
2885		.alg = "ecb(anubis)",
2886		.test = alg_test_skcipher,
2887		.suite = {
2888			.cipher = {
2889				.enc = __VECS(anubis_enc_tv_template),
2890				.dec = __VECS(anubis_dec_tv_template)
2891			}
2892		}
2893	}, {
2894		.alg = "ecb(arc4)",
2895		.test = alg_test_skcipher,
2896		.suite = {
2897			.cipher = {
2898				.enc = __VECS(arc4_enc_tv_template),
2899				.dec = __VECS(arc4_dec_tv_template)
2900			}
2901		}
2902	}, {
2903		.alg = "ecb(blowfish)",
2904		.test = alg_test_skcipher,
2905		.suite = {
2906			.cipher = {
2907				.enc = __VECS(bf_enc_tv_template),
2908				.dec = __VECS(bf_dec_tv_template)
2909			}
2910		}
2911	}, {
2912		.alg = "ecb(camellia)",
2913		.test = alg_test_skcipher,
2914		.suite = {
2915			.cipher = {
2916				.enc = __VECS(camellia_enc_tv_template),
2917				.dec = __VECS(camellia_dec_tv_template)
2918			}
2919		}
2920	}, {
2921		.alg = "ecb(cast5)",
2922		.test = alg_test_skcipher,
2923		.suite = {
2924			.cipher = {
2925				.enc = __VECS(cast5_enc_tv_template),
2926				.dec = __VECS(cast5_dec_tv_template)
2927			}
2928		}
2929	}, {
2930		.alg = "ecb(cast6)",
2931		.test = alg_test_skcipher,
2932		.suite = {
2933			.cipher = {
2934				.enc = __VECS(cast6_enc_tv_template),
2935				.dec = __VECS(cast6_dec_tv_template)
2936			}
2937		}
2938	}, {
2939		.alg = "ecb(cipher_null)",
2940		.test = alg_test_null,
2941		.fips_allowed = 1,
2942	}, {
2943		.alg = "ecb(des)",
2944		.test = alg_test_skcipher,
2945		.suite = {
2946			.cipher = {
2947				.enc = __VECS(des_enc_tv_template),
2948				.dec = __VECS(des_dec_tv_template)
2949			}
2950		}
2951	}, {
2952		.alg = "ecb(des3_ede)",
2953		.test = alg_test_skcipher,
2954		.fips_allowed = 1,
2955		.suite = {
2956			.cipher = {
2957				.enc = __VECS(des3_ede_enc_tv_template),
2958				.dec = __VECS(des3_ede_dec_tv_template)
2959			}
2960		}
2961	}, {
2962		.alg = "ecb(fcrypt)",
2963		.test = alg_test_skcipher,
2964		.suite = {
2965			.cipher = {
2966				.enc = {
2967					.vecs = fcrypt_pcbc_enc_tv_template,
2968					.count = 1
2969				},
2970				.dec = {
2971					.vecs = fcrypt_pcbc_dec_tv_template,
2972					.count = 1
2973				}
2974			}
2975		}
2976	}, {
2977		.alg = "ecb(khazad)",
2978		.test = alg_test_skcipher,
2979		.suite = {
2980			.cipher = {
2981				.enc = __VECS(khazad_enc_tv_template),
2982				.dec = __VECS(khazad_dec_tv_template)
2983			}
2984		}
2985	}, {
2986		.alg = "ecb(seed)",
2987		.test = alg_test_skcipher,
2988		.suite = {
2989			.cipher = {
2990				.enc = __VECS(seed_enc_tv_template),
2991				.dec = __VECS(seed_dec_tv_template)
2992			}
2993		}
2994	}, {
2995		.alg = "ecb(serpent)",
2996		.test = alg_test_skcipher,
2997		.suite = {
2998			.cipher = {
2999				.enc = __VECS(serpent_enc_tv_template),
3000				.dec = __VECS(serpent_dec_tv_template)
3001			}
3002		}
3003	}, {
3004		.alg = "ecb(sm4)",
3005		.test = alg_test_skcipher,
3006		.suite = {
3007			.cipher = {
3008				.enc = __VECS(sm4_enc_tv_template),
3009				.dec = __VECS(sm4_dec_tv_template)
3010			}
3011		}
3012	}, {
3013		.alg = "ecb(speck128)",
3014		.test = alg_test_skcipher,
3015		.suite = {
3016			.cipher = {
3017				.enc = __VECS(speck128_enc_tv_template),
3018				.dec = __VECS(speck128_dec_tv_template)
3019			}
3020		}
3021	}, {
3022		.alg = "ecb(speck64)",
3023		.test = alg_test_skcipher,
3024		.suite = {
3025			.cipher = {
3026				.enc = __VECS(speck64_enc_tv_template),
3027				.dec = __VECS(speck64_dec_tv_template)
3028			}
3029		}
3030	}, {
3031		.alg = "ecb(tea)",
3032		.test = alg_test_skcipher,
3033		.suite = {
3034			.cipher = {
3035				.enc = __VECS(tea_enc_tv_template),
3036				.dec = __VECS(tea_dec_tv_template)
3037			}
3038		}
3039	}, {
3040		.alg = "ecb(tnepres)",
3041		.test = alg_test_skcipher,
3042		.suite = {
3043			.cipher = {
3044				.enc = __VECS(tnepres_enc_tv_template),
3045				.dec = __VECS(tnepres_dec_tv_template)
3046			}
3047		}
3048	}, {
3049		.alg = "ecb(twofish)",
3050		.test = alg_test_skcipher,
3051		.suite = {
3052			.cipher = {
3053				.enc = __VECS(tf_enc_tv_template),
3054				.dec = __VECS(tf_dec_tv_template)
3055			}
3056		}
3057	}, {
3058		.alg = "ecb(xeta)",
3059		.test = alg_test_skcipher,
3060		.suite = {
3061			.cipher = {
3062				.enc = __VECS(xeta_enc_tv_template),
3063				.dec = __VECS(xeta_dec_tv_template)
3064			}
3065		}
3066	}, {
3067		.alg = "ecb(xtea)",
3068		.test = alg_test_skcipher,
3069		.suite = {
3070			.cipher = {
3071				.enc = __VECS(xtea_enc_tv_template),
3072				.dec = __VECS(xtea_dec_tv_template)
3073			}
3074		}
3075	}, {
3076		.alg = "ecdh",
3077		.test = alg_test_kpp,
3078		.fips_allowed = 1,
3079		.suite = {
3080			.kpp = __VECS(ecdh_tv_template)
3081		}
3082	}, {
3083		.alg = "gcm(aes)",
3084		.test = alg_test_aead,
3085		.fips_allowed = 1,
3086		.suite = {
3087			.aead = {
3088				.enc = __VECS(aes_gcm_enc_tv_template),
3089				.dec = __VECS(aes_gcm_dec_tv_template)
3090			}
3091		}
3092	}, {
3093		.alg = "ghash",
3094		.test = alg_test_hash,
3095		.fips_allowed = 1,
3096		.suite = {
3097			.hash = __VECS(ghash_tv_template)
3098		}
3099	}, {
3100		.alg = "hmac(crc32)",
3101		.test = alg_test_hash,
3102		.suite = {
3103			.hash = __VECS(bfin_crc_tv_template)
3104		}
3105	}, {
3106		.alg = "hmac(md5)",
3107		.test = alg_test_hash,
3108		.suite = {
3109			.hash = __VECS(hmac_md5_tv_template)
3110		}
3111	}, {
3112		.alg = "hmac(rmd128)",
3113		.test = alg_test_hash,
3114		.suite = {
3115			.hash = __VECS(hmac_rmd128_tv_template)
3116		}
3117	}, {
3118		.alg = "hmac(rmd160)",
3119		.test = alg_test_hash,
3120		.suite = {
3121			.hash = __VECS(hmac_rmd160_tv_template)
3122		}
3123	}, {
3124		.alg = "hmac(sha1)",
3125		.test = alg_test_hash,
3126		.fips_allowed = 1,
3127		.suite = {
3128			.hash = __VECS(hmac_sha1_tv_template)
3129		}
3130	}, {
3131		.alg = "hmac(sha224)",
3132		.test = alg_test_hash,
3133		.fips_allowed = 1,
3134		.suite = {
3135			.hash = __VECS(hmac_sha224_tv_template)
3136		}
3137	}, {
3138		.alg = "hmac(sha256)",
3139		.test = alg_test_hash,
3140		.fips_allowed = 1,
3141		.suite = {
3142			.hash = __VECS(hmac_sha256_tv_template)
3143		}
3144	}, {
3145		.alg = "hmac(sha3-224)",
3146		.test = alg_test_hash,
3147		.fips_allowed = 1,
3148		.suite = {
3149			.hash = __VECS(hmac_sha3_224_tv_template)
3150		}
3151	}, {
3152		.alg = "hmac(sha3-256)",
3153		.test = alg_test_hash,
3154		.fips_allowed = 1,
3155		.suite = {
3156			.hash = __VECS(hmac_sha3_256_tv_template)
3157		}
3158	}, {
3159		.alg = "hmac(sha3-384)",
3160		.test = alg_test_hash,
3161		.fips_allowed = 1,
3162		.suite = {
3163			.hash = __VECS(hmac_sha3_384_tv_template)
3164		}
3165	}, {
3166		.alg = "hmac(sha3-512)",
3167		.test = alg_test_hash,
3168		.fips_allowed = 1,
3169		.suite = {
3170			.hash = __VECS(hmac_sha3_512_tv_template)
3171		}
3172	}, {
3173		.alg = "hmac(sha384)",
3174		.test = alg_test_hash,
3175		.fips_allowed = 1,
3176		.suite = {
3177			.hash = __VECS(hmac_sha384_tv_template)
3178		}
3179	}, {
3180		.alg = "hmac(sha512)",
3181		.test = alg_test_hash,
3182		.fips_allowed = 1,
3183		.suite = {
3184			.hash = __VECS(hmac_sha512_tv_template)
3185		}
3186	}, {
3187		.alg = "jitterentropy_rng",
3188		.fips_allowed = 1,
3189		.test = alg_test_null,
3190	}, {
3191		.alg = "kw(aes)",
3192		.test = alg_test_skcipher,
3193		.fips_allowed = 1,
3194		.suite = {
3195			.cipher = {
3196				.enc = __VECS(aes_kw_enc_tv_template),
3197				.dec = __VECS(aes_kw_dec_tv_template)
3198			}
3199		}
3200	}, {
3201		.alg = "lrw(aes)",
3202		.test = alg_test_skcipher,
3203		.suite = {
3204			.cipher = {
3205				.enc = __VECS(aes_lrw_enc_tv_template),
3206				.dec = __VECS(aes_lrw_dec_tv_template)
3207			}
3208		}
3209	}, {
3210		.alg = "lrw(camellia)",
3211		.test = alg_test_skcipher,
3212		.suite = {
3213			.cipher = {
3214				.enc = __VECS(camellia_lrw_enc_tv_template),
3215				.dec = __VECS(camellia_lrw_dec_tv_template)
3216			}
3217		}
3218	}, {
3219		.alg = "lrw(cast6)",
3220		.test = alg_test_skcipher,
3221		.suite = {
3222			.cipher = {
3223				.enc = __VECS(cast6_lrw_enc_tv_template),
3224				.dec = __VECS(cast6_lrw_dec_tv_template)
3225			}
3226		}
3227	}, {
3228		.alg = "lrw(serpent)",
3229		.test = alg_test_skcipher,
3230		.suite = {
3231			.cipher = {
3232				.enc = __VECS(serpent_lrw_enc_tv_template),
3233				.dec = __VECS(serpent_lrw_dec_tv_template)
3234			}
3235		}
3236	}, {
3237		.alg = "lrw(twofish)",
3238		.test = alg_test_skcipher,
3239		.suite = {
3240			.cipher = {
3241				.enc = __VECS(tf_lrw_enc_tv_template),
3242				.dec = __VECS(tf_lrw_dec_tv_template)
3243			}
3244		}
3245	}, {
3246		.alg = "lz4",
3247		.test = alg_test_comp,
3248		.fips_allowed = 1,
3249		.suite = {
3250			.comp = {
3251				.comp = __VECS(lz4_comp_tv_template),
3252				.decomp = __VECS(lz4_decomp_tv_template)
3253			}
3254		}
3255	}, {
3256		.alg = "lz4hc",
3257		.test = alg_test_comp,
3258		.fips_allowed = 1,
3259		.suite = {
3260			.comp = {
3261				.comp = __VECS(lz4hc_comp_tv_template),
3262				.decomp = __VECS(lz4hc_decomp_tv_template)
3263			}
3264		}
3265	}, {
3266		.alg = "lzo",
3267		.test = alg_test_comp,
3268		.fips_allowed = 1,
3269		.suite = {
3270			.comp = {
3271				.comp = __VECS(lzo_comp_tv_template),
3272				.decomp = __VECS(lzo_decomp_tv_template)
3273			}
3274		}
3275	}, {
3276		.alg = "md4",
3277		.test = alg_test_hash,
3278		.suite = {
3279			.hash = __VECS(md4_tv_template)
3280		}
3281	}, {
3282		.alg = "md5",
3283		.test = alg_test_hash,
3284		.suite = {
3285			.hash = __VECS(md5_tv_template)
3286		}
3287	}, {
3288		.alg = "michael_mic",
3289		.test = alg_test_hash,
3290		.suite = {
3291			.hash = __VECS(michael_mic_tv_template)
3292		}
3293	}, {
3294		.alg = "ofb(aes)",
3295		.test = alg_test_skcipher,
3296		.fips_allowed = 1,
3297		.suite = {
3298			.cipher = {
3299				.enc = __VECS(aes_ofb_enc_tv_template),
3300				.dec = __VECS(aes_ofb_dec_tv_template)
3301			}
3302		}
3303	}, {
3304		.alg = "pcbc(fcrypt)",
3305		.test = alg_test_skcipher,
3306		.suite = {
3307			.cipher = {
3308				.enc = __VECS(fcrypt_pcbc_enc_tv_template),
3309				.dec = __VECS(fcrypt_pcbc_dec_tv_template)
3310			}
3311		}
3312	}, {
3313		.alg = "pkcs1pad(rsa,sha224)",
3314		.test = alg_test_null,
3315		.fips_allowed = 1,
3316	}, {
3317		.alg = "pkcs1pad(rsa,sha256)",
3318		.test = alg_test_akcipher,
3319		.fips_allowed = 1,
3320		.suite = {
3321			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
3322		}
3323	}, {
3324		.alg = "pkcs1pad(rsa,sha384)",
3325		.test = alg_test_null,
3326		.fips_allowed = 1,
3327	}, {
3328		.alg = "pkcs1pad(rsa,sha512)",
3329		.test = alg_test_null,
3330		.fips_allowed = 1,
3331	}, {
3332		.alg = "poly1305",
3333		.test = alg_test_hash,
3334		.suite = {
3335			.hash = __VECS(poly1305_tv_template)
3336		}
3337	}, {
3338		.alg = "rfc3686(ctr(aes))",
3339		.test = alg_test_skcipher,
3340		.fips_allowed = 1,
3341		.suite = {
3342			.cipher = {
3343				.enc = __VECS(aes_ctr_rfc3686_enc_tv_template),
3344				.dec = __VECS(aes_ctr_rfc3686_dec_tv_template)
3345			}
3346		}
3347	}, {
3348		.alg = "rfc4106(gcm(aes))",
3349		.test = alg_test_aead,
3350		.fips_allowed = 1,
3351		.suite = {
3352			.aead = {
3353				.enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
3354				.dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
3355			}
3356		}
3357	}, {
3358		.alg = "rfc4309(ccm(aes))",
3359		.test = alg_test_aead,
3360		.fips_allowed = 1,
3361		.suite = {
3362			.aead = {
3363				.enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
3364				.dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
3365			}
3366		}
3367	}, {
3368		.alg = "rfc4543(gcm(aes))",
3369		.test = alg_test_aead,
3370		.suite = {
3371			.aead = {
3372				.enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
3373				.dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
3374			}
3375		}
3376	}, {
3377		.alg = "rfc7539(chacha20,poly1305)",
3378		.test = alg_test_aead,
3379		.suite = {
3380			.aead = {
3381				.enc = __VECS(rfc7539_enc_tv_template),
3382				.dec = __VECS(rfc7539_dec_tv_template),
3383			}
3384		}
3385	}, {
3386		.alg = "rfc7539esp(chacha20,poly1305)",
3387		.test = alg_test_aead,
3388		.suite = {
3389			.aead = {
3390				.enc = __VECS(rfc7539esp_enc_tv_template),
3391				.dec = __VECS(rfc7539esp_dec_tv_template),
3392			}
3393		}
3394	}, {
3395		.alg = "rmd128",
3396		.test = alg_test_hash,
3397		.suite = {
3398			.hash = __VECS(rmd128_tv_template)
3399		}
3400	}, {
3401		.alg = "rmd160",
3402		.test = alg_test_hash,
3403		.suite = {
3404			.hash = __VECS(rmd160_tv_template)
3405		}
3406	}, {
3407		.alg = "rmd256",
3408		.test = alg_test_hash,
3409		.suite = {
3410			.hash = __VECS(rmd256_tv_template)
3411		}
3412	}, {
3413		.alg = "rmd320",
3414		.test = alg_test_hash,
3415		.suite = {
3416			.hash = __VECS(rmd320_tv_template)
3417		}
3418	}, {
3419		.alg = "rsa",
3420		.test = alg_test_akcipher,
3421		.fips_allowed = 1,
3422		.suite = {
3423			.akcipher = __VECS(rsa_tv_template)
3424		}
3425	}, {
3426		.alg = "salsa20",
3427		.test = alg_test_skcipher,
3428		.suite = {
3429			.cipher = {
3430				.enc = __VECS(salsa20_stream_enc_tv_template)
3431			}
3432		}
3433	}, {
3434		.alg = "sha1",
3435		.test = alg_test_hash,
3436		.fips_allowed = 1,
3437		.suite = {
3438			.hash = __VECS(sha1_tv_template)
3439		}
3440	}, {
3441		.alg = "sha224",
3442		.test = alg_test_hash,
3443		.fips_allowed = 1,
3444		.suite = {
3445			.hash = __VECS(sha224_tv_template)
3446		}
3447	}, {
3448		.alg = "sha256",
3449		.test = alg_test_hash,
3450		.fips_allowed = 1,
3451		.suite = {
3452			.hash = __VECS(sha256_tv_template)
3453		}
3454	}, {
3455		.alg = "sha3-224",
3456		.test = alg_test_hash,
3457		.fips_allowed = 1,
3458		.suite = {
3459			.hash = __VECS(sha3_224_tv_template)
3460		}
3461	}, {
3462		.alg = "sha3-256",
3463		.test = alg_test_hash,
3464		.fips_allowed = 1,
3465		.suite = {
3466			.hash = __VECS(sha3_256_tv_template)
3467		}
3468	}, {
3469		.alg = "sha3-384",
3470		.test = alg_test_hash,
3471		.fips_allowed = 1,
3472		.suite = {
3473			.hash = __VECS(sha3_384_tv_template)
3474		}
3475	}, {
3476		.alg = "sha3-512",
3477		.test = alg_test_hash,
3478		.fips_allowed = 1,
3479		.suite = {
3480			.hash = __VECS(sha3_512_tv_template)
3481		}
3482	}, {
3483		.alg = "sha384",
3484		.test = alg_test_hash,
3485		.fips_allowed = 1,
3486		.suite = {
3487			.hash = __VECS(sha384_tv_template)
3488		}
3489	}, {
3490		.alg = "sha512",
3491		.test = alg_test_hash,
3492		.fips_allowed = 1,
3493		.suite = {
3494			.hash = __VECS(sha512_tv_template)
3495		}
3496	}, {
3497		.alg = "sm3",
3498		.test = alg_test_hash,
3499		.suite = {
3500			.hash = __VECS(sm3_tv_template)
3501		}
3502	}, {
3503		.alg = "tgr128",
3504		.test = alg_test_hash,
3505		.suite = {
3506			.hash = __VECS(tgr128_tv_template)
3507		}
3508	}, {
3509		.alg = "tgr160",
3510		.test = alg_test_hash,
3511		.suite = {
3512			.hash = __VECS(tgr160_tv_template)
3513		}
3514	}, {
3515		.alg = "tgr192",
3516		.test = alg_test_hash,
3517		.suite = {
3518			.hash = __VECS(tgr192_tv_template)
3519		}
3520	}, {
3521		.alg = "vmac(aes)",
3522		.test = alg_test_hash,
3523		.suite = {
3524			.hash = __VECS(aes_vmac128_tv_template)
3525		}
3526	}, {
3527		.alg = "wp256",
3528		.test = alg_test_hash,
3529		.suite = {
3530			.hash = __VECS(wp256_tv_template)
3531		}
3532	}, {
3533		.alg = "wp384",
3534		.test = alg_test_hash,
3535		.suite = {
3536			.hash = __VECS(wp384_tv_template)
3537		}
3538	}, {
3539		.alg = "wp512",
3540		.test = alg_test_hash,
3541		.suite = {
3542			.hash = __VECS(wp512_tv_template)
3543		}
3544	}, {
3545		.alg = "xcbc(aes)",
3546		.test = alg_test_hash,
3547		.suite = {
3548			.hash = __VECS(aes_xcbc128_tv_template)
3549		}
3550	}, {
3551		.alg = "xts(aes)",
3552		.test = alg_test_skcipher,
3553		.fips_allowed = 1,
3554		.suite = {
3555			.cipher = {
3556				.enc = __VECS(aes_xts_enc_tv_template),
3557				.dec = __VECS(aes_xts_dec_tv_template)
3558			}
3559		}
3560	}, {
3561		.alg = "xts(camellia)",
3562		.test = alg_test_skcipher,
3563		.suite = {
3564			.cipher = {
3565				.enc = __VECS(camellia_xts_enc_tv_template),
3566				.dec = __VECS(camellia_xts_dec_tv_template)
3567			}
3568		}
3569	}, {
3570		.alg = "xts(cast6)",
3571		.test = alg_test_skcipher,
3572		.suite = {
3573			.cipher = {
3574				.enc = __VECS(cast6_xts_enc_tv_template),
3575				.dec = __VECS(cast6_xts_dec_tv_template)
3576			}
3577		}
3578	}, {
3579		.alg = "xts(serpent)",
3580		.test = alg_test_skcipher,
3581		.suite = {
3582			.cipher = {
3583				.enc = __VECS(serpent_xts_enc_tv_template),
3584				.dec = __VECS(serpent_xts_dec_tv_template)
3585			}
3586		}
3587	}, {
3588		.alg = "xts(speck128)",
3589		.test = alg_test_skcipher,
3590		.suite = {
3591			.cipher = {
3592				.enc = __VECS(speck128_xts_enc_tv_template),
3593				.dec = __VECS(speck128_xts_dec_tv_template)
3594			}
3595		}
3596	}, {
3597		.alg = "xts(speck64)",
3598		.test = alg_test_skcipher,
3599		.suite = {
3600			.cipher = {
3601				.enc = __VECS(speck64_xts_enc_tv_template),
3602				.dec = __VECS(speck64_xts_dec_tv_template)
3603			}
3604		}
3605	}, {
3606		.alg = "xts(twofish)",
3607		.test = alg_test_skcipher,
3608		.suite = {
3609			.cipher = {
3610				.enc = __VECS(tf_xts_enc_tv_template),
3611				.dec = __VECS(tf_xts_dec_tv_template)
3612			}
3613		}
3614	}, {
3615		.alg = "zlib-deflate",
3616		.test = alg_test_comp,
3617		.fips_allowed = 1,
3618		.suite = {
3619			.comp = {
3620				.comp = __VECS(zlib_deflate_comp_tv_template),
3621				.decomp = __VECS(zlib_deflate_decomp_tv_template)
3622			}
3623		}
3624	}
3625};
3626
3627static bool alg_test_descs_checked;
3628
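/*
 * One-time sanity check that alg_test_descs[] is sorted by algorithm
 * name and free of duplicates, since alg_find_test() relies on a
 * binary search over the array.
 */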
3629static void alg_test_descs_check_order(void)
3630{
3631	int i;
3632
3633	/* only check once */
3634	if (alg_test_descs_checked)
3635		return;
3636
3637	alg_test_descs_checked = true;
3638
3639	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3640		int diff = strcmp(alg_test_descs[i - 1].alg,
3641				  alg_test_descs[i].alg);
3642
3643		if (WARN_ON(diff > 0)) {
3644			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3645				alg_test_descs[i - 1].alg,
3646				alg_test_descs[i].alg);
3647		}
3648
3649		if (WARN_ON(diff == 0)) {
3650			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3651				alg_test_descs[i].alg);
3652		}
3653	}
3654}
3655
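/* Binary-search alg_test_descs[] for @alg; returns the index or -1. */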
3656static int alg_find_test(const char *alg)
3657{
3658	int start = 0;
3659	int end = ARRAY_SIZE(alg_test_descs);
3660
3661	while (start < end) {
3662		int i = (start + end) / 2;
3663		int diff = strcmp(alg_test_descs[i].alg, alg);
3664
3665		if (diff > 0) {
3666			end = i;
3667			continue;
3668		}
3669
3670		if (diff < 0) {
3671			start = i + 1;
3672			continue;
3673		}
3674
3675		return i;
3676	}
3677
3678	return -1;
3679}
3680
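/*
 * Self-test entry point: look up test descriptions by both algorithm
 * and driver name (bare ciphers are tested through their "ecb(...)"
 * wrapper), honour the "notests" parameter outside FIPS mode, reject
 * algorithms not approved for FIPS when fips_enabled, and panic if a
 * self-test fails in FIPS mode.
 */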
3681int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3682{
3683	int i;
3684	int j;
3685	int rc;
3686
3687	if (!fips_enabled && notests) {
3688		printk_once(KERN_INFO "alg: self-tests disabled\n");
3689		return 0;
3690	}
3691
3692	alg_test_descs_check_order();
3693
3694	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3695		char nalg[CRYPTO_MAX_ALG_NAME];
3696
3697		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3698		    sizeof(nalg))
3699			return -ENAMETOOLONG;
3700
3701		i = alg_find_test(nalg);
3702		if (i < 0)
3703			goto notest;
3704
3705		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3706			goto non_fips_alg;
3707
3708		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3709		goto test_done;
3710	}
3711
3712	i = alg_find_test(alg);
3713	j = alg_find_test(driver);
3714	if (i < 0 && j < 0)
3715		goto notest;
3716
3717	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3718			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3719		goto non_fips_alg;
3720
3721	rc = 0;
3722	if (i >= 0)
3723		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3724					     type, mask);
3725	if (j >= 0 && j != i)
3726		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3727					     type, mask);
3728
3729test_done:
3730	if (fips_enabled && rc)
3731		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3732
3733	if (fips_enabled && !rc)
3734		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
3735
3736	return rc;
3737
3738notest:
3739	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3740	return 0;
3741non_fips_alg:
3742	return -EINVAL;
3743}
3744
3745#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3746
3747EXPORT_SYMBOL_GPL(alg_test);
v3.15
   1/*
   2 * Algorithm testing framework and tests.
   3 *
   4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
   5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
   6 * Copyright (c) 2007 Nokia Siemens Networks
   7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
   8 *
   9 * Updated RFC4106 AES-GCM testing.
  10 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  11 *             Adrian Hoban <adrian.hoban@intel.com>
  12 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
  13 *             Tadeusz Struk (tadeusz.struk@intel.com)
  14 *    Copyright (c) 2010, Intel Corporation.
  15 *
  16 * This program is free software; you can redistribute it and/or modify it
  17 * under the terms of the GNU General Public License as published by the Free
  18 * Software Foundation; either version 2 of the License, or (at your option)
  19 * any later version.
  20 *
  21 */
  22
 
  23#include <crypto/hash.h>
 
  24#include <linux/err.h>
 
  25#include <linux/module.h>
  26#include <linux/scatterlist.h>
  27#include <linux/slab.h>
  28#include <linux/string.h>
  29#include <crypto/rng.h>
 
 
 
 
  30
  31#include "internal.h"
  32
 
 
 
 
  33#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  34
  35/* a perfect nop */
  36int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  37{
  38	return 0;
  39}
  40
  41#else
  42
  43#include "testmgr.h"
  44
  45/*
  46 * Need slab memory for testing (size in number of pages).
  47 */
  48#define XBUFSIZE	8
  49
  50/*
  51 * Indexes into the xbuf to simulate cross-page access.
  52 */
  53#define IDX1		32
  54#define IDX2		32400
  55#define IDX3		1
  56#define IDX4		8193
  57#define IDX5		22222
  58#define IDX6		17101
  59#define IDX7		27333
  60#define IDX8		3000
  61
  62/*
  63* Used by test_cipher()
  64*/
  65#define ENCRYPT 1
  66#define DECRYPT 0
  67
  68struct tcrypt_result {
  69	struct completion completion;
  70	int err;
  71};
  72
  73struct aead_test_suite {
  74	struct {
  75		struct aead_testvec *vecs;
  76		unsigned int count;
  77	} enc, dec;
  78};
  79
  80struct cipher_test_suite {
  81	struct {
  82		struct cipher_testvec *vecs;
  83		unsigned int count;
  84	} enc, dec;
  85};
  86
  87struct comp_test_suite {
  88	struct {
  89		struct comp_testvec *vecs;
  90		unsigned int count;
  91	} comp, decomp;
  92};
  93
  94struct pcomp_test_suite {
  95	struct {
  96		struct pcomp_testvec *vecs;
  97		unsigned int count;
  98	} comp, decomp;
 
 
 
 
 
 
 
 
  99};
 100
 101struct hash_test_suite {
 102	struct hash_testvec *vecs;
 103	unsigned int count;
 104};
 105
 106struct cprng_test_suite {
 107	struct cprng_testvec *vecs;
 108	unsigned int count;
 109};
 110
 111struct alg_test_desc {
 112	const char *alg;
 113	int (*test)(const struct alg_test_desc *desc, const char *driver,
 114		    u32 type, u32 mask);
 115	int fips_allowed;	/* set if alg is allowed in fips mode */
 116
 117	union {
 118		struct aead_test_suite aead;
 119		struct cipher_test_suite cipher;
 120		struct comp_test_suite comp;
 121		struct pcomp_test_suite pcomp;
 122		struct hash_test_suite hash;
 123		struct cprng_test_suite cprng;
 
 
 
 124	} suite;
 125};
 126
 127static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 
 128
 129static void hexdump(unsigned char *buf, unsigned int len)
 130{
 131	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
 132			16, 1,
 133			buf, len, false);
 134}
 135
 136static void tcrypt_complete(struct crypto_async_request *req, int err)
 137{
 138	struct tcrypt_result *res = req->data;
 139
 140	if (err == -EINPROGRESS)
 141		return;
 142
 143	res->err = err;
 144	complete(&res->completion);
 145}
 146
 147static int testmgr_alloc_buf(char *buf[XBUFSIZE])
 148{
 149	int i;
 150
 151	for (i = 0; i < XBUFSIZE; i++) {
 152		buf[i] = (void *)__get_free_page(GFP_KERNEL);
 153		if (!buf[i])
 154			goto err_free_buf;
 155	}
 156
 157	return 0;
 158
 159err_free_buf:
 160	while (i-- > 0)
 161		free_page((unsigned long)buf[i]);
 162
 163	return -ENOMEM;
 164}
 165
 166static void testmgr_free_buf(char *buf[XBUFSIZE])
 167{
 168	int i;
 169
 170	for (i = 0; i < XBUFSIZE; i++)
 171		free_page((unsigned long)buf[i]);
 172}
 173
 174static int do_one_async_hash_op(struct ahash_request *req,
 175				struct tcrypt_result *tr,
 176				int ret)
 
 
 
 
 
 
 
 
 
 
 
 
 
 177{
 178	if (ret == -EINPROGRESS || ret == -EBUSY) {
 179		ret = wait_for_completion_interruptible(&tr->completion);
 180		if (!ret)
 181			ret = tr->err;
 182		reinit_completion(&tr->completion);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 183	}
 
 
 
 
 
 
 
 
 
 
 
 184	return ret;
 185}
 186
 187static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 188		       unsigned int tcount, bool use_digest,
 189		       const int align_offset)
 190{
 191	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 
 192	unsigned int i, j, k, temp;
 193	struct scatterlist sg[8];
 194	char result[64];
 
 195	struct ahash_request *req;
 196	struct tcrypt_result tresult;
 197	void *hash_buff;
 198	char *xbuf[XBUFSIZE];
 199	int ret = -ENOMEM;
 200
 
 
 
 
 
 
 201	if (testmgr_alloc_buf(xbuf))
 202		goto out_nobuf;
 203
 204	init_completion(&tresult.completion);
 205
 206	req = ahash_request_alloc(tfm, GFP_KERNEL);
 207	if (!req) {
 208		printk(KERN_ERR "alg: hash: Failed to allocate request for "
 209		       "%s\n", algo);
 210		goto out_noreq;
 211	}
 212	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 213				   tcrypt_complete, &tresult);
 214
 215	j = 0;
 216	for (i = 0; i < tcount; i++) {
 217		if (template[i].np)
 218			continue;
 219
 220		ret = -EINVAL;
 221		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
 222			goto out;
 223
 224		j++;
 225		memset(result, 0, 64);
 226
 227		hash_buff = xbuf[0];
 228		hash_buff += align_offset;
 229
 230		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 231		sg_init_one(&sg[0], hash_buff, template[i].psize);
 232
 233		if (template[i].ksize) {
 234			crypto_ahash_clear_flags(tfm, ~0);
 235			ret = crypto_ahash_setkey(tfm, template[i].key,
 236						  template[i].ksize);
 
 
 
 
 
 
 237			if (ret) {
 238				printk(KERN_ERR "alg: hash: setkey failed on "
 239				       "test %d for %s: ret=%d\n", j, algo,
 240				       -ret);
 241				goto out;
 242			}
 243		}
 244
 245		ahash_request_set_crypt(req, sg, result, template[i].psize);
 246		if (use_digest) {
 247			ret = do_one_async_hash_op(req, &tresult,
 248						   crypto_ahash_digest(req));
 249			if (ret) {
 250				pr_err("alg: hash: digest failed on test %d "
 251				       "for %s: ret=%d\n", j, algo, -ret);
 252				goto out;
 253			}
 254		} else {
 255			ret = do_one_async_hash_op(req, &tresult,
 256						   crypto_ahash_init(req));
 257			if (ret) {
 258				pr_err("alt: hash: init failed on test %d "
 259				       "for %s: ret=%d\n", j, algo, -ret);
 260				goto out;
 261			}
 262			ret = do_one_async_hash_op(req, &tresult,
 263						   crypto_ahash_update(req));
 264			if (ret) {
 265				pr_err("alt: hash: update failed on test %d "
 
 
 
 
 
 
 266				       "for %s: ret=%d\n", j, algo, -ret);
 267				goto out;
 268			}
 269			ret = do_one_async_hash_op(req, &tresult,
 270						   crypto_ahash_final(req));
 
 
 
 
 
 271			if (ret) {
 272				pr_err("alt: hash: final failed on test %d "
 273				       "for %s: ret=%d\n", j, algo, -ret);
 274				goto out;
 275			}
 276		}
 277
 278		if (memcmp(result, template[i].digest,
 279			   crypto_ahash_digestsize(tfm))) {
 280			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
 281			       j, algo);
 282			hexdump(result, crypto_ahash_digestsize(tfm));
 283			ret = -EINVAL;
 284			goto out;
 285		}
 286	}
 287
 288	j = 0;
 289	for (i = 0; i < tcount; i++) {
 290		/* alignment tests are only done with continuous buffers */
 291		if (align_offset != 0)
 292			break;
 293
 294		if (template[i].np) {
 295			j++;
 296			memset(result, 0, 64);
 
 
 297
 298			temp = 0;
 299			sg_init_table(sg, template[i].np);
 300			ret = -EINVAL;
 301			for (k = 0; k < template[i].np; k++) {
 302				if (WARN_ON(offset_in_page(IDX[k]) +
 303					    template[i].tap[k] > PAGE_SIZE))
 304					goto out;
 305				sg_set_buf(&sg[k],
 306					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
 307						  offset_in_page(IDX[k]),
 308						  template[i].plaintext + temp,
 309						  template[i].tap[k]),
 310					   template[i].tap[k]);
 311				temp += template[i].tap[k];
 312			}
 313
 314			if (template[i].ksize) {
 315				crypto_ahash_clear_flags(tfm, ~0);
 316				ret = crypto_ahash_setkey(tfm, template[i].key,
 317							  template[i].ksize);
 318
 319				if (ret) {
 320					printk(KERN_ERR "alg: hash: setkey "
 321					       "failed on chunking test %d "
 322					       "for %s: ret=%d\n", j, algo,
 323					       -ret);
 324					goto out;
 325				}
 326			}
 
 
 
 327
 328			ahash_request_set_crypt(req, sg, result,
 329						template[i].psize);
 330			ret = crypto_ahash_digest(req);
 331			switch (ret) {
 332			case 0:
 333				break;
 334			case -EINPROGRESS:
 335			case -EBUSY:
 336				ret = wait_for_completion_interruptible(
 337					&tresult.completion);
 338				if (!ret && !(ret = tresult.err)) {
 339					reinit_completion(&tresult.completion);
 340					break;
 341				}
 342				/* fall through */
 343			default:
 344				printk(KERN_ERR "alg: hash: digest failed "
 345				       "on chunking test %d for %s: "
 346				       "ret=%d\n", j, algo, -ret);
 347				goto out;
 348			}
 
 349
 350			if (memcmp(result, template[i].digest,
 351				   crypto_ahash_digestsize(tfm))) {
 352				printk(KERN_ERR "alg: hash: Chunking test %d "
 353				       "failed for %s\n", j, algo);
 354				hexdump(result, crypto_ahash_digestsize(tfm));
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 355				ret = -EINVAL;
 356				goto out;
 357			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 358		}
 359	}
 360
 361	ret = 0;
 362
 363out:
 364	ahash_request_free(req);
 365out_noreq:
 366	testmgr_free_buf(xbuf);
 367out_nobuf:
 
 
 368	return ret;
 369}
 370
 371static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 
 372		     unsigned int tcount, bool use_digest)
 373{
 374	unsigned int alignmask;
 375	int ret;
 376
 377	ret = __test_hash(tfm, template, tcount, use_digest, 0);
 378	if (ret)
 379		return ret;
 380
 381	/* test unaligned buffers, check with one byte offset */
 382	ret = __test_hash(tfm, template, tcount, use_digest, 1);
 383	if (ret)
 384		return ret;
 385
 386	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 387	if (alignmask) {
 388		/* Check if alignment mask for tfm is correctly set. */
 389		ret = __test_hash(tfm, template, tcount, use_digest,
 390				  alignmask + 1);
 391		if (ret)
 392			return ret;
 393	}
 394
 395	return 0;
 396}
 397
 398static int __test_aead(struct crypto_aead *tfm, int enc,
 399		       struct aead_testvec *template, unsigned int tcount,
 400		       const bool diff_dst, const int align_offset)
 401{
 402	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
 403	unsigned int i, j, k, n, temp;
 404	int ret = -ENOMEM;
 405	char *q;
 406	char *key;
 407	struct aead_request *req;
 408	struct scatterlist *sg;
 409	struct scatterlist *asg;
 410	struct scatterlist *sgout;
 411	const char *e, *d;
 412	struct tcrypt_result result;
 413	unsigned int authsize;
 414	void *input;
 415	void *output;
 416	void *assoc;
 417	char iv[MAX_IVLEN];
 418	char *xbuf[XBUFSIZE];
 419	char *xoutbuf[XBUFSIZE];
 420	char *axbuf[XBUFSIZE];
 421
 422	if (testmgr_alloc_buf(xbuf))
 423		goto out_noxbuf;
 424	if (testmgr_alloc_buf(axbuf))
 425		goto out_noaxbuf;
 426
 427	if (diff_dst && testmgr_alloc_buf(xoutbuf))
 428		goto out_nooutbuf;
 429
 430	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
 431	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
 432	if (!sg)
 433		goto out_nosg;
 434	asg = &sg[8];
 435	sgout = &asg[8];
 436
 437	if (diff_dst)
 438		d = "-ddst";
 439	else
 440		d = "";
 441
 442	if (enc == ENCRYPT)
 443		e = "encryption";
 444	else
 445		e = "decryption";
 446
 447	init_completion(&result.completion);
 448
 449	req = aead_request_alloc(tfm, GFP_KERNEL);
 450	if (!req) {
 451		pr_err("alg: aead%s: Failed to allocate request for %s\n",
 452		       d, algo);
 453		goto out;
 454	}
 455
 456	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 457				  tcrypt_complete, &result);
 458
 459	for (i = 0, j = 0; i < tcount; i++) {
 460		if (!template[i].np) {
 461			j++;
 462
 463			/* some templates have no input data but they will
 464			 * touch input
 465			 */
 466			input = xbuf[0];
 467			input += align_offset;
 468			assoc = axbuf[0];
 469
 470			ret = -EINVAL;
 471			if (WARN_ON(align_offset + template[i].ilen >
 472				    PAGE_SIZE || template[i].alen > PAGE_SIZE))
 473				goto out;
 474
 475			memcpy(input, template[i].input, template[i].ilen);
 476			memcpy(assoc, template[i].assoc, template[i].alen);
 477			if (template[i].iv)
 478				memcpy(iv, template[i].iv, MAX_IVLEN);
 479			else
 480				memset(iv, 0, MAX_IVLEN);
 481
 482			crypto_aead_clear_flags(tfm, ~0);
 483			if (template[i].wk)
 484				crypto_aead_set_flags(
 485					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 486
 487			key = template[i].key;
 488
 489			ret = crypto_aead_setkey(tfm, key,
 490						 template[i].klen);
 491			if (!ret == template[i].fail) {
 492				pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
 493				       d, j, algo, crypto_aead_get_flags(tfm));
 494				goto out;
 495			} else if (ret)
 496				continue;
 497
 498			authsize = abs(template[i].rlen - template[i].ilen);
 499			ret = crypto_aead_setauthsize(tfm, authsize);
 500			if (ret) {
 501				pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
 502				       d, authsize, j, algo);
 503				goto out;
 504			}
 505
 506			if (diff_dst) {
 507				output = xoutbuf[0];
 508				output += align_offset;
 509				sg_init_one(&sg[0], input, template[i].ilen);
 510				sg_init_one(&sgout[0], output,
 511					    template[i].rlen);
 512			} else {
 513				sg_init_one(&sg[0], input,
 514					    template[i].ilen +
 515						(enc ? authsize : 0));
 516				output = input;
 517			}
 518
 519			sg_init_one(&asg[0], assoc, template[i].alen);
 520
 521			aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 522					       template[i].ilen, iv);
 523
 524			aead_request_set_assoc(req, asg, template[i].alen);
 525
 526			ret = enc ?
 527				crypto_aead_encrypt(req) :
 528				crypto_aead_decrypt(req);
 529
 530			switch (ret) {
 531			case 0:
 532				if (template[i].novrfy) {
 533					/* verification was supposed to fail */
 534					pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
 535					       d, e, j, algo);
 536					/* so really, we got a bad message */
 537					ret = -EBADMSG;
 538					goto out;
 539				}
 540				break;
 541			case -EINPROGRESS:
 542			case -EBUSY:
 543				ret = wait_for_completion_interruptible(
 544					&result.completion);
 545				if (!ret && !(ret = result.err)) {
 546					reinit_completion(&result.completion);
 547					break;
 548				}
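				/* fall through */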
 549			case -EBADMSG:
 550				if (template[i].novrfy)
 551					/* verification failure was expected */
 552					continue;
 553				/* fall through */
 554			default:
 555				pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
 556				       d, e, j, algo, -ret);
 557				goto out;
 558			}
 559
 560			q = output;
 561			if (memcmp(q, template[i].result, template[i].rlen)) {
 562				pr_err("alg: aead%s: Test %d failed on %s for %s\n",
 563				       d, j, e, algo);
 564				hexdump(q, template[i].rlen);
 565				ret = -EINVAL;
 566				goto out;
 567			}
 568		}
 569	}
 570
 571	for (i = 0, j = 0; i < tcount; i++) {
 572		/* alignment tests are only done with contiguous buffers */
 573		if (align_offset != 0)
 574			break;
 575
 576		if (template[i].np) {
 577			j++;
 578
 579			if (template[i].iv)
 580				memcpy(iv, template[i].iv, MAX_IVLEN);
 581			else
 582				memset(iv, 0, MAX_IVLEN);
 583
 584			crypto_aead_clear_flags(tfm, ~0);
 585			if (template[i].wk)
 586				crypto_aead_set_flags(
 587					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 588			key = template[i].key;
 589
 590			ret = crypto_aead_setkey(tfm, key, template[i].klen);
 591			if (!ret == template[i].fail) {
 592				pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
 593				       d, j, algo, crypto_aead_get_flags(tfm));
 594				goto out;
 595			} else if (ret)
 596				continue;
 597
 598			authsize = abs(template[i].rlen - template[i].ilen);
 599
 600			ret = -EINVAL;
 601			sg_init_table(sg, template[i].np);
 602			if (diff_dst)
 603				sg_init_table(sgout, template[i].np);
 604			for (k = 0, temp = 0; k < template[i].np; k++) {
 605				if (WARN_ON(offset_in_page(IDX[k]) +
 606					    template[i].tap[k] > PAGE_SIZE))
 607					goto out;
 608
 609				q = xbuf[IDX[k] >> PAGE_SHIFT] +
 610				    offset_in_page(IDX[k]);
 611
 612				memcpy(q, template[i].input + temp,
 613				       template[i].tap[k]);
 614
 615				sg_set_buf(&sg[k], q, template[i].tap[k]);
 616
 617				if (diff_dst) {
 618					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 619					    offset_in_page(IDX[k]);
 620
 621					memset(q, 0, template[i].tap[k]);
 622
 623					sg_set_buf(&sgout[k], q,
 624						   template[i].tap[k]);
 625				}
 626
 627				n = template[i].tap[k];
 628				if (k == template[i].np - 1 && enc)
 629					n += authsize;
 630				if (offset_in_page(q) + n < PAGE_SIZE)
 631					q[n] = 0;
 632
 633				temp += template[i].tap[k];
 634			}
 635
 636			ret = crypto_aead_setauthsize(tfm, authsize);
 637			if (ret) {
 638				pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
 639				       d, authsize, j, algo);
 640				goto out;
 641			}
 642
 643			if (enc) {
 644				if (WARN_ON(sg[k - 1].offset +
 645					    sg[k - 1].length + authsize >
 646					    PAGE_SIZE)) {
 647					ret = -EINVAL;
 648					goto out;
 649				}
 650
 651				if (diff_dst)
 652					sgout[k - 1].length += authsize;
 653				else
 654					sg[k - 1].length += authsize;
 655			}
 656
 657			sg_init_table(asg, template[i].anp);
 658			ret = -EINVAL;
 659			for (k = 0, temp = 0; k < template[i].anp; k++) {
 660				if (WARN_ON(offset_in_page(IDX[k]) +
 661					    template[i].atap[k] > PAGE_SIZE))
 662					goto out;
 663				sg_set_buf(&asg[k],
 664					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
 665						  offset_in_page(IDX[k]),
 666						  template[i].assoc + temp,
 667						  template[i].atap[k]),
 668					   template[i].atap[k]);
 669				temp += template[i].atap[k];
 670			}
 671
 672			aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
 673					       template[i].ilen,
 674					       iv);
 675
 676			aead_request_set_assoc(req, asg, template[i].alen);
 677
 678			ret = enc ?
 679				crypto_aead_encrypt(req) :
 680				crypto_aead_decrypt(req);
 681
 682			switch (ret) {
 683			case 0:
 684				if (template[i].novrfy) {
 685					/* verification was supposed to fail */
 686					pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
 687					       d, e, j, algo);
 688					/* so really, we got a bad message */
 689					ret = -EBADMSG;
 690					goto out;
 691				}
 692				break;
 693			case -EINPROGRESS:
 694			case -EBUSY:
 695				ret = wait_for_completion_interruptible(
 696					&result.completion);
 697				if (!ret && !(ret = result.err)) {
 698					reinit_completion(&result.completion);
 699					break;
 700				}
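				/* fall through */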
 701			case -EBADMSG:
 702				if (template[i].novrfy)
 703					/* verification failure was expected */
 704					continue;
 705				/* fall through */
 706			default:
 707				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
 708				       d, e, j, algo, -ret);
 709				goto out;
 710			}
 711
 712			ret = -EINVAL;
 713			for (k = 0, temp = 0; k < template[i].np; k++) {
 714				if (diff_dst)
 715					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
 716					    offset_in_page(IDX[k]);
 717				else
 718					q = xbuf[IDX[k] >> PAGE_SHIFT] +
 719					    offset_in_page(IDX[k]);
 720
 721				n = template[i].tap[k];
 722				if (k == template[i].np - 1)
 723					n += enc ? authsize : -authsize;
 724
 725				if (memcmp(q, template[i].result + temp, n)) {
 726					pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
 727					       d, j, e, k, algo);
 728					hexdump(q, n);
 729					goto out;
 730				}
 731
 732				q += n;
 733				if (k == template[i].np - 1 && !enc) {
 734					if (!diff_dst &&
 735						memcmp(q, template[i].input +
 736						      temp + n, authsize))
 737						n = authsize;
 738					else
 739						n = 0;
 740				} else {
 741					for (n = 0; offset_in_page(q + n) &&
 742						    q[n]; n++)
 743						;
 744				}
 745				if (n) {
 746					pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
 747					       d, j, e, k, algo, n);
 748					hexdump(q, n);
 749					goto out;
 750				}
 751
 752				temp += template[i].tap[k];
 753			}
 754		}
 755	}
 756
 757	ret = 0;
 758
 759out:
 760	aead_request_free(req);
 761	kfree(sg);
 762out_nosg:
 763	if (diff_dst)
 764		testmgr_free_buf(xoutbuf);
 765out_nooutbuf:
 766	testmgr_free_buf(axbuf);
 767out_noaxbuf:
 768	testmgr_free_buf(xbuf);
 769out_noxbuf:
 770	return ret;
 771}
 772
 773static int test_aead(struct crypto_aead *tfm, int enc,
 774		     struct aead_testvec *template, unsigned int tcount)
 775{
 776	unsigned int alignmask;
 777	int ret;
 778
 779	/* test 'dst == src' case */
 780	ret = __test_aead(tfm, enc, template, tcount, false, 0);
 781	if (ret)
 782		return ret;
 783
 784	/* test 'dst != src' case */
 785	ret = __test_aead(tfm, enc, template, tcount, true, 0);
 786	if (ret)
 787		return ret;
 788
 789	/* test unaligned buffers, check with one byte offset */
 790	ret = __test_aead(tfm, enc, template, tcount, true, 1);
 791	if (ret)
 792		return ret;
 793
 794	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 795	if (alignmask) {
 796		/* Check if alignment mask for tfm is correctly set. */
 797		ret = __test_aead(tfm, enc, template, tcount, true,
 798				  alignmask + 1);
 799		if (ret)
 800			return ret;
 801	}
 802
 803	return 0;
 804}
 805
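/*
 * Illustrative sketch (assumption, for clarity only): the authsize used
 * above is recovered from the vectors themselves.  An encryption vector's
 * expected result is ciphertext plus tag and a decryption vector's result
 * is plaintext minus tag, so the tag length is the absolute difference
 * between rlen and ilen.
 */
#if 0
static unsigned int example_aead_tag_len(const struct aead_testvec *vec)
{
	return abs(vec->rlen - vec->ilen);	/* e.g. 16 for GCM vectors */
}
#endif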
 806static int test_cipher(struct crypto_cipher *tfm, int enc,
 807		       struct cipher_testvec *template, unsigned int tcount)
 808{
 809	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
 810	unsigned int i, j, k;
 811	char *q;
 812	const char *e;
 813	void *data;
 814	char *xbuf[XBUFSIZE];
 815	int ret = -ENOMEM;
 816
 817	if (testmgr_alloc_buf(xbuf))
 818		goto out_nobuf;
 819
 820	if (enc == ENCRYPT)
 821		e = "encryption";
 822	else
 823		e = "decryption";
 824
 825	j = 0;
 826	for (i = 0; i < tcount; i++) {
 827		if (template[i].np)
 828			continue;
 829
 830		j++;
 831
 832		ret = -EINVAL;
 833		if (WARN_ON(template[i].ilen > PAGE_SIZE))
 834			goto out;
 835
 836		data = xbuf[0];
 837		memcpy(data, template[i].input, template[i].ilen);
 838
 839		crypto_cipher_clear_flags(tfm, ~0);
 840		if (template[i].wk)
 841			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 842
 843		ret = crypto_cipher_setkey(tfm, template[i].key,
 844					   template[i].klen);
 845		if (!ret == template[i].fail) {
 846			printk(KERN_ERR "alg: cipher: setkey failed "
 847			       "on test %d for %s: flags=%x\n", j,
 848			       algo, crypto_cipher_get_flags(tfm));
 849			goto out;
 850		} else if (ret)
 851			continue;
 852
 853		for (k = 0; k < template[i].ilen;
 854		     k += crypto_cipher_blocksize(tfm)) {
 855			if (enc)
 856				crypto_cipher_encrypt_one(tfm, data + k,
 857							  data + k);
 858			else
 859				crypto_cipher_decrypt_one(tfm, data + k,
 860							  data + k);
 861		}
 862
 863		q = data;
 864		if (memcmp(q, template[i].result, template[i].rlen)) {
 865			printk(KERN_ERR "alg: cipher: Test %d failed "
 866			       "on %s for %s\n", j, e, algo);
 867			hexdump(q, template[i].rlen);
 868			ret = -EINVAL;
 869			goto out;
 870		}
 871	}
 872
 873	ret = 0;
 874
 875out:
 876	testmgr_free_buf(xbuf);
 877out_nobuf:
 878	return ret;
 879}
 880
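/*
 * Illustrative sketch (not part of testmgr): the single-block cipher
 * interface used in test_cipher() handles exactly one block per call, so
 * a multi-block buffer is walked in crypto_cipher_blocksize() strides.
 * The helper name and parameters are assumptions.
 */
#if 0
static void example_ecb_walk(struct crypto_cipher *tfm, u8 *buf,
			     unsigned int len, int enc)
{
	unsigned int bs = crypto_cipher_blocksize(tfm);
	unsigned int k;

	for (k = 0; k + bs <= len; k += bs) {
		if (enc)
			crypto_cipher_encrypt_one(tfm, buf + k, buf + k);
		else
			crypto_cipher_decrypt_one(tfm, buf + k, buf + k);
	}
}
#endif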
 881static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 882			   struct cipher_testvec *template, unsigned int tcount,
 883			   const bool diff_dst, const int align_offset)
 884{
 885	const char *algo =
 886		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
 887	unsigned int i, j, k, n, temp;
 888	char *q;
 889	struct ablkcipher_request *req;
 890	struct scatterlist sg[8];
 891	struct scatterlist sgout[8];
 892	const char *e, *d;
 893	struct tcrypt_result result;
 894	void *data;
 895	char iv[MAX_IVLEN];
 896	char *xbuf[XBUFSIZE];
 897	char *xoutbuf[XBUFSIZE];
 898	int ret = -ENOMEM;
 899
 900	if (testmgr_alloc_buf(xbuf))
 901		goto out_nobuf;
 902
 903	if (diff_dst && testmgr_alloc_buf(xoutbuf))
 904		goto out_nooutbuf;
 905
 906	if (diff_dst)
 907		d = "-ddst";
 908	else
 909		d = "";
 910
 911	if (enc == ENCRYPT)
 912		e = "encryption";
 913	else
 914		e = "decryption";
 915
 916	init_completion(&result.completion);
 917
 918	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 919	if (!req) {
 920		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
 921		       d, algo);
 922		goto out;
 923	}
 924
 925	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 926					tcrypt_complete, &result);
 927
 928	j = 0;
 929	for (i = 0; i < tcount; i++) {
 930		if (template[i].iv)
 931			memcpy(iv, template[i].iv, MAX_IVLEN);
 932		else
 933			memset(iv, 0, MAX_IVLEN);
 934
 935		if (!(template[i].np) || (template[i].also_non_np)) {
 936			j++;
 937
 938			ret = -EINVAL;
 939			if (WARN_ON(align_offset + template[i].ilen >
 940				    PAGE_SIZE))
 941				goto out;
 942
 943			data = xbuf[0];
 944			data += align_offset;
 945			memcpy(data, template[i].input, template[i].ilen);
 946
 947			crypto_ablkcipher_clear_flags(tfm, ~0);
 948			if (template[i].wk)
 949				crypto_ablkcipher_set_flags(
 950					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 951
 952			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
 953						       template[i].klen);
 954			if (!ret == template[i].fail) {
 955				pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
 956				       d, j, algo,
 957				       crypto_ablkcipher_get_flags(tfm));
 958				goto out;
 959			} else if (ret)
 960				continue;
 961
 962			sg_init_one(&sg[0], data, template[i].ilen);
 963			if (diff_dst) {
 964				data = xoutbuf[0];
 965				data += align_offset;
 966				sg_init_one(&sgout[0], data, template[i].ilen);
 967			}
 968
 969			ablkcipher_request_set_crypt(req, sg,
 970						     (diff_dst) ? sgout : sg,
 971						     template[i].ilen, iv);
 972			ret = enc ?
 973				crypto_ablkcipher_encrypt(req) :
 974				crypto_ablkcipher_decrypt(req);
 975
 976			switch (ret) {
 977			case 0:
 978				break;
 979			case -EINPROGRESS:
 980			case -EBUSY:
 981				ret = wait_for_completion_interruptible(
 982					&result.completion);
 983				if (!ret && !((ret = result.err))) {
 984					reinit_completion(&result.completion);
 985					break;
 986				}
 987				/* fall through */
 988			default:
 989				pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
 990				       d, e, j, algo, -ret);
 991				goto out;
 992			}
 993
 994			q = data;
 995			if (memcmp(q, template[i].result, template[i].rlen)) {
 996				pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
 997				       d, j, e, algo);
 998				hexdump(q, template[i].rlen);
 999				ret = -EINVAL;
1000				goto out;
1001			}
1002		}
1003	}
1004
1005	j = 0;
1006	for (i = 0; i < tcount; i++) {
1007		/* alignment tests are only done with contiguous buffers */
1008		if (align_offset != 0)
1009			break;
1010
1011		if (template[i].iv)
1012			memcpy(iv, template[i].iv, MAX_IVLEN);
1013		else
1014			memset(iv, 0, MAX_IVLEN);
1015
1016		if (template[i].np) {
1017			j++;
1018
1019			crypto_ablkcipher_clear_flags(tfm, ~0);
1020			if (template[i].wk)
1021				crypto_ablkcipher_set_flags(
1022					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1023
1024			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1025						       template[i].klen);
1026			if (!ret == template[i].fail) {
1027				pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1028				       d, j, algo,
1029				       crypto_ablkcipher_get_flags(tfm));
1030				goto out;
1031			} else if (ret)
1032				continue;
1033
1034			temp = 0;
1035			ret = -EINVAL;
1036			sg_init_table(sg, template[i].np);
1037			if (diff_dst)
1038				sg_init_table(sgout, template[i].np);
1039			for (k = 0; k < template[i].np; k++) {
1040				if (WARN_ON(offset_in_page(IDX[k]) +
1041					    template[i].tap[k] > PAGE_SIZE))
1042					goto out;
1043
1044				q = xbuf[IDX[k] >> PAGE_SHIFT] +
1045				    offset_in_page(IDX[k]);
1046
1047				memcpy(q, template[i].input + temp,
1048				       template[i].tap[k]);
1049
1050				if (offset_in_page(q) + template[i].tap[k] <
1051				    PAGE_SIZE)
1052					q[template[i].tap[k]] = 0;
1053
1054				sg_set_buf(&sg[k], q, template[i].tap[k]);
1055				if (diff_dst) {
1056					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1057					    offset_in_page(IDX[k]);
1058
1059					sg_set_buf(&sgout[k], q,
1060						   template[i].tap[k]);
1061
1062					memset(q, 0, template[i].tap[k]);
1063					if (offset_in_page(q) +
1064					    template[i].tap[k] < PAGE_SIZE)
1065						q[template[i].tap[k]] = 0;
1066				}
1067
1068				temp += template[i].tap[k];
1069			}
1070
1071			ablkcipher_request_set_crypt(req, sg,
1072					(diff_dst) ? sgout : sg,
1073					template[i].ilen, iv);
1074
1075			ret = enc ?
1076				crypto_ablkcipher_encrypt(req) :
1077				crypto_ablkcipher_decrypt(req);
1078
1079			switch (ret) {
1080			case 0:
1081				break;
1082			case -EINPROGRESS:
1083			case -EBUSY:
1084				ret = wait_for_completion_interruptible(
1085					&result.completion);
1086				if (!ret && !((ret = result.err))) {
1087					reinit_completion(&result.completion);
1088					break;
1089				}
1090				/* fall through */
1091			default:
1092				pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1093				       d, e, j, algo, -ret);
1094				goto out;
1095			}
1096
1097			temp = 0;
1098			ret = -EINVAL;
1099			for (k = 0; k < template[i].np; k++) {
1100				if (diff_dst)
1101					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1102					    offset_in_page(IDX[k]);
1103				else
1104					q = xbuf[IDX[k] >> PAGE_SHIFT] +
1105					    offset_in_page(IDX[k]);
1106
1107				if (memcmp(q, template[i].result + temp,
1108					   template[i].tap[k])) {
1109					pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1110					       d, j, e, k, algo);
1111					hexdump(q, template[i].tap[k]);
1112					goto out;
1113				}
1114
1115				q += template[i].tap[k];
1116				for (n = 0; offset_in_page(q + n) && q[n]; n++)
1117					;
1118				if (n) {
1119					pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1120					       d, j, e, k, algo, n);
1121					hexdump(q, n);
1122					goto out;
1123				}
1124				temp += template[i].tap[k];
1125			}
1126		}
1127	}
1128
1129	ret = 0;
1130
1131out:
1132	ablkcipher_request_free(req);
1133	if (diff_dst)
1134		testmgr_free_buf(xoutbuf);
1135out_nooutbuf:
1136	testmgr_free_buf(xbuf);
1137out_nobuf:
1138	return ret;
1139}
1140
1141static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1142			 struct cipher_testvec *template, unsigned int tcount)
1143{
1144	unsigned int alignmask;
1145	int ret;
1146
1147	/* test 'dst == src' case */
1148	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1149	if (ret)
1150		return ret;
1151
1152	/* test 'dst != src' case */
1153	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1154	if (ret)
1155		return ret;
1156
1157	/* test unaligned buffers, check with one byte offset */
1158	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1159	if (ret)
1160		return ret;
1161
1162	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1163	if (alignmask) {
1164		/* Check if alignment mask for tfm is correctly set. */
1165		ret = __test_skcipher(tfm, enc, template, tcount, true,
1166				      alignmask + 1);
1167		if (ret)
1168			return ret;
1169	}
1170
1171	return 0;
1172}
1173
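/*
 * Illustrative sketch of the async-completion pattern open-coded in the
 * switch statements above: -EINPROGRESS/-EBUSY mean the request was
 * queued, so the test sleeps on the completion filled in by
 * tcrypt_complete() and then picks up the real status from result->err.
 * The helper itself is an assumption and does not exist in testmgr.
 */
#if 0
static int example_wait_async(int ret, struct tcrypt_result *tr)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		if (!ret)
			reinit_completion(&tr->completion);
	}
	return ret;
}
#endif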
1174static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1175		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
1176{
1177	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1178	unsigned int i;
1179	char result[COMP_BUF_SIZE];
1180	int ret;
1181
1182	for (i = 0; i < ctcount; i++) {
1183		int ilen;
1184		unsigned int dlen = COMP_BUF_SIZE;
1185
1186		memset(result, 0, sizeof (result));
1187
1188		ilen = ctemplate[i].inlen;
1189		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1190		                           ilen, result, &dlen);
1191		if (ret) {
1192			printk(KERN_ERR "alg: comp: compression failed "
1193			       "on test %d for %s: ret=%d\n", i + 1, algo,
1194			       -ret);
1195			goto out;
1196		}
1197
1198		if (dlen != ctemplate[i].outlen) {
1199			printk(KERN_ERR "alg: comp: Compression test %d "
1200			       "failed for %s: output len = %d\n", i + 1, algo,
1201			       dlen);
1202			ret = -EINVAL;
1203			goto out;
1204		}
1205
1206		if (memcmp(result, ctemplate[i].output, dlen)) {
1207			printk(KERN_ERR "alg: comp: Compression test %d "
1208			       "failed for %s\n", i + 1, algo);
1209			hexdump(result, dlen);
1210			ret = -EINVAL;
1211			goto out;
1212		}
1213	}
1214
1215	for (i = 0; i < dtcount; i++) {
1216		int ilen;
1217		unsigned int dlen = COMP_BUF_SIZE;
1218
1219		memset(result, 0, sizeof (result));
1220
1221		ilen = dtemplate[i].inlen;
1222		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1223		                             ilen, result, &dlen);
1224		if (ret) {
1225			printk(KERN_ERR "alg: comp: decompression failed "
1226			       "on test %d for %s: ret=%d\n", i + 1, algo,
1227			       -ret);
1228			goto out;
1229		}
1230
1231		if (dlen != dtemplate[i].outlen) {
1232			printk(KERN_ERR "alg: comp: Decompression test %d "
1233			       "failed for %s: output len = %d\n", i + 1, algo,
1234			       dlen);
1235			ret = -EINVAL;
1236			goto out;
1237		}
1238
1239		if (memcmp(result, dtemplate[i].output, dlen)) {
1240			printk(KERN_ERR "alg: comp: Decompression test %d "
1241			       "failed for %s\n", i + 1, algo);
1242			hexdump(result, dlen);
1243			ret = -EINVAL;
1244			goto out;
1245		}
1246	}
1247
1248	ret = 0;
1249
1250out:
1251	return ret;
1252}
1253
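/*
 * Illustrative sketch (assumption, not part of testmgr): the dlen
 * argument of crypto_comp_compress()/crypto_comp_decompress() is in/out -
 * it carries the available output space in and the produced length out,
 * which is what the length checks in test_comp() rely on.
 */
#if 0
static int example_comp_roundtrip(struct crypto_comp *tfm,
				  const u8 *in, unsigned int ilen)
{
	u8 comp[COMP_BUF_SIZE], decomp[COMP_BUF_SIZE];
	unsigned int clen = COMP_BUF_SIZE, dlen = COMP_BUF_SIZE;
	int ret;

	ret = crypto_comp_compress(tfm, in, ilen, comp, &clen);
	if (ret)
		return ret;

	ret = crypto_comp_decompress(tfm, comp, clen, decomp, &dlen);
	if (ret)
		return ret;

	return (dlen == ilen && !memcmp(decomp, in, ilen)) ? 0 : -EINVAL;
}
#endif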
1254static int test_pcomp(struct crypto_pcomp *tfm,
1255		      struct pcomp_testvec *ctemplate,
1256		      struct pcomp_testvec *dtemplate, int ctcount,
1257		      int dtcount)
1258{
1259	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1260	unsigned int i;
1261	char result[COMP_BUF_SIZE];
1262	int res;
1263
1264	for (i = 0; i < ctcount; i++) {
1265		struct comp_request req;
1266		unsigned int produced = 0;
1267
1268		res = crypto_compress_setup(tfm, ctemplate[i].params,
1269					    ctemplate[i].paramsize);
1270		if (res) {
1271			pr_err("alg: pcomp: compression setup failed on test "
1272			       "%d for %s: error=%d\n", i + 1, algo, res);
1273			return res;
1274		}
1275
1276		res = crypto_compress_init(tfm);
1277		if (res) {
1278			pr_err("alg: pcomp: compression init failed on test "
1279			       "%d for %s: error=%d\n", i + 1, algo, res);
1280			return res;
1281		}
1282
1283		memset(result, 0, sizeof(result));
1284
1285		req.next_in = ctemplate[i].input;
1286		req.avail_in = ctemplate[i].inlen / 2;
1287		req.next_out = result;
1288		req.avail_out = ctemplate[i].outlen / 2;
1289
1290		res = crypto_compress_update(tfm, &req);
1291		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1292			pr_err("alg: pcomp: compression update failed on test "
1293			       "%d for %s: error=%d\n", i + 1, algo, res);
1294			return res;
1295		}
1296		if (res > 0)
1297			produced += res;
1298
1299		/* Add remaining input data */
1300		req.avail_in += (ctemplate[i].inlen + 1) / 2;
1301
1302		res = crypto_compress_update(tfm, &req);
1303		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1304			pr_err("alg: pcomp: compression update failed on test "
1305			       "%d for %s: error=%d\n", i + 1, algo, res);
1306			return res;
1307		}
1308		if (res > 0)
1309			produced += res;
1310
1311		/* Provide remaining output space */
1312		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1313
1314		res = crypto_compress_final(tfm, &req);
1315		if (res < 0) {
1316			pr_err("alg: pcomp: compression final failed on test "
1317			       "%d for %s: error=%d\n", i + 1, algo, res);
1318			return res;
1319		}
1320		produced += res;
1321
1322		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1323			pr_err("alg: comp: Compression test %d failed for %s: "
1324			       "output len = %d (expected %d)\n", i + 1, algo,
1325			       COMP_BUF_SIZE - req.avail_out,
1326			       ctemplate[i].outlen);
1327			return -EINVAL;
1328		}
1329
1330		if (produced != ctemplate[i].outlen) {
1331			pr_err("alg: comp: Compression test %d failed for %s: "
1332			       "returned len = %u (expected %d)\n", i + 1,
1333			       algo, produced, ctemplate[i].outlen);
1334			return -EINVAL;
1335		}
1336
1337		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1338			pr_err("alg: pcomp: Compression test %d failed for "
1339			       "%s\n", i + 1, algo);
1340			hexdump(result, ctemplate[i].outlen);
1341			return -EINVAL;
1342		}
1343	}
1344
1345	for (i = 0; i < dtcount; i++) {
1346		struct comp_request req;
1347		unsigned int produced = 0;
1348
1349		res = crypto_decompress_setup(tfm, dtemplate[i].params,
1350					      dtemplate[i].paramsize);
1351		if (res) {
1352			pr_err("alg: pcomp: decompression setup failed on "
1353			       "test %d for %s: error=%d\n", i + 1, algo, res);
1354			return res;
1355		}
1356
1357		res = crypto_decompress_init(tfm);
1358		if (res) {
1359			pr_err("alg: pcomp: decompression init failed on test "
1360			       "%d for %s: error=%d\n", i + 1, algo, res);
1361			return res;
1362		}
1363
1364		memset(result, 0, sizeof(result));
1365
1366		req.next_in = dtemplate[i].input;
1367		req.avail_in = dtemplate[i].inlen / 2;
1368		req.next_out = result;
1369		req.avail_out = dtemplate[i].outlen / 2;
1370
1371		res = crypto_decompress_update(tfm, &req);
1372		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1373			pr_err("alg: pcomp: decompression update failed on "
1374			       "test %d for %s: error=%d\n", i + 1, algo, res);
1375			return res;
1376		}
1377		if (res > 0)
1378			produced += res;
1379
1380		/* Add remaining input data */
1381		req.avail_in += (dtemplate[i].inlen + 1) / 2;
1382
1383		res = crypto_decompress_update(tfm, &req);
1384		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1385			pr_err("alg: pcomp: decompression update failed on "
1386			       "test %d for %s: error=%d\n", i + 1, algo, res);
1387			return res;
1388		}
1389		if (res > 0)
1390			produced += res;
1391
1392		/* Provide remaining output space */
1393		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1394
1395		res = crypto_decompress_final(tfm, &req);
1396		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1397			pr_err("alg: pcomp: decompression final failed on "
1398			       "test %d for %s: error=%d\n", i + 1, algo, res);
1399			return res;
1400		}
1401		if (res > 0)
1402			produced += res;
1403
1404		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1405			pr_err("alg: comp: Decompression test %d failed for "
1406			       "%s: output len = %d (expected %d)\n", i + 1,
1407			       algo, COMP_BUF_SIZE - req.avail_out,
1408			       dtemplate[i].outlen);
1409			return -EINVAL;
1410		}
1411
1412		if (produced != dtemplate[i].outlen) {
1413			pr_err("alg: comp: Decompression test %d failed for "
1414			       "%s: returned len = %u (expected %d)\n", i + 1,
1415			       algo, produced, dtemplate[i].outlen);
1416			return -EINVAL;
1417		}
1418
1419		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1420			pr_err("alg: pcomp: Decompression test %d failed for "
1421			       "%s\n", i + 1, algo);
1422			hexdump(result, dtemplate[i].outlen);
1423			return -EINVAL;
1424		}
1425	}
1426
1427	return 0;
1428}
1429
1430
1431static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1432		      unsigned int tcount)
1433{
1434	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1435	int err = 0, i, j, seedsize;
1436	u8 *seed;
1437	char result[32];
1438
1439	seedsize = crypto_rng_seedsize(tfm);
1440
1441	seed = kmalloc(seedsize, GFP_KERNEL);
1442	if (!seed) {
1443		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1444		       "for %s\n", algo);
1445		return -ENOMEM;
1446	}
1447
1448	for (i = 0; i < tcount; i++) {
1449		memset(result, 0, 32);
1450
1451		memcpy(seed, template[i].v, template[i].vlen);
1452		memcpy(seed + template[i].vlen, template[i].key,
1453		       template[i].klen);
1454		memcpy(seed + template[i].vlen + template[i].klen,
1455		       template[i].dt, template[i].dtlen);
1456
1457		err = crypto_rng_reset(tfm, seed, seedsize);
1458		if (err) {
1459			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1460			       "for %s\n", algo);
1461			goto out;
1462		}
1463
1464		for (j = 0; j < template[i].loops; j++) {
1465			err = crypto_rng_get_bytes(tfm, result,
1466						   template[i].rlen);
1467			if (err != template[i].rlen) {
1468				printk(KERN_ERR "alg: cprng: Failed to obtain "
1469				       "the correct amount of random data for "
1470				       "%s (requested %d, got %d)\n", algo,
1471				       template[i].rlen, err);
1472				goto out;
1473			}
1474		}
1475
1476		err = memcmp(result, template[i].result,
1477			     template[i].rlen);
1478		if (err) {
1479			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1480			       i, algo);
1481			hexdump(result, template[i].rlen);
1482			err = -EINVAL;
1483			goto out;
1484		}
1485	}
1486
1487out:
1488	kfree(seed);
1489	return err;
1490}
1491
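/*
 * Illustrative sketch (assumption): the three memcpy() calls in
 * test_cprng() pack the seed material as V || key || DT, which is the
 * layout the ansi_cprng reset path parses.  The helper below only
 * visualises that packing and is not part of testmgr.
 */
#if 0
static void example_pack_cprng_seed(u8 *seed, const struct cprng_testvec *tv)
{
	memcpy(seed, tv->v, tv->vlen);
	memcpy(seed + tv->vlen, tv->key, tv->klen);
	memcpy(seed + tv->vlen + tv->klen, tv->dt, tv->dtlen);
}
#endif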
1492static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1493			 u32 type, u32 mask)
1494{
1495	struct crypto_aead *tfm;
1496	int err = 0;
1497
1498	tfm = crypto_alloc_aead(driver, type, mask);
1499	if (IS_ERR(tfm)) {
1500		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1501		       "%ld\n", driver, PTR_ERR(tfm));
1502		return PTR_ERR(tfm);
1503	}
1504
1505	if (desc->suite.aead.enc.vecs) {
1506		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1507				desc->suite.aead.enc.count);
1508		if (err)
1509			goto out;
1510	}
1511
1512	if (!err && desc->suite.aead.dec.vecs)
1513		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1514				desc->suite.aead.dec.count);
1515
1516out:
1517	crypto_free_aead(tfm);
1518	return err;
1519}
1520
1521static int alg_test_cipher(const struct alg_test_desc *desc,
1522			   const char *driver, u32 type, u32 mask)
1523{
1524	struct crypto_cipher *tfm;
1525	int err = 0;
1526
1527	tfm = crypto_alloc_cipher(driver, type, mask);
1528	if (IS_ERR(tfm)) {
1529		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1530		       "%s: %ld\n", driver, PTR_ERR(tfm));
1531		return PTR_ERR(tfm);
1532	}
1533
1534	if (desc->suite.cipher.enc.vecs) {
1535		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1536				  desc->suite.cipher.enc.count);
1537		if (err)
1538			goto out;
1539	}
1540
1541	if (desc->suite.cipher.dec.vecs)
1542		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1543				  desc->suite.cipher.dec.count);
1544
1545out:
1546	crypto_free_cipher(tfm);
1547	return err;
1548}
1549
1550static int alg_test_skcipher(const struct alg_test_desc *desc,
1551			     const char *driver, u32 type, u32 mask)
1552{
1553	struct crypto_ablkcipher *tfm;
1554	int err = 0;
1555
1556	tfm = crypto_alloc_ablkcipher(driver, type, mask);
1557	if (IS_ERR(tfm)) {
1558		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1559		       "%s: %ld\n", driver, PTR_ERR(tfm));
1560		return PTR_ERR(tfm);
1561	}
1562
1563	if (desc->suite.cipher.enc.vecs) {
1564		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1565				    desc->suite.cipher.enc.count);
1566		if (err)
1567			goto out;
1568	}
1569
1570	if (desc->suite.cipher.dec.vecs)
1571		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1572				    desc->suite.cipher.dec.count);
1573
1574out:
1575	crypto_free_ablkcipher(tfm);
1576	return err;
1577}
1578
1579static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1580			 u32 type, u32 mask)
1581{
1582	struct crypto_comp *tfm;
1583	int err;
1584
1585	tfm = crypto_alloc_comp(driver, type, mask);
1586	if (IS_ERR(tfm)) {
1587		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1588		       "%ld\n", driver, PTR_ERR(tfm));
1589		return PTR_ERR(tfm);
1590	}
1591
1592	err = test_comp(tfm, desc->suite.comp.comp.vecs,
1593			desc->suite.comp.decomp.vecs,
1594			desc->suite.comp.comp.count,
1595			desc->suite.comp.decomp.count);
1596
1597	crypto_free_comp(tfm);
1598	return err;
1599}
1600
1601static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1602			  u32 type, u32 mask)
1603{
1604	struct crypto_pcomp *tfm;
1605	int err;
1606
1607	tfm = crypto_alloc_pcomp(driver, type, mask);
1608	if (IS_ERR(tfm)) {
1609		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1610		       driver, PTR_ERR(tfm));
1611		return PTR_ERR(tfm);
1612	}
1613
1614	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1615			 desc->suite.pcomp.decomp.vecs,
1616			 desc->suite.pcomp.comp.count,
1617			 desc->suite.pcomp.decomp.count);
1618
1619	crypto_free_pcomp(tfm);
1620	return err;
1621}
1622
1623static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1624			 u32 type, u32 mask)
1625{
1626	struct crypto_ahash *tfm;
1627	int err;
1628
1629	tfm = crypto_alloc_ahash(driver, type, mask);
1630	if (IS_ERR(tfm)) {
1631		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1632		       "%ld\n", driver, PTR_ERR(tfm));
1633		return PTR_ERR(tfm);
1634	}
1635
1636	err = test_hash(tfm, desc->suite.hash.vecs,
1637			desc->suite.hash.count, true);
1638	if (!err)
1639		err = test_hash(tfm, desc->suite.hash.vecs,
1640				desc->suite.hash.count, false);
1641
1642	crypto_free_ahash(tfm);
1643	return err;
1644}
1645
1646static int alg_test_crc32c(const struct alg_test_desc *desc,
1647			   const char *driver, u32 type, u32 mask)
1648{
1649	struct crypto_shash *tfm;
1650	u32 val;
1651	int err;
1652
1653	err = alg_test_hash(desc, driver, type, mask);
1654	if (err)
1655		goto out;
1656
1657	tfm = crypto_alloc_shash(driver, type, mask);
1658	if (IS_ERR(tfm)) {
1659		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1660		       "%ld\n", driver, PTR_ERR(tfm));
1661		err = PTR_ERR(tfm);
1662		goto out;
1663	}
1664
1665	do {
1666		struct {
1667			struct shash_desc shash;
1668			char ctx[crypto_shash_descsize(tfm)];
1669		} sdesc;
1670
1671		sdesc.shash.tfm = tfm;
1672		sdesc.shash.flags = 0;
1673
1674		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1675		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1676		if (err) {
1677			printk(KERN_ERR "alg: crc32c: Operation failed for "
1678			       "%s: %d\n", driver, err);
1679			break;
1680		}
1681
1682		if (val != ~420553207) {
1683			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1684			       "%d\n", driver, val);
1685			err = -EINVAL;
1686		}
1687	} while (0);
1688
1689	crypto_free_shash(tfm);
1690
1691out:
1692	return err;
1693}
1694
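/*
 * Illustrative sketch of the extra check in alg_test_crc32c(): crc32c's
 * final step is a bitwise inversion of the internal state, so seeding the
 * partial-block context with a known value and finalising over an empty
 * message must return the complement of that seed.  The helper name is an
 * assumption.
 */
#if 0
static bool example_crc32c_seed_check(u32 seed, u32 final)
{
	return final == ~seed;	/* alg_test_crc32c() uses seed 420553207 */
}
#endif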
1695static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1696			  u32 type, u32 mask)
1697{
1698	struct crypto_rng *rng;
1699	int err;
1700
1701	rng = crypto_alloc_rng(driver, type, mask);
1702	if (IS_ERR(rng)) {
1703		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1704		       "%ld\n", driver, PTR_ERR(rng));
1705		return PTR_ERR(rng);
1706	}
1707
1708	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1709
1710	crypto_free_rng(rng);
1711
1712	return err;
1713}
1714
1715static int alg_test_null(const struct alg_test_desc *desc,
1716			     const char *driver, u32 type, u32 mask)
1717{
1718	return 0;
1719}
1720
1721/* Please keep this list sorted by algorithm name. */
1722static const struct alg_test_desc alg_test_descs[] = {
1723	{
1724		.alg = "__cbc-cast5-avx",
1725		.test = alg_test_null,
1726	}, {
1727		.alg = "__cbc-cast6-avx",
1728		.test = alg_test_null,
1729	}, {
1730		.alg = "__cbc-serpent-avx",
1731		.test = alg_test_null,
1732	}, {
1733		.alg = "__cbc-serpent-avx2",
1734		.test = alg_test_null,
1735	}, {
1736		.alg = "__cbc-serpent-sse2",
1737		.test = alg_test_null,
1738	}, {
1739		.alg = "__cbc-twofish-avx",
1740		.test = alg_test_null,
1741	}, {
1742		.alg = "__driver-cbc-aes-aesni",
1743		.test = alg_test_null,
1744		.fips_allowed = 1,
1745	}, {
1746		.alg = "__driver-cbc-camellia-aesni",
1747		.test = alg_test_null,
1748	}, {
1749		.alg = "__driver-cbc-camellia-aesni-avx2",
1750		.test = alg_test_null,
1751	}, {
1752		.alg = "__driver-cbc-cast5-avx",
1753		.test = alg_test_null,
1754	}, {
1755		.alg = "__driver-cbc-cast6-avx",
1756		.test = alg_test_null,
1757	}, {
1758		.alg = "__driver-cbc-serpent-avx",
1759		.test = alg_test_null,
1760	}, {
1761		.alg = "__driver-cbc-serpent-avx2",
1762		.test = alg_test_null,
1763	}, {
1764		.alg = "__driver-cbc-serpent-sse2",
1765		.test = alg_test_null,
1766	}, {
1767		.alg = "__driver-cbc-twofish-avx",
1768		.test = alg_test_null,
1769	}, {
1770		.alg = "__driver-ecb-aes-aesni",
1771		.test = alg_test_null,
1772		.fips_allowed = 1,
1773	}, {
1774		.alg = "__driver-ecb-camellia-aesni",
1775		.test = alg_test_null,
1776	}, {
1777		.alg = "__driver-ecb-camellia-aesni-avx2",
1778		.test = alg_test_null,
1779	}, {
1780		.alg = "__driver-ecb-cast5-avx",
1781		.test = alg_test_null,
1782	}, {
1783		.alg = "__driver-ecb-cast6-avx",
1784		.test = alg_test_null,
1785	}, {
1786		.alg = "__driver-ecb-serpent-avx",
1787		.test = alg_test_null,
1788	}, {
1789		.alg = "__driver-ecb-serpent-avx2",
1790		.test = alg_test_null,
1791	}, {
1792		.alg = "__driver-ecb-serpent-sse2",
1793		.test = alg_test_null,
1794	}, {
1795		.alg = "__driver-ecb-twofish-avx",
1796		.test = alg_test_null,
1797	}, {
1798		.alg = "__ghash-pclmulqdqni",
1799		.test = alg_test_null,
1800		.fips_allowed = 1,
1801	}, {
1802		.alg = "ansi_cprng",
1803		.test = alg_test_cprng,
1804		.fips_allowed = 1,
1805		.suite = {
1806			.cprng = {
1807				.vecs = ansi_cprng_aes_tv_template,
1808				.count = ANSI_CPRNG_AES_TEST_VECTORS
1809			}
1810		}
1811	}, {
1812		.alg = "authenc(hmac(md5),ecb(cipher_null))",
1813		.test = alg_test_aead,
1814		.fips_allowed = 1,
1815		.suite = {
1816			.aead = {
1817				.enc = {
1818					.vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
1819					.count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1820				},
1821				.dec = {
1822					.vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
1823					.count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1824				}
1825			}
1826		}
1827	}, {
1828		.alg = "authenc(hmac(sha1),cbc(aes))",
1829		.test = alg_test_aead,
1830		.fips_allowed = 1,
1831		.suite = {
1832			.aead = {
1833				.enc = {
1834					.vecs = hmac_sha1_aes_cbc_enc_tv_template,
1835					.count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1836				}
1837			}
1838		}
1839	}, {
1840		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
1841		.test = alg_test_aead,
1842		.fips_allowed = 1,
1843		.suite = {
1844			.aead = {
1845				.enc = {
1846					.vecs = hmac_sha1_ecb_cipher_null_enc_tv_template,
1847					.count = HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1848				},
1849				.dec = {
1850					.vecs = hmac_sha1_ecb_cipher_null_dec_tv_template,
1851					.count = HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1852				}
1853			}
1854		}
1855	}, {
1856		.alg = "authenc(hmac(sha256),cbc(aes))",
1857		.test = alg_test_aead,
1858		.fips_allowed = 1,
1859		.suite = {
1860			.aead = {
1861				.enc = {
1862					.vecs = hmac_sha256_aes_cbc_enc_tv_template,
1863					.count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1864				}
1865			}
1866		}
1867	}, {
1868		.alg = "authenc(hmac(sha512),cbc(aes))",
1869		.test = alg_test_aead,
1870		.fips_allowed = 1,
1871		.suite = {
1872			.aead = {
1873				.enc = {
1874					.vecs = hmac_sha512_aes_cbc_enc_tv_template,
1875					.count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
1876				}
1877			}
1878		}
1879	}, {
1880		.alg = "cbc(aes)",
1881		.test = alg_test_skcipher,
1882		.fips_allowed = 1,
1883		.suite = {
1884			.cipher = {
1885				.enc = {
1886					.vecs = aes_cbc_enc_tv_template,
1887					.count = AES_CBC_ENC_TEST_VECTORS
1888				},
1889				.dec = {
1890					.vecs = aes_cbc_dec_tv_template,
1891					.count = AES_CBC_DEC_TEST_VECTORS
1892				}
1893			}
1894		}
1895	}, {
1896		.alg = "cbc(anubis)",
1897		.test = alg_test_skcipher,
1898		.suite = {
1899			.cipher = {
1900				.enc = {
1901					.vecs = anubis_cbc_enc_tv_template,
1902					.count = ANUBIS_CBC_ENC_TEST_VECTORS
1903				},
1904				.dec = {
1905					.vecs = anubis_cbc_dec_tv_template,
1906					.count = ANUBIS_CBC_DEC_TEST_VECTORS
1907				}
1908			}
1909		}
1910	}, {
1911		.alg = "cbc(blowfish)",
1912		.test = alg_test_skcipher,
1913		.suite = {
1914			.cipher = {
1915				.enc = {
1916					.vecs = bf_cbc_enc_tv_template,
1917					.count = BF_CBC_ENC_TEST_VECTORS
1918				},
1919				.dec = {
1920					.vecs = bf_cbc_dec_tv_template,
1921					.count = BF_CBC_DEC_TEST_VECTORS
1922				}
1923			}
1924		}
1925	}, {
1926		.alg = "cbc(camellia)",
1927		.test = alg_test_skcipher,
1928		.suite = {
1929			.cipher = {
1930				.enc = {
1931					.vecs = camellia_cbc_enc_tv_template,
1932					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
1933				},
1934				.dec = {
1935					.vecs = camellia_cbc_dec_tv_template,
1936					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
1937				}
1938			}
1939		}
1940	}, {
1941		.alg = "cbc(cast5)",
1942		.test = alg_test_skcipher,
1943		.suite = {
1944			.cipher = {
1945				.enc = {
1946					.vecs = cast5_cbc_enc_tv_template,
1947					.count = CAST5_CBC_ENC_TEST_VECTORS
1948				},
1949				.dec = {
1950					.vecs = cast5_cbc_dec_tv_template,
1951					.count = CAST5_CBC_DEC_TEST_VECTORS
1952				}
1953			}
1954		}
1955	}, {
1956		.alg = "cbc(cast6)",
1957		.test = alg_test_skcipher,
1958		.suite = {
1959			.cipher = {
1960				.enc = {
1961					.vecs = cast6_cbc_enc_tv_template,
1962					.count = CAST6_CBC_ENC_TEST_VECTORS
1963				},
1964				.dec = {
1965					.vecs = cast6_cbc_dec_tv_template,
1966					.count = CAST6_CBC_DEC_TEST_VECTORS
1967				}
1968			}
1969		}
1970	}, {
1971		.alg = "cbc(des)",
1972		.test = alg_test_skcipher,
1973		.suite = {
1974			.cipher = {
1975				.enc = {
1976					.vecs = des_cbc_enc_tv_template,
1977					.count = DES_CBC_ENC_TEST_VECTORS
1978				},
1979				.dec = {
1980					.vecs = des_cbc_dec_tv_template,
1981					.count = DES_CBC_DEC_TEST_VECTORS
1982				}
1983			}
1984		}
1985	}, {
1986		.alg = "cbc(des3_ede)",
1987		.test = alg_test_skcipher,
1988		.fips_allowed = 1,
1989		.suite = {
1990			.cipher = {
1991				.enc = {
1992					.vecs = des3_ede_cbc_enc_tv_template,
1993					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
1994				},
1995				.dec = {
1996					.vecs = des3_ede_cbc_dec_tv_template,
1997					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
1998				}
1999			}
2000		}
2001	}, {
2002		.alg = "cbc(serpent)",
2003		.test = alg_test_skcipher,
2004		.suite = {
2005			.cipher = {
2006				.enc = {
2007					.vecs = serpent_cbc_enc_tv_template,
2008					.count = SERPENT_CBC_ENC_TEST_VECTORS
2009				},
2010				.dec = {
2011					.vecs = serpent_cbc_dec_tv_template,
2012					.count = SERPENT_CBC_DEC_TEST_VECTORS
2013				}
2014			}
2015		}
2016	}, {
2017		.alg = "cbc(twofish)",
2018		.test = alg_test_skcipher,
2019		.suite = {
2020			.cipher = {
2021				.enc = {
2022					.vecs = tf_cbc_enc_tv_template,
2023					.count = TF_CBC_ENC_TEST_VECTORS
2024				},
2025				.dec = {
2026					.vecs = tf_cbc_dec_tv_template,
2027					.count = TF_CBC_DEC_TEST_VECTORS
2028				}
2029			}
2030		}
2031	}, {
2032		.alg = "ccm(aes)",
2033		.test = alg_test_aead,
2034		.fips_allowed = 1,
2035		.suite = {
2036			.aead = {
2037				.enc = {
2038					.vecs = aes_ccm_enc_tv_template,
2039					.count = AES_CCM_ENC_TEST_VECTORS
2040				},
2041				.dec = {
2042					.vecs = aes_ccm_dec_tv_template,
2043					.count = AES_CCM_DEC_TEST_VECTORS
2044				}
2045			}
2046		}
2047	}, {
2048		.alg = "cmac(aes)",
2049		.test = alg_test_hash,
2050		.suite = {
2051			.hash = {
2052				.vecs = aes_cmac128_tv_template,
2053				.count = CMAC_AES_TEST_VECTORS
2054			}
2055		}
2056	}, {
2057		.alg = "cmac(des3_ede)",
2058		.test = alg_test_hash,
2059		.suite = {
2060			.hash = {
2061				.vecs = des3_ede_cmac64_tv_template,
2062				.count = CMAC_DES3_EDE_TEST_VECTORS
2063			}
2064		}
2065	}, {
2066		.alg = "compress_null",
2067		.test = alg_test_null,
2068	}, {
2069		.alg = "crc32c",
2070		.test = alg_test_crc32c,
2071		.fips_allowed = 1,
2072		.suite = {
2073			.hash = {
2074				.vecs = crc32c_tv_template,
2075				.count = CRC32C_TEST_VECTORS
2076			}
2077		}
2078	}, {
2079		.alg = "crct10dif",
2080		.test = alg_test_hash,
2081		.fips_allowed = 1,
2082		.suite = {
2083			.hash = {
2084				.vecs = crct10dif_tv_template,
2085				.count = CRCT10DIF_TEST_VECTORS
2086			}
2087		}
2088	}, {
2089		.alg = "cryptd(__driver-cbc-aes-aesni)",
2090		.test = alg_test_null,
2091		.fips_allowed = 1,
2092	}, {
2093		.alg = "cryptd(__driver-cbc-camellia-aesni)",
2094		.test = alg_test_null,
2095	}, {
2096		.alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2097		.test = alg_test_null,
2098	}, {
2099		.alg = "cryptd(__driver-cbc-serpent-avx2)",
2100		.test = alg_test_null,
2101	}, {
2102		.alg = "cryptd(__driver-ecb-aes-aesni)",
2103		.test = alg_test_null,
2104		.fips_allowed = 1,
2105	}, {
2106		.alg = "cryptd(__driver-ecb-camellia-aesni)",
2107		.test = alg_test_null,
2108	}, {
2109		.alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2110		.test = alg_test_null,
2111	}, {
2112		.alg = "cryptd(__driver-ecb-cast5-avx)",
2113		.test = alg_test_null,
2114	}, {
2115		.alg = "cryptd(__driver-ecb-cast6-avx)",
2116		.test = alg_test_null,
2117	}, {
2118		.alg = "cryptd(__driver-ecb-serpent-avx)",
2119		.test = alg_test_null,
2120	}, {
2121		.alg = "cryptd(__driver-ecb-serpent-avx2)",
2122		.test = alg_test_null,
2123	}, {
2124		.alg = "cryptd(__driver-ecb-serpent-sse2)",
2125		.test = alg_test_null,
2126	}, {
2127		.alg = "cryptd(__driver-ecb-twofish-avx)",
2128		.test = alg_test_null,
2129	}, {
2130		.alg = "cryptd(__driver-gcm-aes-aesni)",
2131		.test = alg_test_null,
2132		.fips_allowed = 1,
2133	}, {
2134		.alg = "cryptd(__ghash-pclmulqdqni)",
2135		.test = alg_test_null,
2136		.fips_allowed = 1,
2137	}, {
2138		.alg = "ctr(aes)",
2139		.test = alg_test_skcipher,
2140		.fips_allowed = 1,
2141		.suite = {
2142			.cipher = {
2143				.enc = {
2144					.vecs = aes_ctr_enc_tv_template,
2145					.count = AES_CTR_ENC_TEST_VECTORS
2146				},
2147				.dec = {
2148					.vecs = aes_ctr_dec_tv_template,
2149					.count = AES_CTR_DEC_TEST_VECTORS
2150				}
2151			}
2152		}
2153	}, {
2154		.alg = "ctr(blowfish)",
2155		.test = alg_test_skcipher,
2156		.suite = {
2157			.cipher = {
2158				.enc = {
2159					.vecs = bf_ctr_enc_tv_template,
2160					.count = BF_CTR_ENC_TEST_VECTORS
2161				},
2162				.dec = {
2163					.vecs = bf_ctr_dec_tv_template,
2164					.count = BF_CTR_DEC_TEST_VECTORS
2165				}
2166			}
2167		}
2168	}, {
2169		.alg = "ctr(camellia)",
2170		.test = alg_test_skcipher,
2171		.suite = {
2172			.cipher = {
2173				.enc = {
2174					.vecs = camellia_ctr_enc_tv_template,
2175					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
2176				},
2177				.dec = {
2178					.vecs = camellia_ctr_dec_tv_template,
2179					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
2180				}
2181			}
2182		}
2183	}, {
2184		.alg = "ctr(cast5)",
2185		.test = alg_test_skcipher,
2186		.suite = {
2187			.cipher = {
2188				.enc = {
2189					.vecs = cast5_ctr_enc_tv_template,
2190					.count = CAST5_CTR_ENC_TEST_VECTORS
2191				},
2192				.dec = {
2193					.vecs = cast5_ctr_dec_tv_template,
2194					.count = CAST5_CTR_DEC_TEST_VECTORS
2195				}
2196			}
2197		}
2198	}, {
2199		.alg = "ctr(cast6)",
2200		.test = alg_test_skcipher,
2201		.suite = {
2202			.cipher = {
2203				.enc = {
2204					.vecs = cast6_ctr_enc_tv_template,
2205					.count = CAST6_CTR_ENC_TEST_VECTORS
2206				},
2207				.dec = {
2208					.vecs = cast6_ctr_dec_tv_template,
2209					.count = CAST6_CTR_DEC_TEST_VECTORS
2210				}
2211			}
2212		}
2213	}, {
2214		.alg = "ctr(des)",
2215		.test = alg_test_skcipher,
2216		.suite = {
2217			.cipher = {
2218				.enc = {
2219					.vecs = des_ctr_enc_tv_template,
2220					.count = DES_CTR_ENC_TEST_VECTORS
2221				},
2222				.dec = {
2223					.vecs = des_ctr_dec_tv_template,
2224					.count = DES_CTR_DEC_TEST_VECTORS
2225				}
2226			}
2227		}
2228	}, {
2229		.alg = "ctr(des3_ede)",
2230		.test = alg_test_skcipher,
2231		.suite = {
2232			.cipher = {
2233				.enc = {
2234					.vecs = des3_ede_ctr_enc_tv_template,
2235					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
2236				},
2237				.dec = {
2238					.vecs = des3_ede_ctr_dec_tv_template,
2239					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
2240				}
2241			}
2242		}
2243	}, {
2244		.alg = "ctr(serpent)",
2245		.test = alg_test_skcipher,
2246		.suite = {
2247			.cipher = {
2248				.enc = {
2249					.vecs = serpent_ctr_enc_tv_template,
2250					.count = SERPENT_CTR_ENC_TEST_VECTORS
2251				},
2252				.dec = {
2253					.vecs = serpent_ctr_dec_tv_template,
2254					.count = SERPENT_CTR_DEC_TEST_VECTORS
2255				}
2256			}
2257		}
2258	}, {
2259		.alg = "ctr(twofish)",
2260		.test = alg_test_skcipher,
2261		.suite = {
2262			.cipher = {
2263				.enc = {
2264					.vecs = tf_ctr_enc_tv_template,
2265					.count = TF_CTR_ENC_TEST_VECTORS
2266				},
2267				.dec = {
2268					.vecs = tf_ctr_dec_tv_template,
2269					.count = TF_CTR_DEC_TEST_VECTORS
2270				}
2271			}
2272		}
2273	}, {
2274		.alg = "cts(cbc(aes))",
2275		.test = alg_test_skcipher,
2276		.suite = {
2277			.cipher = {
2278				.enc = {
2279					.vecs = cts_mode_enc_tv_template,
2280					.count = CTS_MODE_ENC_TEST_VECTORS
2281				},
2282				.dec = {
2283					.vecs = cts_mode_dec_tv_template,
2284					.count = CTS_MODE_DEC_TEST_VECTORS
2285				}
2286			}
2287		}
2288	}, {
2289		.alg = "deflate",
2290		.test = alg_test_comp,
2291		.fips_allowed = 1,
2292		.suite = {
2293			.comp = {
2294				.comp = {
2295					.vecs = deflate_comp_tv_template,
2296					.count = DEFLATE_COMP_TEST_VECTORS
2297				},
2298				.decomp = {
2299					.vecs = deflate_decomp_tv_template,
2300					.count = DEFLATE_DECOMP_TEST_VECTORS
2301				}
2302			}
2303		}
2304	}, {
2305		.alg = "digest_null",
2306		.test = alg_test_null,
2307	}, {
2308		.alg = "ecb(__aes-aesni)",
2309		.test = alg_test_null,
2310		.fips_allowed = 1,
2311	}, {
2312		.alg = "ecb(aes)",
2313		.test = alg_test_skcipher,
2314		.fips_allowed = 1,
2315		.suite = {
2316			.cipher = {
2317				.enc = {
2318					.vecs = aes_enc_tv_template,
2319					.count = AES_ENC_TEST_VECTORS
2320				},
2321				.dec = {
2322					.vecs = aes_dec_tv_template,
2323					.count = AES_DEC_TEST_VECTORS
2324				}
2325			}
2326		}
2327	}, {
2328		.alg = "ecb(anubis)",
2329		.test = alg_test_skcipher,
2330		.suite = {
2331			.cipher = {
2332				.enc = {
2333					.vecs = anubis_enc_tv_template,
2334					.count = ANUBIS_ENC_TEST_VECTORS
2335				},
2336				.dec = {
2337					.vecs = anubis_dec_tv_template,
2338					.count = ANUBIS_DEC_TEST_VECTORS
2339				}
2340			}
2341		}
2342	}, {
2343		.alg = "ecb(arc4)",
2344		.test = alg_test_skcipher,
2345		.suite = {
2346			.cipher = {
2347				.enc = {
2348					.vecs = arc4_enc_tv_template,
2349					.count = ARC4_ENC_TEST_VECTORS
2350				},
2351				.dec = {
2352					.vecs = arc4_dec_tv_template,
2353					.count = ARC4_DEC_TEST_VECTORS
2354				}
2355			}
2356		}
2357	}, {
2358		.alg = "ecb(blowfish)",
2359		.test = alg_test_skcipher,
2360		.suite = {
2361			.cipher = {
2362				.enc = {
2363					.vecs = bf_enc_tv_template,
2364					.count = BF_ENC_TEST_VECTORS
2365				},
2366				.dec = {
2367					.vecs = bf_dec_tv_template,
2368					.count = BF_DEC_TEST_VECTORS
2369				}
2370			}
2371		}
2372	}, {
2373		.alg = "ecb(camellia)",
2374		.test = alg_test_skcipher,
2375		.suite = {
2376			.cipher = {
2377				.enc = {
2378					.vecs = camellia_enc_tv_template,
2379					.count = CAMELLIA_ENC_TEST_VECTORS
2380				},
2381				.dec = {
2382					.vecs = camellia_dec_tv_template,
2383					.count = CAMELLIA_DEC_TEST_VECTORS
2384				}
2385			}
2386		}
2387	}, {
2388		.alg = "ecb(cast5)",
2389		.test = alg_test_skcipher,
2390		.suite = {
2391			.cipher = {
2392				.enc = {
2393					.vecs = cast5_enc_tv_template,
2394					.count = CAST5_ENC_TEST_VECTORS
2395				},
2396				.dec = {
2397					.vecs = cast5_dec_tv_template,
2398					.count = CAST5_DEC_TEST_VECTORS
2399				}
2400			}
2401		}
2402	}, {
2403		.alg = "ecb(cast6)",
2404		.test = alg_test_skcipher,
2405		.suite = {
2406			.cipher = {
2407				.enc = {
2408					.vecs = cast6_enc_tv_template,
2409					.count = CAST6_ENC_TEST_VECTORS
2410				},
2411				.dec = {
2412					.vecs = cast6_dec_tv_template,
2413					.count = CAST6_DEC_TEST_VECTORS
2414				}
2415			}
2416		}
2417	}, {
2418		.alg = "ecb(cipher_null)",
2419		.test = alg_test_null,
2420	}, {
2421		.alg = "ecb(des)",
2422		.test = alg_test_skcipher,
2423		.fips_allowed = 1,
2424		.suite = {
2425			.cipher = {
2426				.enc = {
2427					.vecs = des_enc_tv_template,
2428					.count = DES_ENC_TEST_VECTORS
2429				},
2430				.dec = {
2431					.vecs = des_dec_tv_template,
2432					.count = DES_DEC_TEST_VECTORS
2433				}
2434			}
2435		}
2436	}, {
2437		.alg = "ecb(des3_ede)",
2438		.test = alg_test_skcipher,
2439		.fips_allowed = 1,
2440		.suite = {
2441			.cipher = {
2442				.enc = {
2443					.vecs = des3_ede_enc_tv_template,
2444					.count = DES3_EDE_ENC_TEST_VECTORS
2445				},
2446				.dec = {
2447					.vecs = des3_ede_dec_tv_template,
2448					.count = DES3_EDE_DEC_TEST_VECTORS
2449				}
2450			}
2451		}
2452	}, {
2453		.alg = "ecb(fcrypt)",
2454		.test = alg_test_skcipher,
2455		.suite = {
2456			.cipher = {
2457				.enc = {
2458					.vecs = fcrypt_pcbc_enc_tv_template,
2459					.count = 1
2460				},
2461				.dec = {
2462					.vecs = fcrypt_pcbc_dec_tv_template,
2463					.count = 1
2464				}
2465			}
2466		}
2467	}, {
2468		.alg = "ecb(khazad)",
2469		.test = alg_test_skcipher,
2470		.suite = {
2471			.cipher = {
2472				.enc = {
2473					.vecs = khazad_enc_tv_template,
2474					.count = KHAZAD_ENC_TEST_VECTORS
2475				},
2476				.dec = {
2477					.vecs = khazad_dec_tv_template,
2478					.count = KHAZAD_DEC_TEST_VECTORS
2479				}
2480			}
2481		}
2482	}, {
2483		.alg = "ecb(seed)",
2484		.test = alg_test_skcipher,
2485		.suite = {
2486			.cipher = {
2487				.enc = {
2488					.vecs = seed_enc_tv_template,
2489					.count = SEED_ENC_TEST_VECTORS
2490				},
2491				.dec = {
2492					.vecs = seed_dec_tv_template,
2493					.count = SEED_DEC_TEST_VECTORS
2494				}
2495			}
2496		}
2497	}, {
2498		.alg = "ecb(serpent)",
2499		.test = alg_test_skcipher,
2500		.suite = {
2501			.cipher = {
2502				.enc = {
2503					.vecs = serpent_enc_tv_template,
2504					.count = SERPENT_ENC_TEST_VECTORS
2505				},
2506				.dec = {
2507					.vecs = serpent_dec_tv_template,
2508					.count = SERPENT_DEC_TEST_VECTORS
2509				}
2510			}
2511		}
2512	}, {
2513		.alg = "ecb(tea)",
2514		.test = alg_test_skcipher,
2515		.suite = {
2516			.cipher = {
2517				.enc = {
2518					.vecs = tea_enc_tv_template,
2519					.count = TEA_ENC_TEST_VECTORS
2520				},
2521				.dec = {
2522					.vecs = tea_dec_tv_template,
2523					.count = TEA_DEC_TEST_VECTORS
2524				}
2525			}
2526		}
2527	}, {
2528		.alg = "ecb(tnepres)",
2529		.test = alg_test_skcipher,
2530		.suite = {
2531			.cipher = {
2532				.enc = {
2533					.vecs = tnepres_enc_tv_template,
2534					.count = TNEPRES_ENC_TEST_VECTORS
2535				},
2536				.dec = {
2537					.vecs = tnepres_dec_tv_template,
2538					.count = TNEPRES_DEC_TEST_VECTORS
2539				}
2540			}
2541		}
2542	}, {
2543		.alg = "ecb(twofish)",
2544		.test = alg_test_skcipher,
2545		.suite = {
2546			.cipher = {
2547				.enc = {
2548					.vecs = tf_enc_tv_template,
2549					.count = TF_ENC_TEST_VECTORS
2550				},
2551				.dec = {
2552					.vecs = tf_dec_tv_template,
2553					.count = TF_DEC_TEST_VECTORS
2554				}
2555			}
2556		}
2557	}, {
2558		.alg = "ecb(xeta)",
2559		.test = alg_test_skcipher,
2560		.suite = {
2561			.cipher = {
2562				.enc = {
2563					.vecs = xeta_enc_tv_template,
2564					.count = XETA_ENC_TEST_VECTORS
2565				},
2566				.dec = {
2567					.vecs = xeta_dec_tv_template,
2568					.count = XETA_DEC_TEST_VECTORS
2569				}
2570			}
2571		}
2572	}, {
2573		.alg = "ecb(xtea)",
2574		.test = alg_test_skcipher,
2575		.suite = {
2576			.cipher = {
2577				.enc = {
2578					.vecs = xtea_enc_tv_template,
2579					.count = XTEA_ENC_TEST_VECTORS
2580				},
2581				.dec = {
2582					.vecs = xtea_dec_tv_template,
2583					.count = XTEA_DEC_TEST_VECTORS
2584				}
2585			}
2586		}
2587	}, {
2588		.alg = "gcm(aes)",
2589		.test = alg_test_aead,
2590		.fips_allowed = 1,
2591		.suite = {
2592			.aead = {
2593				.enc = {
2594					.vecs = aes_gcm_enc_tv_template,
2595					.count = AES_GCM_ENC_TEST_VECTORS
2596				},
2597				.dec = {
2598					.vecs = aes_gcm_dec_tv_template,
2599					.count = AES_GCM_DEC_TEST_VECTORS
2600				}
2601			}
2602		}
2603	}, {
2604		.alg = "ghash",
2605		.test = alg_test_hash,
2606		.fips_allowed = 1,
2607		.suite = {
2608			.hash = {
2609				.vecs = ghash_tv_template,
2610				.count = GHASH_TEST_VECTORS
2611			}
2612		}
2613	}, {
2614		.alg = "hmac(crc32)",
2615		.test = alg_test_hash,
2616		.suite = {
2617			.hash = {
2618				.vecs = bfin_crc_tv_template,
2619				.count = BFIN_CRC_TEST_VECTORS
2620			}
2621		}
2622	}, {
2623		.alg = "hmac(md5)",
2624		.test = alg_test_hash,
2625		.suite = {
2626			.hash = {
2627				.vecs = hmac_md5_tv_template,
2628				.count = HMAC_MD5_TEST_VECTORS
2629			}
2630		}
2631	}, {
2632		.alg = "hmac(rmd128)",
2633		.test = alg_test_hash,
2634		.suite = {
2635			.hash = {
2636				.vecs = hmac_rmd128_tv_template,
2637				.count = HMAC_RMD128_TEST_VECTORS
2638			}
2639		}
2640	}, {
2641		.alg = "hmac(rmd160)",
2642		.test = alg_test_hash,
2643		.suite = {
2644			.hash = {
2645				.vecs = hmac_rmd160_tv_template,
2646				.count = HMAC_RMD160_TEST_VECTORS
2647			}
2648		}
2649	}, {
2650		.alg = "hmac(sha1)",
2651		.test = alg_test_hash,
2652		.fips_allowed = 1,
2653		.suite = {
2654			.hash = {
2655				.vecs = hmac_sha1_tv_template,
2656				.count = HMAC_SHA1_TEST_VECTORS
2657			}
2658		}
2659	}, {
2660		.alg = "hmac(sha224)",
2661		.test = alg_test_hash,
2662		.fips_allowed = 1,
2663		.suite = {
2664			.hash = {
2665				.vecs = hmac_sha224_tv_template,
2666				.count = HMAC_SHA224_TEST_VECTORS
2667			}
2668		}
2669	}, {
2670		.alg = "hmac(sha256)",
2671		.test = alg_test_hash,
2672		.fips_allowed = 1,
2673		.suite = {
2674			.hash = {
2675				.vecs = hmac_sha256_tv_template,
2676				.count = HMAC_SHA256_TEST_VECTORS
2677			}
2678		}
2679	}, {
2680		.alg = "hmac(sha384)",
2681		.test = alg_test_hash,
2682		.fips_allowed = 1,
2683		.suite = {
2684			.hash = {
2685				.vecs = hmac_sha384_tv_template,
2686				.count = HMAC_SHA384_TEST_VECTORS
2687			}
2688		}
2689	}, {
2690		.alg = "hmac(sha512)",
2691		.test = alg_test_hash,
2692		.fips_allowed = 1,
2693		.suite = {
2694			.hash = {
2695				.vecs = hmac_sha512_tv_template,
2696				.count = HMAC_SHA512_TEST_VECTORS
2697			}
2698		}
2699	}, {
2700		.alg = "lrw(aes)",
2701		.test = alg_test_skcipher,
2702		.suite = {
2703			.cipher = {
2704				.enc = {
2705					.vecs = aes_lrw_enc_tv_template,
2706					.count = AES_LRW_ENC_TEST_VECTORS
2707				},
2708				.dec = {
2709					.vecs = aes_lrw_dec_tv_template,
2710					.count = AES_LRW_DEC_TEST_VECTORS
2711				}
2712			}
2713		}
2714	}, {
2715		.alg = "lrw(camellia)",
2716		.test = alg_test_skcipher,
2717		.suite = {
2718			.cipher = {
2719				.enc = {
2720					.vecs = camellia_lrw_enc_tv_template,
2721					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
2722				},
2723				.dec = {
2724					.vecs = camellia_lrw_dec_tv_template,
2725					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
2726				}
2727			}
2728		}
2729	}, {
2730		.alg = "lrw(cast6)",
2731		.test = alg_test_skcipher,
2732		.suite = {
2733			.cipher = {
2734				.enc = {
2735					.vecs = cast6_lrw_enc_tv_template,
2736					.count = CAST6_LRW_ENC_TEST_VECTORS
2737				},
2738				.dec = {
2739					.vecs = cast6_lrw_dec_tv_template,
2740					.count = CAST6_LRW_DEC_TEST_VECTORS
2741				}
2742			}
2743		}
2744	}, {
2745		.alg = "lrw(serpent)",
2746		.test = alg_test_skcipher,
2747		.suite = {
2748			.cipher = {
2749				.enc = {
2750					.vecs = serpent_lrw_enc_tv_template,
2751					.count = SERPENT_LRW_ENC_TEST_VECTORS
2752				},
2753				.dec = {
2754					.vecs = serpent_lrw_dec_tv_template,
2755					.count = SERPENT_LRW_DEC_TEST_VECTORS
2756				}
2757			}
2758		}
2759	}, {
2760		.alg = "lrw(twofish)",
2761		.test = alg_test_skcipher,
2762		.suite = {
2763			.cipher = {
2764				.enc = {
2765					.vecs = tf_lrw_enc_tv_template,
2766					.count = TF_LRW_ENC_TEST_VECTORS
2767				},
2768				.dec = {
2769					.vecs = tf_lrw_dec_tv_template,
2770					.count = TF_LRW_DEC_TEST_VECTORS
2771				}
2772			}
2773		}
2774	}, {
2775		.alg = "lzo",
2776		.test = alg_test_comp,
2777		.fips_allowed = 1,
2778		.suite = {
2779			.comp = {
2780				.comp = {
2781					.vecs = lzo_comp_tv_template,
2782					.count = LZO_COMP_TEST_VECTORS
2783				},
2784				.decomp = {
2785					.vecs = lzo_decomp_tv_template,
2786					.count = LZO_DECOMP_TEST_VECTORS
2787				}
2788			}
2789		}
2790	}, {
2791		.alg = "md4",
2792		.test = alg_test_hash,
2793		.suite = {
2794			.hash = {
2795				.vecs = md4_tv_template,
2796				.count = MD4_TEST_VECTORS
2797			}
2798		}
2799	}, {
2800		.alg = "md5",
2801		.test = alg_test_hash,
2802		.suite = {
2803			.hash = {
2804				.vecs = md5_tv_template,
2805				.count = MD5_TEST_VECTORS
2806			}
2807		}
2808	}, {
2809		.alg = "michael_mic",
2810		.test = alg_test_hash,
2811		.suite = {
2812			.hash = {
2813				.vecs = michael_mic_tv_template,
2814				.count = MICHAEL_MIC_TEST_VECTORS
2815			}
2816		}
2817	}, {
2818		.alg = "ofb(aes)",
2819		.test = alg_test_skcipher,
2820		.fips_allowed = 1,
2821		.suite = {
2822			.cipher = {
2823				.enc = {
2824					.vecs = aes_ofb_enc_tv_template,
2825					.count = AES_OFB_ENC_TEST_VECTORS
2826				},
2827				.dec = {
2828					.vecs = aes_ofb_dec_tv_template,
2829					.count = AES_OFB_DEC_TEST_VECTORS
2830				}
2831			}
2832		}
2833	}, {
2834		.alg = "pcbc(fcrypt)",
2835		.test = alg_test_skcipher,
2836		.suite = {
2837			.cipher = {
2838				.enc = {
2839					.vecs = fcrypt_pcbc_enc_tv_template,
2840					.count = FCRYPT_ENC_TEST_VECTORS
2841				},
2842				.dec = {
2843					.vecs = fcrypt_pcbc_dec_tv_template,
2844					.count = FCRYPT_DEC_TEST_VECTORS
2845				}
2846			}
2847		}
2848	}, {
2849		.alg = "rfc3686(ctr(aes))",
2850		.test = alg_test_skcipher,
2851		.fips_allowed = 1,
2852		.suite = {
2853			.cipher = {
2854				.enc = {
2855					.vecs = aes_ctr_rfc3686_enc_tv_template,
2856					.count = AES_CTR_3686_ENC_TEST_VECTORS
2857				},
2858				.dec = {
2859					.vecs = aes_ctr_rfc3686_dec_tv_template,
2860					.count = AES_CTR_3686_DEC_TEST_VECTORS
2861				}
2862			}
2863		}
2864	}, {
2865		.alg = "rfc4106(gcm(aes))",
2866		.test = alg_test_aead,
2867		.suite = {
2868			.aead = {
2869				.enc = {
2870					.vecs = aes_gcm_rfc4106_enc_tv_template,
2871					.count = AES_GCM_4106_ENC_TEST_VECTORS
2872				},
2873				.dec = {
2874					.vecs = aes_gcm_rfc4106_dec_tv_template,
2875					.count = AES_GCM_4106_DEC_TEST_VECTORS
2876				}
2877			}
2878		}
2879	}, {
2880		.alg = "rfc4309(ccm(aes))",
2881		.test = alg_test_aead,
2882		.fips_allowed = 1,
2883		.suite = {
2884			.aead = {
2885				.enc = {
2886					.vecs = aes_ccm_rfc4309_enc_tv_template,
2887					.count = AES_CCM_4309_ENC_TEST_VECTORS
2888				},
2889				.dec = {
2890					.vecs = aes_ccm_rfc4309_dec_tv_template,
2891					.count = AES_CCM_4309_DEC_TEST_VECTORS
2892				}
2893			}
2894		}
2895	}, {
2896		.alg = "rfc4543(gcm(aes))",
2897		.test = alg_test_aead,
2898		.suite = {
2899			.aead = {
2900				.enc = {
2901					.vecs = aes_gcm_rfc4543_enc_tv_template,
2902					.count = AES_GCM_4543_ENC_TEST_VECTORS
2903				},
2904				.dec = {
2905					.vecs = aes_gcm_rfc4543_dec_tv_template,
2906					.count = AES_GCM_4543_DEC_TEST_VECTORS
2907				},
2908			}
2909		}
2910	}, {
2911		.alg = "rmd128",
2912		.test = alg_test_hash,
2913		.suite = {
2914			.hash = {
2915				.vecs = rmd128_tv_template,
2916				.count = RMD128_TEST_VECTORS
2917			}
2918		}
2919	}, {
2920		.alg = "rmd160",
2921		.test = alg_test_hash,
2922		.suite = {
2923			.hash = {
2924				.vecs = rmd160_tv_template,
2925				.count = RMD160_TEST_VECTORS
2926			}
2927		}
2928	}, {
2929		.alg = "rmd256",
2930		.test = alg_test_hash,
2931		.suite = {
2932			.hash = {
2933				.vecs = rmd256_tv_template,
2934				.count = RMD256_TEST_VECTORS
2935			}
2936		}
2937	}, {
2938		.alg = "rmd320",
2939		.test = alg_test_hash,
2940		.suite = {
2941			.hash = {
2942				.vecs = rmd320_tv_template,
2943				.count = RMD320_TEST_VECTORS
2944			}
2945		}
2946	}, {
2947		.alg = "salsa20",
2948		.test = alg_test_skcipher,
2949		.suite = {
2950			.cipher = {
2951				.enc = {
2952					.vecs = salsa20_stream_enc_tv_template,
2953					.count = SALSA20_STREAM_ENC_TEST_VECTORS
2954				}
2955			}
2956		}
2957	}, {
2958		.alg = "sha1",
2959		.test = alg_test_hash,
2960		.fips_allowed = 1,
2961		.suite = {
2962			.hash = {
2963				.vecs = sha1_tv_template,
2964				.count = SHA1_TEST_VECTORS
2965			}
2966		}
2967	}, {
2968		.alg = "sha224",
2969		.test = alg_test_hash,
2970		.fips_allowed = 1,
2971		.suite = {
2972			.hash = {
2973				.vecs = sha224_tv_template,
2974				.count = SHA224_TEST_VECTORS
2975			}
2976		}
2977	}, {
2978		.alg = "sha256",
2979		.test = alg_test_hash,
2980		.fips_allowed = 1,
2981		.suite = {
2982			.hash = {
2983				.vecs = sha256_tv_template,
2984				.count = SHA256_TEST_VECTORS
2985			}
2986		}
2987	}, {
2988		.alg = "sha384",
2989		.test = alg_test_hash,
2990		.fips_allowed = 1,
2991		.suite = {
2992			.hash = {
2993				.vecs = sha384_tv_template,
2994				.count = SHA384_TEST_VECTORS
2995			}
2996		}
2997	}, {
2998		.alg = "sha512",
2999		.test = alg_test_hash,
3000		.fips_allowed = 1,
3001		.suite = {
3002			.hash = {
3003				.vecs = sha512_tv_template,
3004				.count = SHA512_TEST_VECTORS
3005			}
3006		}
3007	}, {
3008		.alg = "tgr128",
3009		.test = alg_test_hash,
3010		.suite = {
3011			.hash = {
3012				.vecs = tgr128_tv_template,
3013				.count = TGR128_TEST_VECTORS
3014			}
3015		}
3016	}, {
3017		.alg = "tgr160",
3018		.test = alg_test_hash,
3019		.suite = {
3020			.hash = {
3021				.vecs = tgr160_tv_template,
3022				.count = TGR160_TEST_VECTORS
3023			}
3024		}
3025	}, {
3026		.alg = "tgr192",
3027		.test = alg_test_hash,
3028		.suite = {
3029			.hash = {
3030				.vecs = tgr192_tv_template,
3031				.count = TGR192_TEST_VECTORS
3032			}
3033		}
3034	}, {
3035		.alg = "vmac(aes)",
3036		.test = alg_test_hash,
3037		.suite = {
3038			.hash = {
3039				.vecs = aes_vmac128_tv_template,
3040				.count = VMAC_AES_TEST_VECTORS
3041			}
3042		}
3043	}, {
3044		.alg = "wp256",
3045		.test = alg_test_hash,
3046		.suite = {
3047			.hash = {
3048				.vecs = wp256_tv_template,
3049				.count = WP256_TEST_VECTORS
3050			}
3051		}
3052	}, {
3053		.alg = "wp384",
3054		.test = alg_test_hash,
3055		.suite = {
3056			.hash = {
3057				.vecs = wp384_tv_template,
3058				.count = WP384_TEST_VECTORS
3059			}
3060		}
3061	}, {
3062		.alg = "wp512",
3063		.test = alg_test_hash,
3064		.suite = {
3065			.hash = {
3066				.vecs = wp512_tv_template,
3067				.count = WP512_TEST_VECTORS
3068			}
3069		}
3070	}, {
3071		.alg = "xcbc(aes)",
3072		.test = alg_test_hash,
3073		.suite = {
3074			.hash = {
3075				.vecs = aes_xcbc128_tv_template,
3076				.count = XCBC_AES_TEST_VECTORS
3077			}
3078		}
3079	}, {
3080		.alg = "xts(aes)",
3081		.test = alg_test_skcipher,
3082		.fips_allowed = 1,
3083		.suite = {
3084			.cipher = {
3085				.enc = {
3086					.vecs = aes_xts_enc_tv_template,
3087					.count = AES_XTS_ENC_TEST_VECTORS
3088				},
3089				.dec = {
3090					.vecs = aes_xts_dec_tv_template,
3091					.count = AES_XTS_DEC_TEST_VECTORS
3092				}
3093			}
3094		}
3095	}, {
3096		.alg = "xts(camellia)",
3097		.test = alg_test_skcipher,
3098		.suite = {
3099			.cipher = {
3100				.enc = {
3101					.vecs = camellia_xts_enc_tv_template,
3102					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
3103				},
3104				.dec = {
3105					.vecs = camellia_xts_dec_tv_template,
3106					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
3107				}
3108			}
3109		}
3110	}, {
3111		.alg = "xts(cast6)",
3112		.test = alg_test_skcipher,
3113		.suite = {
3114			.cipher = {
3115				.enc = {
3116					.vecs = cast6_xts_enc_tv_template,
3117					.count = CAST6_XTS_ENC_TEST_VECTORS
3118				},
3119				.dec = {
3120					.vecs = cast6_xts_dec_tv_template,
3121					.count = CAST6_XTS_DEC_TEST_VECTORS
3122				}
3123			}
3124		}
3125	}, {
3126		.alg = "xts(serpent)",
3127		.test = alg_test_skcipher,
3128		.suite = {
3129			.cipher = {
3130				.enc = {
3131					.vecs = serpent_xts_enc_tv_template,
3132					.count = SERPENT_XTS_ENC_TEST_VECTORS
3133				},
3134				.dec = {
3135					.vecs = serpent_xts_dec_tv_template,
3136					.count = SERPENT_XTS_DEC_TEST_VECTORS
3137				}
3138			}
3139		}
3140	}, {
3141		.alg = "xts(twofish)",
3142		.test = alg_test_skcipher,
3143		.suite = {
3144			.cipher = {
3145				.enc = {
3146					.vecs = tf_xts_enc_tv_template,
3147					.count = TF_XTS_ENC_TEST_VECTORS
3148				},
3149				.dec = {
3150					.vecs = tf_xts_dec_tv_template,
3151					.count = TF_XTS_DEC_TEST_VECTORS
3152				}
3153			}
3154		}
3155	}, {
3156		.alg = "zlib",
3157		.test = alg_test_pcomp,
3158		.fips_allowed = 1,
3159		.suite = {
3160			.pcomp = {
3161				.comp = {
3162					.vecs = zlib_comp_tv_template,
3163					.count = ZLIB_COMP_TEST_VECTORS
3164				},
3165				.decomp = {
3166					.vecs = zlib_decomp_tv_template,
3167					.count = ZLIB_DECOMP_TEST_VECTORS
3168				}
3169			}
3170		}
3171	}
3172};
3173
3174static bool alg_test_descs_checked;
3175
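/*
 * The alg_test_descs[] table above must be kept sorted by .alg: the
 * lookup below (alg_find_test()) is a binary search, and the ordering is
 * only verified at runtime, never repaired.
 *
 * A minimal sketch of a new entry, assuming hypothetical
 * newcipher_{enc,dec}_tv_template vectors and their count macros defined
 * in testmgr.h (none of these names exist in this file):
 *
 *	}, {
 *		.alg = "ecb(newcipher)",
 *		.test = alg_test_skcipher,
 *		.suite = {
 *			.cipher = {
 *				.enc = {
 *					.vecs = newcipher_enc_tv_template,
 *					.count = NEWCIPHER_ENC_TEST_VECTORS
 *				},
 *				.dec = {
 *					.vecs = newcipher_dec_tv_template,
 *					.count = NEWCIPHER_DEC_TEST_VECTORS
 *				}
 *			}
 *		}
 *	}, {
 */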
3176static void alg_test_descs_check_order(void)
3177{
3178	int i;
3179
3180	/* only check once */
3181	if (alg_test_descs_checked)
3182		return;
3183
3184	alg_test_descs_checked = true;
3185
3186	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3187		int diff = strcmp(alg_test_descs[i - 1].alg,
3188				  alg_test_descs[i].alg);
3189
3190		if (WARN_ON(diff > 0)) {
3191			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3192				alg_test_descs[i - 1].alg,
3193				alg_test_descs[i].alg);
3194		}
3195
3196		if (WARN_ON(diff == 0)) {
3197			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3198				alg_test_descs[i].alg);
3199		}
3200	}
3201}
3202
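/*
 * Binary search over the sorted alg_test_descs[] table; returns the
 * index of the matching entry, or -1 when the algorithm has no test.
 */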
3203static int alg_find_test(const char *alg)
3204{
3205	int start = 0;
3206	int end = ARRAY_SIZE(alg_test_descs);
3207
3208	while (start < end) {
3209		int i = (start + end) / 2;
3210		int diff = strcmp(alg_test_descs[i].alg, alg);
3211
3212		if (diff > 0) {
3213			end = i;
3214			continue;
3215		}
3216
3217		if (diff < 0) {
3218			start = i + 1;
3219			continue;
3220		}
3221
3222		return i;
3223	}
3224
3225	return -1;
3226}
3227
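/*
 * Self-test entry point: driver names the concrete implementation, alg
 * the algorithm it implements, and type/mask are the crypto type flags
 * of the instance under test.  Plain ciphers are redirected to their
 * "ecb(<alg>)" table entry; for everything else the entries for both the
 * generic name and the driver name are run when present.  In FIPS mode,
 * algorithms without a fips_allowed entry are rejected and a failed
 * self-test panics the kernel.
 *
 * A hedged usage sketch (the driver name is only illustrative, and
 * type/mask are shown as 0 purely for brevity):
 *
 *	int err = alg_test("cbc-aes-aesni", "cbc(aes)", 0, 0);
 */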
3228int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3229{
3230	int i;
3231	int j;
3232	int rc;
3233
	/* Honour the "notests" module parameter outside of FIPS mode. */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

3234	alg_test_descs_check_order();
3235
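	/*
	 * Plain block ciphers have no table entries of their own; they
	 * are exercised through the corresponding "ecb(<alg>)" entry.
	 */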
3236	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3237		char nalg[CRYPTO_MAX_ALG_NAME];
3238
3239		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3240		    sizeof(nalg))
3241			return -ENAMETOOLONG;
3242
3243		i = alg_find_test(nalg);
3244		if (i < 0)
3245			goto notest;
3246
3247		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3248			goto non_fips_alg;
3249
3250		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3251		goto test_done;
3252	}
3253
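	/*
	 * Both the generic algorithm name and the driver-specific name
	 * may have a table entry; run whichever exist, once each.
	 */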
3254	i = alg_find_test(alg);
3255	j = alg_find_test(driver);
3256	if (i < 0 && j < 0)
3257		goto notest;
3258
3259	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3260			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3261		goto non_fips_alg;
3262
3263	rc = 0;
3264	if (i >= 0)
3265		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3266					     type, mask);
3267	if (j >= 0 && j != i)
3268		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3269					     type, mask);
3270
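	/*
	 * Common exit path: in FIPS mode a failed self-test is fatal and
	 * a pass is logged, while outside FIPS mode the accumulated
	 * result is simply returned to the caller.
	 */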
3271test_done:
3272	if (fips_enabled && rc)
3273		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3274
3275	if (fips_enabled && !rc)
3276		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
3277		       driver, alg);
3278
3279	return rc;
3280
3281notest:
3282	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3283	return 0;
3284non_fips_alg:
3285	return -EINVAL;
3286}
3287
3288#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3289
3290EXPORT_SYMBOL_GPL(alg_test);
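/*
 * alg_test() is exported for the crypto manager (cryptomgr), which is
 * expected to invoke it asynchronously whenever a new algorithm instance
 * is registered.  This is a descriptive note about the expected caller
 * (see crypto/algboss.c), not something defined in this file.
 */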