Linux Audio

Check our new training course

Loading...
v5.4
   1// SPDX-License-Identifier: GPL-2.0-or-later
   2/*
   3 * Algorithm testing framework and tests.
   4 *
   5 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
   6 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
   7 * Copyright (c) 2007 Nokia Siemens Networks
   8 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
   9 * Copyright (c) 2019 Google LLC
  10 *
  11 * Updated RFC4106 AES-GCM testing.
  12 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  13 *             Adrian Hoban <adrian.hoban@intel.com>
  14 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
  15 *             Tadeusz Struk (tadeusz.struk@intel.com)
  16 *    Copyright (c) 2010, Intel Corporation.
 
 
 
 
 
 
  17 */
  18
  19#include <crypto/aead.h>
  20#include <crypto/hash.h>
  21#include <crypto/skcipher.h>
  22#include <linux/err.h>
  23#include <linux/fips.h>
  24#include <linux/module.h>
  25#include <linux/once.h>
  26#include <linux/random.h>
  27#include <linux/scatterlist.h>
  28#include <linux/slab.h>
  29#include <linux/string.h>
  30#include <crypto/rng.h>
  31#include <crypto/drbg.h>
  32#include <crypto/akcipher.h>
  33#include <crypto/kpp.h>
  34#include <crypto/acompress.h>
  35#include <crypto/internal/simd.h>
  36
  37#include "internal.h"
  38
/* "notests": skip all crypto self-tests at algorithm registration time */
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

/* "panic_on_fail": panic the kernel instead of just warning when a test fails */
static bool panic_on_fail;
module_param(panic_on_fail, bool, 0444);
  45
  46#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/* "noextratests": run only the basic tests, not the expensive fuzz tests */
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

/* Number of iterations of each randomized fuzz test */
static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");

/*
 * Per-CPU flag used by the tests to force the no-SIMD code paths;
 * exported so that crypto drivers' SIMD helpers can check it.
 */
DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
  57#endif
  58
  59#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  60
/*
 * Self-tests are compiled out (CONFIG_CRYPTO_MANAGER_DISABLE_TESTS):
 * report success for every algorithm without doing any work -- a perfect nop.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}
  66
  67#else
  68
  69#include "testmgr.h"
  70
  71/*
  72 * Need slab memory for testing (size in number of pages).
  73 */
  74#define XBUFSIZE	8
  75
  76/*
 
 
 
 
 
 
 
 
 
 
 
 
  77* Used by test_cipher()
  78*/
  79#define ENCRYPT 1
  80#define DECRYPT 0
  81
 
 
 
 
 
/* Test vectors (and their count) for an AEAD algorithm */
struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a cipher/skcipher algorithm */
struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

/* Separate vector lists for the compression and decompression directions */
struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Test vectors for a hash algorithm */
struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a crypto pseudo-random number generator */
struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a deterministic random bit generator (DRBG) */
struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

/* Test vectors for an asymmetric-key cipher (akcipher) algorithm */
struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

/* Test vectors for a key-agreement protocol primitive (kpp) algorithm */
struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};
 123
/*
 * Description of the self-tests for one algorithm name.
 *
 * @alg: the algorithm name this entry applies to
 * @generic_driver: name of the generic reference implementation; presumably
 *	NULL means it is derived from @alg by the usual "-generic" naming
 *	convention (see build_generic_driver_name()) -- TODO confirm at callers
 * @test: function that runs the tests for this algorithm type
 * @fips_allowed: set if alg is allowed in fips mode
 * @suite: the test vectors; which union member is valid depends on @test
 */
struct alg_test_desc {
	const char *alg;
	const char *generic_driver;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
 142
 
 
 143static void hexdump(unsigned char *buf, unsigned int len)
 144{
 145	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
 146			16, 1,
 147			buf, len, false);
 148}
 149
 150static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
 
 
 
 
 
 
 
 
 
 
 
 151{
 152	int i;
 153
 154	for (i = 0; i < XBUFSIZE; i++) {
 155		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
 156		if (!buf[i])
 157			goto err_free_buf;
 158	}
 159
 160	return 0;
 161
 162err_free_buf:
 163	while (i-- > 0)
 164		free_pages((unsigned long)buf[i], order);
 165
 166	return -ENOMEM;
 167}
 168
/* Allocate XBUFSIZE single-page test buffers. */
static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	return __testmgr_alloc_buf(buf, 0);
}
 173
 174static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
 175{
 176	int i;
 177
 178	for (i = 0; i < XBUFSIZE; i++)
 179		free_pages((unsigned long)buf[i], order);
 180}
 181
/* Free XBUFSIZE single-page test buffers. */
static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	__testmgr_free_buf(buf, 0);
}
 186
 187#define TESTMGR_POISON_BYTE	0xfe
 188#define TESTMGR_POISON_LEN	16
 189
/* Fill a memory region with the poison byte, to later detect overruns. */
static inline void testmgr_poison(void *addr, size_t len)
{
	memset(addr, TESTMGR_POISON_BYTE, len);
}
 194
 195/* Is the memory region still fully poisoned? */
 196static inline bool testmgr_is_poison(const void *addr, size_t len)
 197{
 198	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
 199}
 200
 201/* flush type for hash algorithms */
 202enum flush_type {
 203	/* merge with update of previous buffer(s) */
 204	FLUSH_TYPE_NONE = 0,
 205
 206	/* update with previous buffer(s) before doing this one */
 207	FLUSH_TYPE_FLUSH,
 208
 209	/* likewise, but also export and re-import the intermediate state */
 210	FLUSH_TYPE_REIMPORT,
 211};
 212
 213/* finalization function for hash algorithms */
 214enum finalization_type {
 215	FINALIZATION_TYPE_FINAL,	/* use final() */
 216	FINALIZATION_TYPE_FINUP,	/* use finup() */
 217	FINALIZATION_TYPE_DIGEST,	/* use digest() */
 218};
 219
 220#define TEST_SG_TOTAL	10000
 221
 222/**
 223 * struct test_sg_division - description of a scatterlist entry
 224 *
 225 * This struct describes one entry of a scatterlist being constructed to check a
 226 * crypto test vector.
 227 *
 228 * @proportion_of_total: length of this chunk relative to the total length,
 229 *			 given as a proportion out of TEST_SG_TOTAL so that it
 230 *			 scales to fit any test vector
 231 * @offset: byte offset into a 2-page buffer at which this chunk will start
 232 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 233 *				  @offset
 234 * @flush_type: for hashes, whether an update() should be done now vs.
 235 *		continuing to accumulate data
 236 * @nosimd: if doing the pending update(), do it with SIMD disabled?
 237 */
 238struct test_sg_division {
 239	unsigned int proportion_of_total;
 240	unsigned int offset;
 241	bool offset_relative_to_alignmask;
 242	enum flush_type flush_type;
 243	bool nosimd;
 244};
 245
 246/**
 247 * struct testvec_config - configuration for testing a crypto test vector
 248 *
 249 * This struct describes the data layout and other parameters with which each
 250 * crypto test vector can be tested.
 251 *
 252 * @name: name of this config, logged for debugging purposes if a test fails
 253 * @inplace: operate on the data in-place, if applicable for the algorithm type?
 254 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 255 * @src_divs: description of how to arrange the source scatterlist
 256 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 257 *	      for the algorithm type.  Defaults to @src_divs if unset.
 258 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 259 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 260 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 261 *				     the @iv_offset
 262 * @finalization_type: what finalization function to use for hashes
 263 * @nosimd: execute with SIMD disabled?  Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
 264 */
 265struct testvec_config {
 266	const char *name;
 267	bool inplace;
 268	u32 req_flags;
 269	struct test_sg_division src_divs[XBUFSIZE];
 270	struct test_sg_division dst_divs[XBUFSIZE];
 271	unsigned int iv_offset;
 272	bool iv_offset_relative_to_alignmask;
 273	enum finalization_type finalization_type;
 274	bool nosimd;
 275};
 276
 277#define TESTVEC_CONFIG_NAMELEN	192
 278
 279/*
 280 * The following are the lists of testvec_configs to test for each algorithm
 281 * type when the basic crypto self-tests are enabled, i.e. when
 282 * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset.  They aim to provide good test
 283 * coverage, while keeping the test time much shorter than the full fuzz tests
 284 * so that the basic tests can be enabled in a wider range of circumstances.
 285 */
 286
 287/* Configs for skciphers and aeads */
 288static const struct testvec_config default_cipher_testvec_configs[] = {
 289	{
 290		.name = "in-place",
 291		.inplace = true,
 292		.src_divs = { { .proportion_of_total = 10000 } },
 293	}, {
 294		.name = "out-of-place",
 295		.src_divs = { { .proportion_of_total = 10000 } },
 296	}, {
 297		.name = "unaligned buffer, offset=1",
 298		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
 299		.iv_offset = 1,
 300	}, {
 301		.name = "buffer aligned only to alignmask",
 302		.src_divs = {
 303			{
 304				.proportion_of_total = 10000,
 305				.offset = 1,
 306				.offset_relative_to_alignmask = true,
 307			},
 308		},
 309		.iv_offset = 1,
 310		.iv_offset_relative_to_alignmask = true,
 311	}, {
 312		.name = "two even aligned splits",
 313		.src_divs = {
 314			{ .proportion_of_total = 5000 },
 315			{ .proportion_of_total = 5000 },
 316		},
 317	}, {
 318		.name = "uneven misaligned splits, may sleep",
 319		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
 320		.src_divs = {
 321			{ .proportion_of_total = 1900, .offset = 33 },
 322			{ .proportion_of_total = 3300, .offset = 7  },
 323			{ .proportion_of_total = 4800, .offset = 18 },
 324		},
 325		.iv_offset = 3,
 326	}, {
 327		.name = "misaligned splits crossing pages, inplace",
 328		.inplace = true,
 329		.src_divs = {
 330			{
 331				.proportion_of_total = 7500,
 332				.offset = PAGE_SIZE - 32
 333			}, {
 334				.proportion_of_total = 2500,
 335				.offset = PAGE_SIZE - 7
 336			},
 337		},
 338	}
 339};
 340
 341static const struct testvec_config default_hash_testvec_configs[] = {
 342	{
 343		.name = "init+update+final aligned buffer",
 344		.src_divs = { { .proportion_of_total = 10000 } },
 345		.finalization_type = FINALIZATION_TYPE_FINAL,
 346	}, {
 347		.name = "init+finup aligned buffer",
 348		.src_divs = { { .proportion_of_total = 10000 } },
 349		.finalization_type = FINALIZATION_TYPE_FINUP,
 350	}, {
 351		.name = "digest aligned buffer",
 352		.src_divs = { { .proportion_of_total = 10000 } },
 353		.finalization_type = FINALIZATION_TYPE_DIGEST,
 354	}, {
 355		.name = "init+update+final misaligned buffer",
 356		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
 357		.finalization_type = FINALIZATION_TYPE_FINAL,
 358	}, {
 359		.name = "digest buffer aligned only to alignmask",
 360		.src_divs = {
 361			{
 362				.proportion_of_total = 10000,
 363				.offset = 1,
 364				.offset_relative_to_alignmask = true,
 365			},
 366		},
 367		.finalization_type = FINALIZATION_TYPE_DIGEST,
 368	}, {
 369		.name = "init+update+update+final two even splits",
 370		.src_divs = {
 371			{ .proportion_of_total = 5000 },
 372			{
 373				.proportion_of_total = 5000,
 374				.flush_type = FLUSH_TYPE_FLUSH,
 375			},
 376		},
 377		.finalization_type = FINALIZATION_TYPE_FINAL,
 378	}, {
 379		.name = "digest uneven misaligned splits, may sleep",
 380		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
 381		.src_divs = {
 382			{ .proportion_of_total = 1900, .offset = 33 },
 383			{ .proportion_of_total = 3300, .offset = 7  },
 384			{ .proportion_of_total = 4800, .offset = 18 },
 385		},
 386		.finalization_type = FINALIZATION_TYPE_DIGEST,
 387	}, {
 388		.name = "digest misaligned splits crossing pages",
 389		.src_divs = {
 390			{
 391				.proportion_of_total = 7500,
 392				.offset = PAGE_SIZE - 32,
 393			}, {
 394				.proportion_of_total = 2500,
 395				.offset = PAGE_SIZE - 7,
 396			},
 397		},
 398		.finalization_type = FINALIZATION_TYPE_DIGEST,
 399	}, {
 400		.name = "import/export",
 401		.src_divs = {
 402			{
 403				.proportion_of_total = 6500,
 404				.flush_type = FLUSH_TYPE_REIMPORT,
 405			}, {
 406				.proportion_of_total = 3500,
 407				.flush_type = FLUSH_TYPE_REIMPORT,
 408			},
 409		},
 410		.finalization_type = FINALIZATION_TYPE_FINAL,
 411	}
 412};
 413
 414static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
 415{
 416	unsigned int remaining = TEST_SG_TOTAL;
 417	unsigned int ndivs = 0;
 418
 419	do {
 420		remaining -= divs[ndivs++].proportion_of_total;
 421	} while (remaining);
 422
 423	return ndivs;
 424}
 425
 426#define SGDIVS_HAVE_FLUSHES	BIT(0)
 427#define SGDIVS_HAVE_NOSIMD	BIT(1)
 428
 429static bool valid_sg_divisions(const struct test_sg_division *divs,
 430			       unsigned int count, int *flags_ret)
 431{
 432	unsigned int total = 0;
 433	unsigned int i;
 434
 435	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
 436		if (divs[i].proportion_of_total <= 0 ||
 437		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
 438			return false;
 439		total += divs[i].proportion_of_total;
 440		if (divs[i].flush_type != FLUSH_TYPE_NONE)
 441			*flags_ret |= SGDIVS_HAVE_FLUSHES;
 442		if (divs[i].nosimd)
 443			*flags_ret |= SGDIVS_HAVE_NOSIMD;
 444	}
 445	return total == TEST_SG_TOTAL &&
 446		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
 447}
 448
 449/*
 450 * Check whether the given testvec_config is valid.  This isn't strictly needed
 451 * since every testvec_config should be valid, but check anyway so that people
 452 * don't unknowingly add broken configs that don't do what they wanted.
 453 */
static bool valid_testvec_config(const struct testvec_config *cfg)
{
	int flags = 0;

	/* Every config must be named, for error messages */
	if (cfg->name == NULL)
		return false;

	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
				&flags))
		return false;

	if (cfg->dst_divs[0].proportion_of_total) {
		if (!valid_sg_divisions(cfg->dst_divs,
					ARRAY_SIZE(cfg->dst_divs), &flags))
			return false;
	} else {
		/* dst_divs unset: it must be entirely zero ... */
		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
			return false;
		/* defaults to dst_divs=src_divs */
	}

	/* The (possibly alignmask-adjusted) IV offset must stay in range */
	if (cfg->iv_offset +
	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
	    MAX_ALGAPI_ALIGNMASK + 1)
		return false;

	/* Flushes and per-update nosimd make no sense with one-shot digest() */
	if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
	    cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
		return false;

	/* nosimd requires !CRYPTO_TFM_REQ_MAY_SLEEP (see struct docs above) */
	if ((cfg->nosimd || (flags & SGDIVS_HAVE_NOSIMD)) &&
	    (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
		return false;

	return true;
}
 490
/*
 * A scatterlist plus its backing buffers for scatterlist-based crypto tests.
 *
 * @bufs: XBUFSIZE two-page buffers the scatterlist entries point into
 * @sgl: the scatterlist, as built by build_test_sglist()
 * @sgl_saved: copy of @sgl used to detect corruption after an operation
 * @sgl_ptr: points to @sgl, or to another test_sglist's @sgl for in-place ops
 * @nents: number of entries in @sgl
 */
struct test_sglist {
	char *bufs[XBUFSIZE];
	struct scatterlist sgl[XBUFSIZE];
	struct scatterlist sgl_saved[XBUFSIZE];
	struct scatterlist *sgl_ptr;
	unsigned int nents;
};
 498
/* Allocate the two-page buffers backing @tsgl.  Returns 0 or -ENOMEM. */
static int init_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
}
 503
 504static void destroy_test_sglist(struct test_sglist *tsgl)
 505{
 506	return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
 507}
 508
 509/**
 510 * build_test_sglist() - build a scatterlist for a crypto test
 511 *
 512 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 513 *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
 514 * @divs: the layout specification on which the scatterlist will be based
 515 * @alignmask: the algorithm's alignmask
 516 * @total_len: the total length of the scatterlist to build in bytes
 517 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 518 *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
 519 *	  past the end of each buffer will be poisoned to help detect overruns.
 520 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 521 *	      corresponds will be returned here.  This will match @divs except
 522 *	      that divisions resolving to a length of 0 are omitted as they are
 523 *	      not included in the scatterlist.
 524 *
 525 * Return: 0 or a -errno value
 526 */
static int build_test_sglist(struct test_sglist *tsgl,
			     const struct test_sg_division *divs,
			     const unsigned int alignmask,
			     const unsigned int total_len,
			     struct iov_iter *data,
			     const struct test_sg_division *out_divs[XBUFSIZE])
{
	struct {
		const struct test_sg_division *div;
		size_t length;
	} partitions[XBUFSIZE];
	const unsigned int ndivs = count_test_sg_divisions(divs);
	unsigned int len_remaining = total_len;
	unsigned int i;

	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
		return -EINVAL;

	/* Calculate the (div, length) pairs */
	tsgl->nents = 0;
	for (i = 0; i < ndivs; i++) {
		/* Round the proportional length to nearest, then clamp */
		unsigned int len_this_sg =
			min(len_remaining,
			    (total_len * divs[i].proportion_of_total +
			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);

		/* Zero-length divisions are dropped from the scatterlist */
		if (len_this_sg != 0) {
			partitions[tsgl->nents].div = &divs[i];
			partitions[tsgl->nents].length = len_this_sg;
			tsgl->nents++;
			len_remaining -= len_this_sg;
		}
	}
	/* Always emit at least one entry, even for total_len == 0 */
	if (tsgl->nents == 0) {
		partitions[tsgl->nents].div = &divs[0];
		partitions[tsgl->nents].length = 0;
		tsgl->nents++;
	}
	/* Give any rounding leftover to the last entry */
	partitions[tsgl->nents - 1].length += len_remaining;

	/* Set up the sgl entries and fill the data or poison */
	sg_init_table(tsgl->sgl, tsgl->nents);
	for (i = 0; i < tsgl->nents; i++) {
		unsigned int offset = partitions[i].div->offset;
		void *addr;

		if (partitions[i].div->offset_relative_to_alignmask)
			offset += alignmask;

		/*
		 * Halve the offset until the chunk plus its trailing poison
		 * fits inside the 2-page buffer.
		 */
		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
		       2 * PAGE_SIZE) {
			if (WARN_ON(offset <= 0))
				return -EINVAL;
			offset /= 2;
		}

		addr = &tsgl->bufs[i][offset];
		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

		if (out_divs)
			out_divs[i] = partitions[i].div;

		if (data) {
			size_t copy_len, copied;

			copy_len = min(partitions[i].length, data->count);
			copied = copy_from_iter(addr, copy_len, data);
			if (WARN_ON(copied != copy_len))
				return -EINVAL;
			/* Poison everything past the copied data */
			testmgr_poison(addr + copy_len, partitions[i].length +
				       TESTMGR_POISON_LEN - copy_len);
		} else {
			testmgr_poison(addr, partitions[i].length +
				       TESTMGR_POISON_LEN);
		}
	}

	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
	tsgl->sgl_ptr = tsgl->sgl;
	/* Snapshot the sgl so later corruption can be detected */
	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
	return 0;
}
 610
 611/*
 612 * Verify that a scatterlist crypto operation produced the correct output.
 613 *
 614 * @tsgl: scatterlist containing the actual output
 615 * @expected_output: buffer containing the expected output
 616 * @len_to_check: length of @expected_output in bytes
 617 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 618 * @check_poison: verify that the poison bytes after each chunk are intact?
 619 *
 620 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 621 */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		/* Skip over the ignored prefix (e.g. AAD for AEADs) */
		if (unchecked_prefix_len) {
			if (unchecked_prefix_len >= len) {
				unchecked_prefix_len -= len;
				continue;
			}
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		/* Trailing poison intact => no overrun past this chunk */
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	/* The scatterlist must have covered all of @expected_output */
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}
 659
 660static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
 661{
 662	unsigned int i;
 663
 664	for (i = 0; i < tsgl->nents; i++) {
 665		if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
 666			return true;
 667		if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
 668			return true;
 669		if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
 670			return true;
 671	}
 672	return false;
 673}
 674
/* Source and destination scatterlists for an skcipher or AEAD test */
struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};
 679
 680static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
 681{
 682	struct cipher_test_sglists *tsgls;
 683
 684	tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
 685	if (!tsgls)
 686		return NULL;
 687
 688	if (init_test_sglist(&tsgls->src) != 0)
 689		goto fail_kfree;
 690	if (init_test_sglist(&tsgls->dst) != 0)
 691		goto fail_destroy_src;
 692
 693	return tsgls;
 694
 695fail_destroy_src:
 696	destroy_test_sglist(&tsgls->src);
 697fail_kfree:
 698	kfree(tsgls);
 699	return NULL;
 700}
 701
 702static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
 703{
 704	if (tsgls) {
 705		destroy_test_sglist(&tsgls->src);
 706		destroy_test_sglist(&tsgls->dst);
 707		kfree(tsgls);
 708	}
 709}
 710
/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
	/*
	 * For in-place operation the single buffer must be able to hold
	 * whichever of input and output is larger.
	 */
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	if (cfg->inplace) {
		/* dst shares src's scatterlist; no separate dst is built */
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	/* dst_divs defaults to src_divs when unset (see valid_testvec_config) */
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}
 742
 743#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
 744
 745/* Generate a random length in range [0, max_len], but prefer smaller values */
 746static unsigned int generate_random_length(unsigned int max_len)
 747{
 748	unsigned int len = prandom_u32() % (max_len + 1);
 749
 750	switch (prandom_u32() % 4) {
 751	case 0:
 752		return len % 64;
 753	case 1:
 754		return len % 256;
 755	case 2:
 756		return len % 1024;
 757	default:
 758		return len;
 759	}
 760}
 761
/* Sometimes make some random changes to the given data buffer */
static void mutate_buffer(u8 *buf, size_t count)
{
	size_t num_flips;
	size_t i;
	size_t pos;

	/* Sometimes flip some bits (at most 128, capped to the buffer size) */
	if (prandom_u32() % 4 == 0) {
		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), count * 8);
		for (i = 0; i < num_flips; i++) {
			pos = prandom_u32() % (count * 8);
			buf[pos / 8] ^= 1 << (pos % 8);
		}
	}

	/* Sometimes flip some bytes (at most 128, capped to the buffer size) */
	if (prandom_u32() % 4 == 0) {
		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), count);
		for (i = 0; i < num_flips; i++)
			buf[prandom_u32() % count] ^= 0xff;
	}
}
 785
/* Randomly generate 'count' bytes, but sometimes make them "interesting" */
static void generate_random_bytes(u8 *buf, size_t count)
{
	u8 b;
	u8 increment;
	size_t i;

	if (count == 0)
		return;

	switch (prandom_u32() % 8) { /* Choose a generation strategy */
	case 0:
	case 1:
		/* All the same byte, plus optional mutations */
		switch (prandom_u32() % 4) {
		case 0:
			b = 0x00;
			break;
		case 1:
			b = 0xff;
			break;
		default:
			b = (u8)prandom_u32();
			break;
		}
		memset(buf, b, count);
		mutate_buffer(buf, count);
		break;
	case 2:
		/* Ascending or descending bytes, plus optional mutations */
		increment = (u8)prandom_u32();
		b = (u8)prandom_u32();
		for (i = 0; i < count; i++, b += increment)
			buf[i] = b;
		mutate_buffer(buf, count);
		break;
	default:
		/* Fully random bytes */
		for (i = 0; i < count; i++)
			buf[i] = (u8)prandom_u32();
	}
}
 828
/*
 * Randomly partition TEST_SG_TOTAL into up to @max_divs scatterlist
 * divisions, writing them to @divs and appending a human-readable
 * description to the buffer at [@p, @end).  @gen_flushes enables random
 * flush/reimport types (only meaningful for hashes).  Returns the updated
 * description pointer.
 */
static char *generate_random_sgl_divisions(struct test_sg_division *divs,
					   size_t max_divs, char *p, char *end,
					   bool gen_flushes, u32 req_flags)
{
	struct test_sg_division *div = divs;
	unsigned int remaining = TEST_SG_TOTAL;

	do {
		unsigned int this_len;
		const char *flushtype_str;

		/* The last slot must consume everything that remains */
		if (div == &divs[max_divs - 1] || prandom_u32() % 2 == 0)
			this_len = remaining;
		else
			this_len = 1 + (prandom_u32() % remaining);
		div->proportion_of_total = this_len;

		/* Bias offsets toward page-boundary and small values */
		if (prandom_u32() % 4 == 0)
			div->offset = (PAGE_SIZE - 128) + (prandom_u32() % 128);
		else if (prandom_u32() % 2 == 0)
			div->offset = prandom_u32() % 32;
		else
			div->offset = prandom_u32() % PAGE_SIZE;
		if (prandom_u32() % 8 == 0)
			div->offset_relative_to_alignmask = true;

		div->flush_type = FLUSH_TYPE_NONE;
		if (gen_flushes) {
			switch (prandom_u32() % 4) {
			case 0:
				div->flush_type = FLUSH_TYPE_REIMPORT;
				break;
			case 1:
				div->flush_type = FLUSH_TYPE_FLUSH;
				break;
			}
		}

		/* nosimd is only allowed when the request may not sleep */
		if (div->flush_type != FLUSH_TYPE_NONE &&
		    !(req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
		    prandom_u32() % 2 == 0)
			div->nosimd = true;

		switch (div->flush_type) {
		case FLUSH_TYPE_FLUSH:
			if (div->nosimd)
				flushtype_str = "<flush,nosimd>";
			else
				flushtype_str = "<flush>";
			break;
		case FLUSH_TYPE_REIMPORT:
			if (div->nosimd)
				flushtype_str = "<reimport,nosimd>";
			else
				flushtype_str = "<reimport>";
			break;
		default:
			flushtype_str = "";
			break;
		}

		BUILD_BUG_ON(TEST_SG_TOTAL != 10000); /* for "%u.%u%%" */
		p += scnprintf(p, end - p, "%s%u.%u%%@%s+%u%s", flushtype_str,
			       this_len / 100, this_len % 100,
			       div->offset_relative_to_alignmask ?
					"alignmask" : "",
			       div->offset, this_len == remaining ? "" : ", ");
		remaining -= this_len;
		div++;
	} while (remaining);

	return p;
}
 902
/*
 * Generate a random testvec_config for fuzz testing.  The config's
 * description is written to @name (also installed as cfg->name), so @name
 * must outlive the config.
 */
static void generate_random_testvec_config(struct testvec_config *cfg,
					   char *name, size_t max_namelen)
{
	char *p = name;
	char * const end = name + max_namelen;

	memset(cfg, 0, sizeof(*cfg));

	cfg->name = name;

	p += scnprintf(p, end - p, "random:");

	if (prandom_u32() % 2 == 0) {
		cfg->inplace = true;
		p += scnprintf(p, end - p, " inplace");
	}

	if (prandom_u32() % 2 == 0) {
		cfg->req_flags |= CRYPTO_TFM_REQ_MAY_SLEEP;
		p += scnprintf(p, end - p, " may_sleep");
	}

	/* Only used for hashes; harmless for other algorithm types */
	switch (prandom_u32() % 4) {
	case 0:
		cfg->finalization_type = FINALIZATION_TYPE_FINAL;
		p += scnprintf(p, end - p, " use_final");
		break;
	case 1:
		cfg->finalization_type = FINALIZATION_TYPE_FINUP;
		p += scnprintf(p, end - p, " use_finup");
		break;
	default:
		cfg->finalization_type = FINALIZATION_TYPE_DIGEST;
		p += scnprintf(p, end - p, " use_digest");
		break;
	}

	/* nosimd requires !CRYPTO_TFM_REQ_MAY_SLEEP */
	if (!(cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) &&
	    prandom_u32() % 2 == 0) {
		cfg->nosimd = true;
		p += scnprintf(p, end - p, " nosimd");
	}

	p += scnprintf(p, end - p, " src_divs=[");
	p = generate_random_sgl_divisions(cfg->src_divs,
					  ARRAY_SIZE(cfg->src_divs), p, end,
					  (cfg->finalization_type !=
					   FINALIZATION_TYPE_DIGEST),
					  cfg->req_flags);
	p += scnprintf(p, end - p, "]");

	/* dst_divs only makes sense for out-of-place operation */
	if (!cfg->inplace && prandom_u32() % 2 == 0) {
		p += scnprintf(p, end - p, " dst_divs=[");
		p = generate_random_sgl_divisions(cfg->dst_divs,
						  ARRAY_SIZE(cfg->dst_divs),
						  p, end, false,
						  cfg->req_flags);
		p += scnprintf(p, end - p, "]");
	}

	if (prandom_u32() % 2 == 0) {
		cfg->iv_offset = 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK);
		p += scnprintf(p, end - p, " iv_offset=%u", cfg->iv_offset);
	}

	/* Catch generator bugs: the result must pass the same validation */
	WARN_ON_ONCE(!valid_testvec_config(cfg));
}
 971
/*
 * Force the no-SIMD code paths on this CPU.  Preemption stays disabled
 * until crypto_reenable_simd_for_test() so the per-CPU flag stays valid.
 */
static void crypto_disable_simd_for_test(void)
{
	preempt_disable();
	__this_cpu_write(crypto_simd_disabled_for_test, true);
}
 977
/* Undo crypto_disable_simd_for_test(): clear the flag, re-enable preemption */
static void crypto_reenable_simd_for_test(void)
{
	__this_cpu_write(crypto_simd_disabled_for_test, false);
	preempt_enable();
}
 983
 984/*
 985 * Given an algorithm name, build the name of the generic implementation of that
 986 * algorithm, assuming the usual naming convention.  Specifically, this appends
 987 * "-generic" to every part of the name that is not a template name.  Examples:
 988 *
 989 *	aes => aes-generic
 990 *	cbc(aes) => cbc(aes-generic)
 991 *	cts(cbc(aes)) => cts(cbc(aes-generic))
 992 *	rfc7539(chacha20,poly1305) => rfc7539(chacha20-generic,poly1305-generic)
 993 *
 994 * Return: 0 on success, or -ENAMETOOLONG if the generic name would be too long
 995 */
static int build_generic_driver_name(const char *algname,
				     char driver_name[CRYPTO_MAX_ALG_NAME])
{
	const char *in = algname;
	char *out = driver_name;
	size_t len = strlen(algname);

	if (len >= CRYPTO_MAX_ALG_NAME)
		goto too_long;
	do {
		const char *in_saved = in;

		/* Copy one name component, stopping at any delimiter */
		while (*in && *in != '(' && *in != ')' && *in != ',')
			*out++ = *in++;
		/*
		 * A nonempty component not followed by '(' is a plain
		 * algorithm name (not a template), so append "-generic".
		 */
		if (*in != '(' && in > in_saved) {
			len += 8;	/* strlen("-generic") */
			if (len >= CRYPTO_MAX_ALG_NAME)
				goto too_long;
			memcpy(out, "-generic", 8);
			out += 8;
		}
	} while ((*out++ = *in++) != '\0');	/* copy delimiter/terminator */
	return 0;

too_long:
	pr_err("alg: generic driver name for \"%s\" would be too long\n",
	       algname);
	return -ENAMETOOLONG;
}
1025#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* No-op stubs: without the extra tests, SIMD is never artificially disabled */
static void crypto_disable_simd_for_test(void)
{
}

static void crypto_reenable_simd_for_test(void)
{
}
1033#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1034
1035static int build_hash_sglist(struct test_sglist *tsgl,
1036			     const struct hash_testvec *vec,
1037			     const struct testvec_config *cfg,
1038			     unsigned int alignmask,
1039			     const struct test_sg_division *divs[XBUFSIZE])
1040{
1041	struct kvec kv;
1042	struct iov_iter input;
1043
1044	kv.iov_base = (void *)vec->plaintext;
1045	kv.iov_len = vec->psize;
1046	iov_iter_kvec(&input, WRITE, &kv, 1, vec->psize);
1047	return build_test_sglist(tsgl, cfg->src_divs, alignmask, vec->psize,
1048				 &input, divs);
1049}
1050
1051static int check_hash_result(const char *type,
1052			     const u8 *result, unsigned int digestsize,
1053			     const struct hash_testvec *vec,
1054			     const char *vec_name,
1055			     const char *driver,
1056			     const struct testvec_config *cfg)
1057{
1058	if (memcmp(result, vec->digest, digestsize) != 0) {
1059		pr_err("alg: %s: %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
1060		       type, driver, vec_name, cfg->name);
1061		return -EINVAL;
1062	}
1063	if (!testmgr_is_poison(&result[digestsize], TESTMGR_POISON_LEN)) {
1064		pr_err("alg: %s: %s overran result buffer on test vector %s, cfg=\"%s\"\n",
1065		       type, driver, vec_name, cfg->name);
1066		return -EOVERFLOW;
1067	}
1068	return 0;
1069}
1070
1071static inline int check_shash_op(const char *op, int err,
1072				 const char *driver, const char *vec_name,
1073				 const struct testvec_config *cfg)
1074{
1075	if (err)
1076		pr_err("alg: shash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1077		       driver, op, err, vec_name, cfg->name);
1078	return err;
1079}
1080
1081static inline const void *sg_data(struct scatterlist *sg)
1082{
1083	return page_address(sg_page(sg)) + sg->offset;
1084}
1085
/*
 * Test one hash test vector in one configuration, using the shash API.
 *
 * @driver: name of the implementation under test, for log messages
 * @vec: test vector (optional key, plaintext, expected digest, and the
 *	 errors, if any, that setkey()/digest() are expected to return)
 * @vec_name: printable name of @vec for log messages
 * @cfg: describes how to lay out the data (sg divisions, finalization
 *	 method, whether to disable SIMD, ...)
 * @desc: shash descriptor whose ->tfm is the transform under test
 * @tsgl: scratch scatterlist structure for the source data
 * @hashstate: scratch buffer of statesize + TESTMGR_POISON_LEN bytes,
 *	       used to exercise export()/import()
 *
 * Returns 0 on success (including when an expected error occurred),
 * or a negative errno on test failure.
 */
static int test_shash_vec_cfg(const char *driver,
			      const struct hash_testvec *vec,
			      const char *vec_name,
			      const struct testvec_config *cfg,
			      struct shash_desc *desc,
			      struct test_sglist *tsgl,
			      u8 *hashstate)
{
	struct crypto_shash *tfm = desc->tfm;
	const unsigned int alignmask = crypto_shash_alignmask(tfm);
	const unsigned int digestsize = crypto_shash_digestsize(tfm);
	const unsigned int statesize = crypto_shash_statesize(tfm);
	const struct test_sg_division *divs[XBUFSIZE];
	unsigned int i;
	/* digest plus trailing poison bytes, to detect result overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = crypto_shash_setkey(tfm, vec->key, vec->ksize);
		if (err) {
			/* An error the vector expects counts as success. */
			if (err == vec->setkey_error)
				return 0;
			pr_err("alg: shash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
			       driver, vec_name, vec->setkey_error, err,
			       crypto_shash_get_flags(tfm));
			return err;
		}
		if (vec->setkey_error) {
			pr_err("alg: shash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
			       driver, vec_name, vec->setkey_error);
			return -EINVAL;
		}
	}

	/* Build the scatterlist for the source data */
	err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
	if (err) {
		pr_err("alg: shash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
		       driver, vec_name, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	/* Poison the descriptor context and result so stale data is caught. */
	testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
	    vec->digest_error) {
		/* Just using digest() */
		/*
		 * shash digest() takes a single flat buffer, so this
		 * configuration can only be exercised when the data ended
		 * up in exactly one sg entry; otherwise skip silently.
		 */
		if (tsgl->nents != 1)
			return 0;
		if (cfg->nosimd)
			crypto_disable_simd_for_test();
		err = crypto_shash_digest(desc, sg_data(&tsgl->sgl[0]),
					  tsgl->sgl[0].length, result);
		if (cfg->nosimd)
			crypto_reenable_simd_for_test();
		if (err) {
			if (err == vec->digest_error)
				return 0;
			pr_err("alg: shash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, err,
			       cfg->name);
			return err;
		}
		if (vec->digest_error) {
			pr_err("alg: shash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, cfg->name);
			return -EINVAL;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = crypto_shash_init(desc);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = check_shash_op("init", err, driver, vec_name, cfg);
	if (err)
		return err;

	for (i = 0; i < tsgl->nents; i++) {
		/* Last division: finish with finup() if so configured. */
		if (i + 1 == tsgl->nents &&
		    cfg->finalization_type == FINALIZATION_TYPE_FINUP) {
			if (divs[i]->nosimd)
				crypto_disable_simd_for_test();
			err = crypto_shash_finup(desc, sg_data(&tsgl->sgl[i]),
						 tsgl->sgl[i].length, result);
			if (divs[i]->nosimd)
				crypto_reenable_simd_for_test();
			err = check_shash_op("finup", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
			goto result_ready;
		}
		if (divs[i]->nosimd)
			crypto_disable_simd_for_test();
		err = crypto_shash_update(desc, sg_data(&tsgl->sgl[i]),
					  tsgl->sgl[i].length);
		if (divs[i]->nosimd)
			crypto_reenable_simd_for_test();
		err = check_shash_op("update", err, driver, vec_name, cfg);
		if (err)
			return err;
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			/* Poison past the state to detect export() overruns. */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_shash_export(desc, hashstate);
			err = check_shash_op("export", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: shash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
				       driver, vec_name, cfg->name);
				return -EOVERFLOW;
			}
			/* Repoison the context so import() must restore it. */
			testmgr_poison(desc->__ctx, crypto_shash_descsize(tfm));
			err = crypto_shash_import(desc, hashstate);
			err = check_shash_op("import", err, driver, vec_name,
					     cfg);
			if (err)
				return err;
		}
	}

	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = crypto_shash_final(desc, result);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = check_shash_op("final", err, driver, vec_name, cfg);
	if (err)
		return err;
result_ready:
	return check_hash_result("shash", result, digestsize, vec, vec_name,
				 driver, cfg);
}
1233
1234static int do_ahash_op(int (*op)(struct ahash_request *req),
1235		       struct ahash_request *req,
1236		       struct crypto_wait *wait, bool nosimd)
1237{
1238	int err;
1239
1240	if (nosimd)
1241		crypto_disable_simd_for_test();
1242
1243	err = op(req);
1244
1245	if (nosimd)
1246		crypto_reenable_simd_for_test();
1247
1248	return crypto_wait_req(err, wait);
1249}
1250
1251static int check_nonfinal_ahash_op(const char *op, int err,
1252				   u8 *result, unsigned int digestsize,
1253				   const char *driver, const char *vec_name,
1254				   const struct testvec_config *cfg)
1255{
1256	if (err) {
1257		pr_err("alg: ahash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1258		       driver, op, err, vec_name, cfg->name);
1259		return err;
1260	}
1261	if (!testmgr_is_poison(result, digestsize)) {
1262		pr_err("alg: ahash: %s %s() used result buffer on test vector %s, cfg=\"%s\"\n",
1263		       driver, op, vec_name, cfg->name);
1264		return -EINVAL;
1265	}
1266	return 0;
1267}
1268
/*
 * Test one hash test vector in one configuration, using the ahash API.
 *
 * Mirrors test_shash_vec_cfg(), but drives the asynchronous hash API:
 * requests may complete asynchronously, so each operation goes through
 * do_ahash_op() which waits on @wait.  Data divisions are accumulated in
 * (pending_sgl, pending_len) and only submitted as an update() when a
 * division asks for a flush, exercising multi-division updates.
 *
 * Returns 0 on success (including expected errors), negative errno on
 * test failure.
 */
static int test_ahash_vec_cfg(const char *driver,
			      const struct hash_testvec *vec,
			      const char *vec_name,
			      const struct testvec_config *cfg,
			      struct ahash_request *req,
			      struct test_sglist *tsgl,
			      u8 *hashstate)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	const unsigned int alignmask = crypto_ahash_alignmask(tfm);
	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
	const unsigned int statesize = crypto_ahash_statesize(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const struct test_sg_division *divs[XBUFSIZE];
	DECLARE_CRYPTO_WAIT(wait);
	unsigned int i;
	/* sg entries not yet passed to update(), and their total length */
	struct scatterlist *pending_sgl;
	unsigned int pending_len;
	/* digest plus trailing poison bytes, to detect result overruns */
	u8 result[HASH_MAX_DIGESTSIZE + TESTMGR_POISON_LEN];
	int err;

	/* Set the key, if specified */
	if (vec->ksize) {
		err = crypto_ahash_setkey(tfm, vec->key, vec->ksize);
		if (err) {
			/* An error the vector expects counts as success. */
			if (err == vec->setkey_error)
				return 0;
			pr_err("alg: ahash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
			       driver, vec_name, vec->setkey_error, err,
			       crypto_ahash_get_flags(tfm));
			return err;
		}
		if (vec->setkey_error) {
			pr_err("alg: ahash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
			       driver, vec_name, vec->setkey_error);
			return -EINVAL;
		}
	}

	/* Build the scatterlist for the source data */
	err = build_hash_sglist(tsgl, vec, cfg, alignmask, divs);
	if (err) {
		pr_err("alg: ahash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
		       driver, vec_name, cfg->name);
		return err;
	}

	/* Do the actual hashing */

	/* Poison the request context and result so stale data is caught. */
	testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
	testmgr_poison(result, digestsize + TESTMGR_POISON_LEN);

	if (cfg->finalization_type == FINALIZATION_TYPE_DIGEST ||
	    vec->digest_error) {
		/* Just using digest() */
		ahash_request_set_callback(req, req_flags, crypto_req_done,
					   &wait);
		ahash_request_set_crypt(req, tsgl->sgl, result, vec->psize);
		err = do_ahash_op(crypto_ahash_digest, req, &wait, cfg->nosimd);
		if (err) {
			if (err == vec->digest_error)
				return 0;
			pr_err("alg: ahash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, err,
			       cfg->name);
			return err;
		}
		if (vec->digest_error) {
			pr_err("alg: ahash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
			       driver, vec_name, vec->digest_error, cfg->name);
			return -EINVAL;
		}
		goto result_ready;
	}

	/* Using init(), zero or more update(), then final() or finup() */

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, NULL, result, 0);
	err = do_ahash_op(crypto_ahash_init, req, &wait, cfg->nosimd);
	err = check_nonfinal_ahash_op("init", err, result, digestsize,
				      driver, vec_name, cfg);
	if (err)
		return err;

	pending_sgl = NULL;
	pending_len = 0;
	for (i = 0; i < tsgl->nents; i++) {
		/* A flush request submits everything accumulated so far. */
		if (divs[i]->flush_type != FLUSH_TYPE_NONE &&
		    pending_sgl != NULL) {
			/* update() with the pending data */
			ahash_request_set_callback(req, req_flags,
						   crypto_req_done, &wait);
			ahash_request_set_crypt(req, pending_sgl, result,
						pending_len);
			err = do_ahash_op(crypto_ahash_update, req, &wait,
					  divs[i]->nosimd);
			err = check_nonfinal_ahash_op("update", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
			pending_sgl = NULL;
			pending_len = 0;
		}
		if (divs[i]->flush_type == FLUSH_TYPE_REIMPORT) {
			/* Test ->export() and ->import() */
			/* Poison past the state to detect export() overruns. */
			testmgr_poison(hashstate + statesize,
				       TESTMGR_POISON_LEN);
			err = crypto_ahash_export(req, hashstate);
			err = check_nonfinal_ahash_op("export", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
			if (!testmgr_is_poison(hashstate + statesize,
					       TESTMGR_POISON_LEN)) {
				pr_err("alg: ahash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
				       driver, vec_name, cfg->name);
				return -EOVERFLOW;
			}

			/* Repoison the context so import() must restore it. */
			testmgr_poison(req->__ctx, crypto_ahash_reqsize(tfm));
			err = crypto_ahash_import(req, hashstate);
			err = check_nonfinal_ahash_op("import", err,
						      result, digestsize,
						      driver, vec_name, cfg);
			if (err)
				return err;
		}
		/* Start or extend the run of not-yet-hashed divisions. */
		if (pending_sgl == NULL)
			pending_sgl = &tsgl->sgl[i];
		pending_len += tsgl->sgl[i].length;
	}

	ahash_request_set_callback(req, req_flags, crypto_req_done, &wait);
	ahash_request_set_crypt(req, pending_sgl, result, pending_len);
	if (cfg->finalization_type == FINALIZATION_TYPE_FINAL) {
		/* finish with update() and final() */
		err = do_ahash_op(crypto_ahash_update, req, &wait, cfg->nosimd);
		err = check_nonfinal_ahash_op("update", err, result, digestsize,
					      driver, vec_name, cfg);
		if (err)
			return err;
		err = do_ahash_op(crypto_ahash_final, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: ahash: %s final() failed with err %d on test vector %s, cfg=\"%s\"\n",
			       driver, err, vec_name, cfg->name);
			return err;
		}
	} else {
		/* finish with finup() */
		err = do_ahash_op(crypto_ahash_finup, req, &wait, cfg->nosimd);
		if (err) {
			pr_err("alg: ahash: %s finup() failed with err %d on test vector %s, cfg=\"%s\"\n",
			       driver, err, vec_name, cfg->name);
			return err;
		}
	}

result_ready:
	return check_hash_result("ahash", result, digestsize, vec, vec_name,
				 driver, cfg);



}
1434
1435static int test_hash_vec_cfg(const char *driver,
1436			     const struct hash_testvec *vec,
1437			     const char *vec_name,
1438			     const struct testvec_config *cfg,
1439			     struct ahash_request *req,
1440			     struct shash_desc *desc,
1441			     struct test_sglist *tsgl,
1442			     u8 *hashstate)
1443{
1444	int err;
1445
1446	/*
1447	 * For algorithms implemented as "shash", most bugs will be detected by
1448	 * both the shash and ahash tests.  Test the shash API first so that the
1449	 * failures involve less indirection, so are easier to debug.
1450	 */
1451
1452	if (desc) {
1453		err = test_shash_vec_cfg(driver, vec, vec_name, cfg, desc, tsgl,
1454					 hashstate);
1455		if (err)
1456			return err;
1457	}
1458
1459	return test_ahash_vec_cfg(driver, vec, vec_name, cfg, req, tsgl,
1460				  hashstate);
1461}
1462
1463static int test_hash_vec(const char *driver, const struct hash_testvec *vec,
1464			 unsigned int vec_num, struct ahash_request *req,
1465			 struct shash_desc *desc, struct test_sglist *tsgl,
1466			 u8 *hashstate)
1467{
1468	char vec_name[16];
1469	unsigned int i;
1470	int err;
1471
1472	sprintf(vec_name, "%u", vec_num);
 
 
 
1473
1474	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++) {
1475		err = test_hash_vec_cfg(driver, vec, vec_name,
1476					&default_hash_testvec_configs[i],
1477					req, desc, tsgl, hashstate);
1478		if (err)
1479			return err;
 
1480	}
1481
1482#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1483	if (!noextratests) {
1484		struct testvec_config cfg;
1485		char cfgname[TESTVEC_CONFIG_NAMELEN];
1486
1487		for (i = 0; i < fuzz_iterations; i++) {
1488			generate_random_testvec_config(&cfg, cfgname,
1489						       sizeof(cfgname));
1490			err = test_hash_vec_cfg(driver, vec, vec_name, &cfg,
1491						req, desc, tsgl, hashstate);
1492			if (err)
1493				return err;
1494			cond_resched();
1495		}
1496	}
1497#endif
1498	return 0;
1499}
1500
1501#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1502/*
1503 * Generate a hash test vector from the given implementation.
1504 * Assumes the buffers in 'vec' were already allocated.
1505 */
1506static void generate_random_hash_testvec(struct shash_desc *desc,
1507					 struct hash_testvec *vec,
1508					 unsigned int maxkeysize,
1509					 unsigned int maxdatasize,
1510					 char *name, size_t max_namelen)
1511{
1512	/* Data */
1513	vec->psize = generate_random_length(maxdatasize);
1514	generate_random_bytes((u8 *)vec->plaintext, vec->psize);
1515
1516	/*
1517	 * Key: length in range [1, maxkeysize], but usually choose maxkeysize.
1518	 * If algorithm is unkeyed, then maxkeysize == 0 and set ksize = 0.
1519	 */
1520	vec->setkey_error = 0;
1521	vec->ksize = 0;
1522	if (maxkeysize) {
1523		vec->ksize = maxkeysize;
1524		if (prandom_u32() % 4 == 0)
1525			vec->ksize = 1 + (prandom_u32() % maxkeysize);
1526		generate_random_bytes((u8 *)vec->key, vec->ksize);
1527
1528		vec->setkey_error = crypto_shash_setkey(desc->tfm, vec->key,
1529							vec->ksize);
1530		/* If the key couldn't be set, no need to continue to digest. */
1531		if (vec->setkey_error)
1532			goto done;
1533	}
1534
1535	/* Digest */
1536	vec->digest_error = crypto_shash_digest(desc, vec->plaintext,
1537						vec->psize, (u8 *)vec->digest);
1538done:
1539	snprintf(name, max_namelen, "\"random: psize=%u ksize=%u\"",
1540		 vec->psize, vec->ksize);
1541}
1542
1543/*
1544 * Test the hash algorithm represented by @req against the corresponding generic
1545 * implementation, if one is available.
1546 */
1547static int test_hash_vs_generic_impl(const char *driver,
1548				     const char *generic_driver,
1549				     unsigned int maxkeysize,
1550				     struct ahash_request *req,
1551				     struct shash_desc *desc,
1552				     struct test_sglist *tsgl,
1553				     u8 *hashstate)
1554{
1555	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
1556	const unsigned int digestsize = crypto_ahash_digestsize(tfm);
1557	const unsigned int blocksize = crypto_ahash_blocksize(tfm);
1558	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
1559	const char *algname = crypto_hash_alg_common(tfm)->base.cra_name;
1560	char _generic_driver[CRYPTO_MAX_ALG_NAME];
1561	struct crypto_shash *generic_tfm = NULL;
1562	struct shash_desc *generic_desc = NULL;
1563	unsigned int i;
1564	struct hash_testvec vec = { 0 };
1565	char vec_name[64];
1566	struct testvec_config *cfg;
1567	char cfgname[TESTVEC_CONFIG_NAMELEN];
1568	int err;
1569
1570	if (noextratests)
1571		return 0;
1572
1573	if (!generic_driver) { /* Use default naming convention? */
1574		err = build_generic_driver_name(algname, _generic_driver);
1575		if (err)
1576			return err;
1577		generic_driver = _generic_driver;
1578	}
1579
1580	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
1581		return 0;
1582
1583	generic_tfm = crypto_alloc_shash(generic_driver, 0, 0);
1584	if (IS_ERR(generic_tfm)) {
1585		err = PTR_ERR(generic_tfm);
1586		if (err == -ENOENT) {
1587			pr_warn("alg: hash: skipping comparison tests for %s because %s is unavailable\n",
1588				driver, generic_driver);
1589			return 0;
1590		}
1591		pr_err("alg: hash: error allocating %s (generic impl of %s): %d\n",
1592		       generic_driver, algname, err);
1593		return err;
1594	}
1595
1596	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
1597	if (!cfg) {
1598		err = -ENOMEM;
1599		goto out;
1600	}
 
 
 
 
 
 
 
 
 
1601
1602	generic_desc = kzalloc(sizeof(*desc) +
1603			       crypto_shash_descsize(generic_tfm), GFP_KERNEL);
1604	if (!generic_desc) {
1605		err = -ENOMEM;
1606		goto out;
1607	}
1608	generic_desc->tfm = generic_tfm;
1609
1610	/* Check the algorithm properties for consistency. */
 
 
 
1611
1612	if (digestsize != crypto_shash_digestsize(generic_tfm)) {
1613		pr_err("alg: hash: digestsize for %s (%u) doesn't match generic impl (%u)\n",
1614		       driver, digestsize,
1615		       crypto_shash_digestsize(generic_tfm));
1616		err = -EINVAL;
1617		goto out;
1618	}
1619
1620	if (blocksize != crypto_shash_blocksize(generic_tfm)) {
1621		pr_err("alg: hash: blocksize for %s (%u) doesn't match generic impl (%u)\n",
1622		       driver, blocksize, crypto_shash_blocksize(generic_tfm));
1623		err = -EINVAL;
1624		goto out;
1625	}
1626
1627	/*
1628	 * Now generate test vectors using the generic implementation, and test
1629	 * the other implementation against them.
1630	 */
1631
1632	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
1633	vec.plaintext = kmalloc(maxdatasize, GFP_KERNEL);
1634	vec.digest = kmalloc(digestsize, GFP_KERNEL);
1635	if (!vec.key || !vec.plaintext || !vec.digest) {
1636		err = -ENOMEM;
1637		goto out;
1638	}
1639
1640	for (i = 0; i < fuzz_iterations * 8; i++) {
1641		generate_random_hash_testvec(generic_desc, &vec,
1642					     maxkeysize, maxdatasize,
1643					     vec_name, sizeof(vec_name));
1644		generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));
1645
1646		err = test_hash_vec_cfg(driver, &vec, vec_name, cfg,
1647					req, desc, tsgl, hashstate);
1648		if (err)
1649			goto out;
1650		cond_resched();
1651	}
1652	err = 0;
1653out:
1654	kfree(cfg);
1655	kfree(vec.key);
1656	kfree(vec.plaintext);
1657	kfree(vec.digest);
1658	crypto_free_shash(generic_tfm);
1659	kzfree(generic_desc);
1660	return err;
1661}
1662#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Stub: comparison fuzz tests are compiled out; nothing to do. */
static int test_hash_vs_generic_impl(const char *driver,
				     const char *generic_driver,
				     unsigned int maxkeysize,
				     struct ahash_request *req,
				     struct shash_desc *desc,
				     struct test_sglist *tsgl,
				     u8 *hashstate)
{
	return 0;
}
1673#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1674
1675static int alloc_shash(const char *driver, u32 type, u32 mask,
1676		       struct crypto_shash **tfm_ret,
1677		       struct shash_desc **desc_ret)
1678{
1679	struct crypto_shash *tfm;
1680	struct shash_desc *desc;
1681
1682	tfm = crypto_alloc_shash(driver, type, mask);
1683	if (IS_ERR(tfm)) {
1684		if (PTR_ERR(tfm) == -ENOENT) {
1685			/*
1686			 * This algorithm is only available through the ahash
1687			 * API, not the shash API, so skip the shash tests.
1688			 */
1689			return 0;
1690		}
1691		pr_err("alg: hash: failed to allocate shash transform for %s: %ld\n",
1692		       driver, PTR_ERR(tfm));
1693		return PTR_ERR(tfm);
1694	}
1695
1696	desc = kmalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
1697	if (!desc) {
1698		crypto_free_shash(tfm);
1699		return -ENOMEM;
1700	}
1701	desc->tfm = tfm;
1702
1703	*tfm_ret = tfm;
1704	*desc_ret = desc;
1705	return 0;
1706}
1707
/*
 * Run all hash self-tests for one group of test vectors.
 *
 * Allocates an ahash transform (mandatory), a shash transform/descriptor
 * (optional — only if the algorithm is reachable via the shash API), a
 * scratch scatterlist, and a state buffer sized for export()/import(),
 * then runs every vector in @vecs followed by the generic-implementation
 * comparison fuzz tests.  All resources are released via the 'out' label.
 *
 * Returns 0 on success, negative errno on allocation or test failure.
 */
static int __alg_test_hash(const struct hash_testvec *vecs,
			   unsigned int num_vecs, const char *driver,
			   u32 type, u32 mask,
			   const char *generic_driver, unsigned int maxkeysize)
{
	struct crypto_ahash *atfm = NULL;
	struct ahash_request *req = NULL;
	struct crypto_shash *stfm = NULL;
	struct shash_desc *desc = NULL;
	struct test_sglist *tsgl = NULL;
	u8 *hashstate = NULL;
	unsigned int statesize;
	unsigned int i;
	int err;

	/*
	 * Always test the ahash API.  This works regardless of whether the
	 * algorithm is implemented as ahash or shash.
	 */

	atfm = crypto_alloc_ahash(driver, type, mask);
	if (IS_ERR(atfm)) {
		pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(atfm));
		return PTR_ERR(atfm);
	}




	req = ahash_request_alloc(atfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: hash: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	/*
	 * If available also test the shash API, to cover corner cases that may
	 * be missed by testing the ahash API only.
	 */
	err = alloc_shash(driver, type, mask, &stfm, &desc);
	if (err)
		goto out;





	tsgl = kmalloc(sizeof(*tsgl), GFP_KERNEL);
	if (!tsgl || init_test_sglist(tsgl) != 0) {
		pr_err("alg: hash: failed to allocate test buffers for %s\n",
		       driver);
		kfree(tsgl);
		tsgl = NULL;
		err = -ENOMEM;
		goto out;
	}

	/* State buffer must fit whichever API reports the larger statesize. */
	statesize = crypto_ahash_statesize(atfm);
	if (stfm)
		statesize = max(statesize, crypto_shash_statesize(stfm));
	hashstate = kmalloc(statesize + TESTMGR_POISON_LEN, GFP_KERNEL);
	if (!hashstate) {
		pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < num_vecs; i++) {
		err = test_hash_vec(driver, &vecs[i], i, req, desc, tsgl,
				    hashstate);
		if (err)
			goto out;
		cond_resched();
	}
	err = test_hash_vs_generic_impl(driver, generic_driver, maxkeysize, req,
					desc, tsgl, hashstate);
out:
	kfree(hashstate);
	if (tsgl) {
		destroy_test_sglist(tsgl);
		kfree(tsgl);
	}
	kfree(desc);
	crypto_free_shash(stfm);
	ahash_request_free(req);
	crypto_free_ahash(atfm);
	return err;
}
1793
1794static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1795			 u32 type, u32 mask)
1796{
1797	const struct hash_testvec *template = desc->suite.hash.vecs;
1798	unsigned int tcount = desc->suite.hash.count;
1799	unsigned int nr_unkeyed, nr_keyed;
1800	unsigned int maxkeysize = 0;
1801	int err;
1802
1803	/*
1804	 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1805	 * first, before setting a key on the tfm.  To make this easier, we
1806	 * require that the unkeyed test vectors (if any) are listed first.
1807	 */
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1808
1809	for (nr_unkeyed = 0; nr_unkeyed < tcount; nr_unkeyed++) {
1810		if (template[nr_unkeyed].ksize)
1811			break;
1812	}
1813	for (nr_keyed = 0; nr_unkeyed + nr_keyed < tcount; nr_keyed++) {
1814		if (!template[nr_unkeyed + nr_keyed].ksize) {
1815			pr_err("alg: hash: test vectors for %s out of order, "
1816			       "unkeyed ones must come first\n", desc->alg);
1817			return -EINVAL;
1818		}
1819		maxkeysize = max_t(unsigned int, maxkeysize,
1820				   template[nr_unkeyed + nr_keyed].ksize);
1821	}
1822
1823	err = 0;
1824	if (nr_unkeyed) {
1825		err = __alg_test_hash(template, nr_unkeyed, driver, type, mask,
1826				      desc->generic_driver, maxkeysize);
1827		template += nr_unkeyed;
1828	}
1829
1830	if (!err && nr_keyed)
1831		err = __alg_test_hash(template, nr_keyed, driver, type, mask,
1832				      desc->generic_driver, maxkeysize);
1833
1834	return err;
1835}
1836
/*
 * Test one AEAD test vector in one configuration.
 *
 * @enc: nonzero to test encryption, zero to test decryption
 * @vec: the vector: key, IV, associated data, plaintext, ciphertext (which
 *	 includes the auth tag), and any expected errors
 * @cfg: how to lay out src/dst scatterlists, IV offset, SIMD use, etc.
 * @req: AEAD request for the transform under test
 * @tsgls: scratch src/dst scatterlist structures
 *
 * Besides checking the output, this verifies that the implementation did
 * not corrupt the request structure or the scatterlists.
 *
 * Returns 0 on success (including expected errors), negative errno on
 * test failure.
 */
static int test_aead_vec_cfg(const char *driver, int enc,
			     const struct aead_testvec *vec,
			     const char *vec_name,
			     const struct testvec_config *cfg,
			     struct aead_request *req,
			     struct cipher_test_sglists *tsgls)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int alignmask = crypto_aead_alignmask(tfm);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	/* The tag length is implied by the vector: clen = plen + authsize. */
	const unsigned int authsize = vec->clen - vec->plen;
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized so the IV can be placed at a configurable misalignment. */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input[2];
	int expected_error;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_aead_clear_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = crypto_aead_setkey(tfm, vec->key, vec->klen);
	if (err && err != vec->setkey_error) {
		pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_aead_get_flags(tfm));
		return err;
	}
	if (!err && vec->setkey_error) {
		pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* Set the authentication tag size */
	err = crypto_aead_setauthsize(tfm, authsize);
	if (err && err != vec->setauthsize_error) {
		pr_err("alg: aead: %s setauthsize failed on test vector %s; expected_error=%d, actual_error=%d\n",
		       driver, vec_name, vec->setauthsize_error, err);
		return err;
	}
	if (!err && vec->setauthsize_error) {
		pr_err("alg: aead: %s setauthsize unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setauthsize_error);
		return -EINVAL;
	}


	/* Expected setup errors end the test here, successfully. */
	if (vec->setkey_error || vec->setauthsize_error)
		return 0;

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (WARN_ON(ivsize > MAX_IVLEN))
		return -EINVAL;
	if (vec->iv)
		memcpy(iv, vec->iv, ivsize);
	else
		memset(iv, 0, ivsize);


























	/* Build the src/dst scatterlists */
	input[0].iov_base = (void *)vec->assoc;
	input[0].iov_len = vec->alen;
	input[1].iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input[1].iov_len = enc ? vec->plen : vec->clen;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->alen + (enc ? vec->plen :
						     vec->clen),
					vec->alen + (enc ? vec->clen :
						     vec->plen),
					input, 2);
	if (err) {
		pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_aead_reqsize(tfm));
	aead_request_set_callback(req, req_flags, crypto_req_done, &wait);
	aead_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
			       enc ? vec->plen : vec->clen, iv);
	aead_request_set_ad(req, vec->alen);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != (enc ? vec->plen : vec->clen) ||
	    req->assoclen != vec->alen ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_aead_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: aead: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		if (req->cryptlen != (enc ? vec->plen : vec->clen))
			pr_err("alg: aead: changed 'req->cryptlen'\n");
		if (req->assoclen != vec->alen)
			pr_err("alg: aead: changed 'req->assoclen'\n");
		if (req->iv != iv)
			pr_err("alg: aead: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: aead: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: aead: changed 'req->dst'\n");
		if (crypto_aead_reqtfm(req) != tfm)
			pr_err("alg: aead: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: aead: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: aead: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: aead: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: aead: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: aead: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for success or failure */
	/* A 'novrfy' vector must fail decryption with -EBADMSG. */
	expected_error = vec->novrfy ? -EBADMSG : vec->crypt_error;
	if (err) {
		if (err == expected_error)
			return 0;
		pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, expected_error, err, cfg->name);
		return err;
	}
	if (expected_error) {
		pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, expected_error, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    enc ? vec->clen : vec->plen,
				    vec->alen, enc || !cfg->inplace);
	if (err == -EOVERFLOW) {
		pr_err("alg: aead: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: aead: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	return 0;
}
 
2007
2008static int test_aead_vec(const char *driver, int enc,
2009			 const struct aead_testvec *vec, unsigned int vec_num,
2010			 struct aead_request *req,
2011			 struct cipher_test_sglists *tsgls)
2012{
2013	char vec_name[16];
2014	unsigned int i;
2015	int err;
2016
2017	if (enc && vec->novrfy)
2018		return 0;
 
2019
2020	sprintf(vec_name, "%u", vec_num);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2021
2022	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
2023		err = test_aead_vec_cfg(driver, enc, vec, vec_name,
2024					&default_cipher_testvec_configs[i],
2025					req, tsgls);
2026		if (err)
2027			return err;
2028	}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2029
2030#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2031	if (!noextratests) {
2032		struct testvec_config cfg;
2033		char cfgname[TESTVEC_CONFIG_NAMELEN];
2034
2035		for (i = 0; i < fuzz_iterations; i++) {
2036			generate_random_testvec_config(&cfg, cfgname,
2037						       sizeof(cfgname));
2038			err = test_aead_vec_cfg(driver, enc, vec, vec_name,
2039						&cfg, req, tsgls);
2040			if (err)
2041				return err;
2042			cond_resched();
2043		}
2044	}
2045#endif
2046	return 0;
2047}
2048
2049#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2050/*
2051 * Generate an AEAD test vector from the given implementation.
2052 * Assumes the buffers in 'vec' were already allocated.
2053 */
2054static void generate_random_aead_testvec(struct aead_request *req,
2055					 struct aead_testvec *vec,
2056					 unsigned int maxkeysize,
2057					 unsigned int maxdatasize,
2058					 char *name, size_t max_namelen)
2059{
2060	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2061	const unsigned int ivsize = crypto_aead_ivsize(tfm);
2062	unsigned int maxauthsize = crypto_aead_alg(tfm)->maxauthsize;
2063	unsigned int authsize;
2064	unsigned int total_len;
2065	int i;
2066	struct scatterlist src[2], dst;
2067	u8 iv[MAX_IVLEN];
2068	DECLARE_CRYPTO_WAIT(wait);
2069
2070	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
2071	vec->klen = maxkeysize;
2072	if (prandom_u32() % 4 == 0)
2073		vec->klen = prandom_u32() % (maxkeysize + 1);
2074	generate_random_bytes((u8 *)vec->key, vec->klen);
2075	vec->setkey_error = crypto_aead_setkey(tfm, vec->key, vec->klen);
2076
2077	/* IV */
2078	generate_random_bytes((u8 *)vec->iv, ivsize);
2079
2080	/* Tag length: in [0, maxauthsize], but usually choose maxauthsize */
2081	authsize = maxauthsize;
2082	if (prandom_u32() % 4 == 0)
2083		authsize = prandom_u32() % (maxauthsize + 1);
2084	if (WARN_ON(authsize > maxdatasize))
2085		authsize = maxdatasize;
2086	maxdatasize -= authsize;
2087	vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);
2088
2089	/* Plaintext and associated data */
2090	total_len = generate_random_length(maxdatasize);
2091	if (prandom_u32() % 4 == 0)
2092		vec->alen = 0;
2093	else
2094		vec->alen = generate_random_length(total_len);
2095	vec->plen = total_len - vec->alen;
2096	generate_random_bytes((u8 *)vec->assoc, vec->alen);
2097	generate_random_bytes((u8 *)vec->ptext, vec->plen);
2098
2099	vec->clen = vec->plen + authsize;
2100
2101	/*
2102	 * If the key or authentication tag size couldn't be set, no need to
2103	 * continue to encrypt.
2104	 */
2105	if (vec->setkey_error || vec->setauthsize_error)
2106		goto done;
2107
2108	/* Ciphertext */
2109	sg_init_table(src, 2);
2110	i = 0;
2111	if (vec->alen)
2112		sg_set_buf(&src[i++], vec->assoc, vec->alen);
2113	if (vec->plen)
2114		sg_set_buf(&src[i++], vec->ptext, vec->plen);
2115	sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
2116	memcpy(iv, vec->iv, ivsize);
2117	aead_request_set_callback(req, 0, crypto_req_done, &wait);
2118	aead_request_set_crypt(req, src, &dst, vec->plen, iv);
2119	aead_request_set_ad(req, vec->alen);
2120	vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req), &wait);
2121	if (vec->crypt_error == 0)
2122		memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
2123done:
2124	snprintf(name, max_namelen,
2125		 "\"random: alen=%u plen=%u authsize=%u klen=%u\"",
2126		 vec->alen, vec->plen, authsize, vec->klen);
2127}
2128
2129/*
2130 * Test the AEAD algorithm represented by @req against the corresponding generic
2131 * implementation, if one is available.
2132 */
2133static int test_aead_vs_generic_impl(const char *driver,
2134				     const struct alg_test_desc *test_desc,
2135				     struct aead_request *req,
2136				     struct cipher_test_sglists *tsgls)
2137{
2138	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
2139	const unsigned int ivsize = crypto_aead_ivsize(tfm);
2140	const unsigned int maxauthsize = crypto_aead_alg(tfm)->maxauthsize;
2141	const unsigned int blocksize = crypto_aead_blocksize(tfm);
2142	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
2143	const char *algname = crypto_aead_alg(tfm)->base.cra_name;
2144	const char *generic_driver = test_desc->generic_driver;
2145	char _generic_driver[CRYPTO_MAX_ALG_NAME];
2146	struct crypto_aead *generic_tfm = NULL;
2147	struct aead_request *generic_req = NULL;
2148	unsigned int maxkeysize;
2149	unsigned int i;
2150	struct aead_testvec vec = { 0 };
2151	char vec_name[64];
2152	struct testvec_config *cfg;
2153	char cfgname[TESTVEC_CONFIG_NAMELEN];
2154	int err;
2155
2156	if (noextratests)
2157		return 0;
2158
2159	if (!generic_driver) { /* Use default naming convention? */
2160		err = build_generic_driver_name(algname, _generic_driver);
2161		if (err)
2162			return err;
2163		generic_driver = _generic_driver;
2164	}
2165
2166	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
2167		return 0;
2168
2169	generic_tfm = crypto_alloc_aead(generic_driver, 0, 0);
2170	if (IS_ERR(generic_tfm)) {
2171		err = PTR_ERR(generic_tfm);
2172		if (err == -ENOENT) {
2173			pr_warn("alg: aead: skipping comparison tests for %s because %s is unavailable\n",
2174				driver, generic_driver);
2175			return 0;
2176		}
2177		pr_err("alg: aead: error allocating %s (generic impl of %s): %d\n",
2178		       generic_driver, algname, err);
2179		return err;
2180	}
2181
2182	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
2183	if (!cfg) {
2184		err = -ENOMEM;
2185		goto out;
2186	}
2187
2188	generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
2189	if (!generic_req) {
2190		err = -ENOMEM;
2191		goto out;
2192	}
2193
2194	/* Check the algorithm properties for consistency. */
2195
2196	if (maxauthsize != crypto_aead_alg(generic_tfm)->maxauthsize) {
2197		pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
2198		       driver, maxauthsize,
2199		       crypto_aead_alg(generic_tfm)->maxauthsize);
2200		err = -EINVAL;
2201		goto out;
2202	}
2203
2204	if (ivsize != crypto_aead_ivsize(generic_tfm)) {
2205		pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
2206		       driver, ivsize, crypto_aead_ivsize(generic_tfm));
2207		err = -EINVAL;
2208		goto out;
2209	}
2210
2211	if (blocksize != crypto_aead_blocksize(generic_tfm)) {
2212		pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
2213		       driver, blocksize, crypto_aead_blocksize(generic_tfm));
2214		err = -EINVAL;
2215		goto out;
2216	}
2217
2218	/*
2219	 * Now generate test vectors using the generic implementation, and test
2220	 * the other implementation against them.
2221	 */
2222
2223	maxkeysize = 0;
2224	for (i = 0; i < test_desc->suite.aead.count; i++)
2225		maxkeysize = max_t(unsigned int, maxkeysize,
2226				   test_desc->suite.aead.vecs[i].klen);
2227
2228	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
2229	vec.iv = kmalloc(ivsize, GFP_KERNEL);
2230	vec.assoc = kmalloc(maxdatasize, GFP_KERNEL);
2231	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
2232	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
2233	if (!vec.key || !vec.iv || !vec.assoc || !vec.ptext || !vec.ctext) {
2234		err = -ENOMEM;
2235		goto out;
2236	}
2237
2238	for (i = 0; i < fuzz_iterations * 8; i++) {
2239		generate_random_aead_testvec(generic_req, &vec,
2240					     maxkeysize, maxdatasize,
2241					     vec_name, sizeof(vec_name));
2242		generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));
2243
2244		err = test_aead_vec_cfg(driver, ENCRYPT, &vec, vec_name, cfg,
2245					req, tsgls);
2246		if (err)
2247			goto out;
2248		err = test_aead_vec_cfg(driver, DECRYPT, &vec, vec_name, cfg,
2249					req, tsgls);
2250		if (err)
2251			goto out;
2252		cond_resched();
2253	}
2254	err = 0;
2255out:
2256	kfree(cfg);
2257	kfree(vec.key);
2258	kfree(vec.iv);
2259	kfree(vec.assoc);
2260	kfree(vec.ptext);
2261	kfree(vec.ctext);
2262	crypto_free_aead(generic_tfm);
2263	aead_request_free(generic_req);
2264	return err;
2265}
2266#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Extra crypto self-tests are disabled: comparison testing is a no-op. */
static int test_aead_vs_generic_impl(const char *driver,
				     const struct alg_test_desc *test_desc,
				     struct aead_request *req,
				     struct cipher_test_sglists *tsgls)
{
	return 0;
}
2274#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
2275
2276static int test_aead(const char *driver, int enc,
2277		     const struct aead_test_suite *suite,
2278		     struct aead_request *req,
2279		     struct cipher_test_sglists *tsgls)
2280{
2281	unsigned int i;
2282	int err;
2283
2284	for (i = 0; i < suite->count; i++) {
2285		err = test_aead_vec(driver, enc, &suite->vecs[i], i, req,
2286				    tsgls);
2287		if (err)
2288			return err;
2289		cond_resched();
2290	}
2291	return 0;
2292}
2293
2294static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
2295			 u32 type, u32 mask)
2296{
2297	const struct aead_test_suite *suite = &desc->suite.aead;
2298	struct crypto_aead *tfm;
2299	struct aead_request *req = NULL;
2300	struct cipher_test_sglists *tsgls = NULL;
2301	int err;
2302
2303	if (suite->count <= 0) {
2304		pr_err("alg: aead: empty test suite for %s\n", driver);
2305		return -EINVAL;
2306	}
2307
2308	tfm = crypto_alloc_aead(driver, type, mask);
2309	if (IS_ERR(tfm)) {
2310		pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
2311		       driver, PTR_ERR(tfm));
2312		return PTR_ERR(tfm);
2313	}
2314
2315	req = aead_request_alloc(tfm, GFP_KERNEL);
2316	if (!req) {
2317		pr_err("alg: aead: failed to allocate request for %s\n",
2318		       driver);
2319		err = -ENOMEM;
2320		goto out;
2321	}
2322
2323	tsgls = alloc_cipher_test_sglists();
2324	if (!tsgls) {
2325		pr_err("alg: aead: failed to allocate test buffers for %s\n",
2326		       driver);
2327		err = -ENOMEM;
2328		goto out;
 
2329	}
2330
2331	err = test_aead(driver, ENCRYPT, suite, req, tsgls);
2332	if (err)
2333		goto out;
2334
2335	err = test_aead(driver, DECRYPT, suite, req, tsgls);
2336	if (err)
2337		goto out;
2338
2339	err = test_aead_vs_generic_impl(driver, desc, req, tsgls);
2340out:
2341	free_cipher_test_sglists(tsgls);
2342	aead_request_free(req);
2343	crypto_free_aead(tfm);
2344	return err;
2345}
2346
/*
 * Test a single-block cipher (crypto_cipher) against a table of test vectors,
 * encrypting or decrypting each vector in place, one cipher block at a time.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	/* 'j' counts the vectors actually run; used in error messages. */
	j = 0;
	for (i = 0; i < tcount; i++) {

		if (fips_enabled && template[i].fips_skip)
			continue;

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (ret) {
			/* A setkey failure may itself be the expected result. */
			if (ret == template[i].setkey_error)
				continue;
			pr_err("alg: cipher: %s setkey failed on test vector %u; expected_error=%d, actual_error=%d, flags=%#x\n",
			       algo, j, template[i].setkey_error, ret,
			       crypto_cipher_get_flags(tfm));
			goto out;
		}
		if (template[i].setkey_error) {
			pr_err("alg: cipher: %s setkey unexpectedly succeeded on test vector %u; expected_error=%d\n",
			       algo, j, template[i].setkey_error);
			ret = -EINVAL;
			goto out;
		}

		/* Process the data in place, one cipher block at a time. */
		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
2433
/*
 * Run one skcipher test vector through one specific testvec configuration:
 * set the key, build the scatterlists as the config dictates, do the
 * en/decryption, then verify that the request struct and scatterlists were
 * not corrupted, that the result (or error) matches the vector, and, if
 * applicable, that the correct output IV was generated.
 */
static int test_skcipher_vec_cfg(const char *driver, int enc,
				 const struct cipher_testvec *vec,
				 const char *vec_name,
				 const struct testvec_config *cfg,
				 struct skcipher_request *req,
				 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	/* Oversized so the IV can be placed at a config-chosen offset. */
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_skcipher_clear_flags(tfm,
					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
	if (err) {
		/* A setkey failure may itself be the expected result. */
		if (err == vec->setkey_error)
			return 0;
		pr_err("alg: skcipher: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_skcipher_get_flags(tfm));
		return err;
	}
	if (vec->setkey_error) {
		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (ivsize) {
		if (WARN_ON(ivsize > MAX_IVLEN))
			return -EINVAL;
		/* IV-generating ciphers take the output IV when decrypting. */
		if (vec->generates_iv && !enc)
			memcpy(iv, vec->iv_out, ivsize);
		else if (vec->iv)
			memcpy(iv, vec->iv, ivsize);
		else
			memset(iv, 0, ivsize);
	} else {
		if (vec->generates_iv) {
			pr_err("alg: skcipher: %s has ivsize=0 but test vector %s generates IV!\n",
			       driver, vec_name);
			return -EINVAL;
		}
		iv = NULL;
	}

	/* Build the src/dst scatterlists */
	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input.iov_len = vec->len;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->len, vec->len, &input, 1);
	if (err) {
		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
				   vec->len, iv);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != vec->len ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_skcipher_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		if (req->cryptlen != vec->len)
			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
		if (req->iv != iv)
			pr_err("alg: skcipher: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->dst'\n");
		if (crypto_skcipher_reqtfm(req) != tfm)
			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: skcipher: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: skcipher: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: skcipher: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for success or failure */
	if (err) {
		if (err == vec->crypt_error)
			return 0;
		pr_err("alg: skcipher: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, err, cfg->name);
		return err;
	}
	if (vec->crypt_error) {
		pr_err("alg: skcipher: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    vec->len, 0, true);
	if (err == -EOVERFLOW) {
		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* If applicable, check that the algorithm generated the correct IV */
	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		hexdump(iv, ivsize);
		return -EINVAL;
	}

	return 0;
}
2596
2597static int test_skcipher_vec(const char *driver, int enc,
2598			     const struct cipher_testvec *vec,
2599			     unsigned int vec_num,
2600			     struct skcipher_request *req,
2601			     struct cipher_test_sglists *tsgls)
2602{
2603	char vec_name[16];
2604	unsigned int i;
2605	int err;
2606
2607	if (fips_enabled && vec->fips_skip)
2608		return 0;
2609
2610	sprintf(vec_name, "%u", vec_num);
2611
2612	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
2613		err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
2614					    &default_cipher_testvec_configs[i],
2615					    req, tsgls);
2616		if (err)
2617			return err;
2618	}
2619
2620#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2621	if (!noextratests) {
2622		struct testvec_config cfg;
2623		char cfgname[TESTVEC_CONFIG_NAMELEN];
2624
2625		for (i = 0; i < fuzz_iterations; i++) {
2626			generate_random_testvec_config(&cfg, cfgname,
2627						       sizeof(cfgname));
2628			err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
2629						    &cfg, req, tsgls);
2630			if (err)
2631				return err;
2632			cond_resched();
2633		}
2634	}
2635#endif
2636	return 0;
2637}
2638
2639#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2640/*
2641 * Generate a symmetric cipher test vector from the given implementation.
2642 * Assumes the buffers in 'vec' were already allocated.
2643 */
2644static void generate_random_cipher_testvec(struct skcipher_request *req,
2645					   struct cipher_testvec *vec,
2646					   unsigned int maxdatasize,
2647					   char *name, size_t max_namelen)
2648{
2649	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2650	const unsigned int maxkeysize = tfm->keysize;
2651	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2652	struct scatterlist src, dst;
2653	u8 iv[MAX_IVLEN];
2654	DECLARE_CRYPTO_WAIT(wait);
2655
2656	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
2657	vec->klen = maxkeysize;
2658	if (prandom_u32() % 4 == 0)
2659		vec->klen = prandom_u32() % (maxkeysize + 1);
2660	generate_random_bytes((u8 *)vec->key, vec->klen);
2661	vec->setkey_error = crypto_skcipher_setkey(tfm, vec->key, vec->klen);
2662
2663	/* IV */
2664	generate_random_bytes((u8 *)vec->iv, ivsize);
2665
2666	/* Plaintext */
2667	vec->len = generate_random_length(maxdatasize);
2668	generate_random_bytes((u8 *)vec->ptext, vec->len);
2669
2670	/* If the key couldn't be set, no need to continue to encrypt. */
2671	if (vec->setkey_error)
2672		goto done;
2673
2674	/* Ciphertext */
2675	sg_init_one(&src, vec->ptext, vec->len);
2676	sg_init_one(&dst, vec->ctext, vec->len);
2677	memcpy(iv, vec->iv, ivsize);
2678	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
2679	skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
2680	vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
2681done:
2682	snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
2683		 vec->len, vec->klen);
2684}
2685
2686/*
2687 * Test the skcipher algorithm represented by @req against the corresponding
2688 * generic implementation, if one is available.
2689 */
2690static int test_skcipher_vs_generic_impl(const char *driver,
2691					 const char *generic_driver,
2692					 struct skcipher_request *req,
2693					 struct cipher_test_sglists *tsgls)
2694{
2695	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
2696	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
2697	const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
2698	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
2699	const char *algname = crypto_skcipher_alg(tfm)->base.cra_name;
2700	char _generic_driver[CRYPTO_MAX_ALG_NAME];
2701	struct crypto_skcipher *generic_tfm = NULL;
2702	struct skcipher_request *generic_req = NULL;
2703	unsigned int i;
2704	struct cipher_testvec vec = { 0 };
2705	char vec_name[64];
2706	struct testvec_config *cfg;
2707	char cfgname[TESTVEC_CONFIG_NAMELEN];
2708	int err;
2709
2710	if (noextratests)
2711		return 0;
 
 
 
 
2712
2713	/* Keywrap isn't supported here yet as it handles its IV differently. */
2714	if (strncmp(algname, "kw(", 3) == 0)
2715		return 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2716
2717	if (!generic_driver) { /* Use default naming convention? */
2718		err = build_generic_driver_name(algname, _generic_driver);
2719		if (err)
2720			return err;
2721		generic_driver = _generic_driver;
 
 
 
 
2722	}
2723
2724	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
2725		return 0;
 
 
 
2726
2727	generic_tfm = crypto_alloc_skcipher(generic_driver, 0, 0);
2728	if (IS_ERR(generic_tfm)) {
2729		err = PTR_ERR(generic_tfm);
2730		if (err == -ENOENT) {
2731			pr_warn("alg: skcipher: skipping comparison tests for %s because %s is unavailable\n",
2732				driver, generic_driver);
2733			return 0;
2734		}
2735		pr_err("alg: skcipher: error allocating %s (generic impl of %s): %d\n",
2736		       generic_driver, algname, err);
2737		return err;
2738	}
2739
2740	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
2741	if (!cfg) {
2742		err = -ENOMEM;
2743		goto out;
2744	}
2745
2746	generic_req = skcipher_request_alloc(generic_tfm, GFP_KERNEL);
2747	if (!generic_req) {
2748		err = -ENOMEM;
2749		goto out;
2750	}
 
 
 
 
 
 
 
 
 
2751
2752	/* Check the algorithm properties for consistency. */
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2753
2754	if (tfm->keysize != generic_tfm->keysize) {
2755		pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
2756		       driver, tfm->keysize, generic_tfm->keysize);
2757		err = -EINVAL;
2758		goto out;
2759	}
2760
2761	if (ivsize != crypto_skcipher_ivsize(generic_tfm)) {
2762		pr_err("alg: skcipher: ivsize for %s (%u) doesn't match generic impl (%u)\n",
2763		       driver, ivsize, crypto_skcipher_ivsize(generic_tfm));
2764		err = -EINVAL;
2765		goto out;
2766	}
 
2767
2768	if (blocksize != crypto_skcipher_blocksize(generic_tfm)) {
2769		pr_err("alg: skcipher: blocksize for %s (%u) doesn't match generic impl (%u)\n",
2770		       driver, blocksize,
2771		       crypto_skcipher_blocksize(generic_tfm));
2772		err = -EINVAL;
2773		goto out;
2774	}
 
 
 
 
 
 
 
 
 
 
2775
2776	/*
2777	 * Now generate test vectors using the generic implementation, and test
2778	 * the other implementation against them.
2779	 */
2780
2781	vec.key = kmalloc(tfm->keysize, GFP_KERNEL);
2782	vec.iv = kmalloc(ivsize, GFP_KERNEL);
2783	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
2784	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
2785	if (!vec.key || !vec.iv || !vec.ptext || !vec.ctext) {
2786		err = -ENOMEM;
2787		goto out;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2788	}
2789
2790	for (i = 0; i < fuzz_iterations * 8; i++) {
2791		generate_random_cipher_testvec(generic_req, &vec, maxdatasize,
2792					       vec_name, sizeof(vec_name));
2793		generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));
2794
2795		err = test_skcipher_vec_cfg(driver, ENCRYPT, &vec, vec_name,
2796					    cfg, req, tsgls);
2797		if (err)
2798			goto out;
2799		err = test_skcipher_vec_cfg(driver, DECRYPT, &vec, vec_name,
2800					    cfg, req, tsgls);
2801		if (err)
2802			goto out;
2803		cond_resched();
2804	}
2805	err = 0;
2806out:
2807	kfree(cfg);
2808	kfree(vec.key);
2809	kfree(vec.iv);
2810	kfree(vec.ptext);
2811	kfree(vec.ctext);
2812	crypto_free_skcipher(generic_tfm);
2813	skcipher_request_free(generic_req);
2814	return err;
2815}
2816#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
/* Extra crypto self-tests are disabled: comparison testing is a no-op. */
static int test_skcipher_vs_generic_impl(const char *driver,
					 const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	return 0;
}
2824#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
2825
2826static int test_skcipher(const char *driver, int enc,
2827			 const struct cipher_test_suite *suite,
2828			 struct skcipher_request *req,
2829			 struct cipher_test_sglists *tsgls)
2830{
2831	unsigned int i;
2832	int err;
2833
2834	for (i = 0; i < suite->count; i++) {
2835		err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
2836					tsgls);
2837		if (err)
2838			return err;
2839		cond_resched();
2840	}
2841	return 0;
2842}
2843
2844static int alg_test_skcipher(const struct alg_test_desc *desc,
2845			     const char *driver, u32 type, u32 mask)
2846{
2847	const struct cipher_test_suite *suite = &desc->suite.cipher;
2848	struct crypto_skcipher *tfm;
2849	struct skcipher_request *req = NULL;
2850	struct cipher_test_sglists *tsgls = NULL;
2851	int err;
2852
2853	if (suite->count <= 0) {
2854		pr_err("alg: skcipher: empty test suite for %s\n", driver);
2855		return -EINVAL;
2856	}
2857
2858	tfm = crypto_alloc_skcipher(driver, type, mask);
2859	if (IS_ERR(tfm)) {
2860		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
2861		       driver, PTR_ERR(tfm));
2862		return PTR_ERR(tfm);
2863	}
2864
2865	req = skcipher_request_alloc(tfm, GFP_KERNEL);
2866	if (!req) {
2867		pr_err("alg: skcipher: failed to allocate request for %s\n",
2868		       driver);
2869		err = -ENOMEM;
2870		goto out;
2871	}
2872
2873	tsgls = alloc_cipher_test_sglists();
2874	if (!tsgls) {
2875		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
2876		       driver);
2877		err = -ENOMEM;
2878		goto out;
 
2879	}
2880
2881	err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
2882	if (err)
2883		goto out;
2884
2885	err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
2886	if (err)
2887		goto out;
2888
2889	err = test_skcipher_vs_generic_impl(driver, desc->generic_driver, req,
2890					    tsgls);
2891out:
2892	free_cipher_test_sglists(tsgls);
2893	skcipher_request_free(req);
2894	crypto_free_skcipher(tfm);
2895	return err;
2896}
2897
/*
 * test_comp - run compression/decompression known-answer tests through the
 * synchronous (crypto_comp) API.
 * @tfm:	compression transform under test
 * @ctemplate:	compression test vectors
 * @dtemplate:	decompression test vectors
 * @ctcount:	number of compression vectors
 * @dtcount:	number of decompression vectors
 *
 * Compression vectors are verified round-trip: the input is compressed and
 * then decompressed, and the result must match the original input.  The
 * compressed bytes themselves are not compared, so implementations with
 * different (but valid) encoder output still pass.  Decompression vectors
 * are compared byte-for-byte against the expected output.
 *
 * Return: 0 on success, negative errno on failure.
 */
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	/* Compression vectors: compress, then decompress and compare. */
	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		/* Feed the compressed output back through decompression. */
		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		/* Round trip must reproduce the original length ... */
		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		/* ... and the original bytes. */
		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/* Decompression vectors: compare directly against expected output. */
	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	kfree(decomp_output);
	kfree(output);
	return ret;
}
3003
/*
 * test_acomp - run compression/decompression known-answer tests through the
 * asynchronous (crypto_acomp) API.
 * @tfm:	acomp transform under test
 * @ctemplate:	compression test vectors
 * @dtemplate:	decompression test vectors
 * @ctcount:	number of compression vectors
 * @dtcount:	number of decompression vectors
 *
 * As with test_comp(), compression vectors are verified round-trip
 * (compress then decompress, compare against the original input), while
 * decompression vectors are compared directly against the expected output.
 * Each vector allocates a fresh request and a fresh copy of the input so
 * that in-place modification by a driver cannot leak between vectors.
 *
 * Return: 0 on success, negative errno on failure.
 */
static int test_acomp(struct crypto_acomp *tfm,
			      const struct comp_testvec *ctemplate,
		      const struct comp_testvec *dtemplate,
		      int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
	unsigned int i;
	char *output, *decomp_out;
	int ret;
	struct scatterlist src, dst;
	struct acomp_req *req;
	struct crypto_wait wait;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_out) {
		kfree(output);
		return -ENOMEM;
	}

	/* Compression vectors: compress, decompress, compare round trip. */
	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = ctemplate[i].inlen;
		void *input_vec;

		/* Private copy of the input; drivers may touch the source. */
		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		/* Reuse the request to decompress what we just produced. */
		ilen = req->dlen;
		dlen = COMP_BUF_SIZE;
		sg_init_one(&src, output, ilen);
		sg_init_one(&dst, decomp_out, dlen);
		crypto_init_wait(&wait);
		acomp_request_set_params(req, &src, &dst, ilen, dlen);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		/* Round trip must reproduce the original length and bytes. */
		if (req->dlen != ctemplate[i].inlen) {
			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(input_vec, decomp_out, req->dlen)) {
			pr_err("alg: acomp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	/* Decompression vectors: compare directly against expected output. */
	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = dtemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != dtemplate[i].outlen) {
			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(output, dtemplate[i].output, req->dlen)) {
			pr_err("alg: acomp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	ret = 0;

out:
	kfree(decomp_out);
	kfree(output);
	return ret;
}
3172
3173static int test_cprng(struct crypto_rng *tfm,
3174		      const struct cprng_testvec *template,
3175		      unsigned int tcount)
3176{
3177	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
3178	int err = 0, i, j, seedsize;
3179	u8 *seed;
3180	char result[32];
3181
3182	seedsize = crypto_rng_seedsize(tfm);
3183
3184	seed = kmalloc(seedsize, GFP_KERNEL);
3185	if (!seed) {
3186		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
3187		       "for %s\n", algo);
3188		return -ENOMEM;
3189	}
3190
3191	for (i = 0; i < tcount; i++) {
3192		memset(result, 0, 32);
3193
3194		memcpy(seed, template[i].v, template[i].vlen);
3195		memcpy(seed + template[i].vlen, template[i].key,
3196		       template[i].klen);
3197		memcpy(seed + template[i].vlen + template[i].klen,
3198		       template[i].dt, template[i].dtlen);
3199
3200		err = crypto_rng_reset(tfm, seed, seedsize);
3201		if (err) {
3202			printk(KERN_ERR "alg: cprng: Failed to reset rng "
3203			       "for %s\n", algo);
3204			goto out;
3205		}
3206
3207		for (j = 0; j < template[i].loops; j++) {
3208			err = crypto_rng_get_bytes(tfm, result,
3209						   template[i].rlen);
3210			if (err < 0) {
3211				printk(KERN_ERR "alg: cprng: Failed to obtain "
3212				       "the correct amount of random data for "
3213				       "%s (requested %d)\n", algo,
3214				       template[i].rlen);
3215				goto out;
3216			}
3217		}
3218
3219		err = memcmp(result, template[i].result,
3220			     template[i].rlen);
3221		if (err) {
3222			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
3223			       i, algo);
3224			hexdump(result, template[i].rlen);
3225			err = -EINVAL;
3226			goto out;
3227		}
3228	}
3229
3230out:
3231	kfree(seed);
3232	return err;
3233}
3234
3235static int alg_test_cipher(const struct alg_test_desc *desc,
3236			   const char *driver, u32 type, u32 mask)
3237{
3238	const struct cipher_test_suite *suite = &desc->suite.cipher;
3239	struct crypto_cipher *tfm;
3240	int err;
3241
3242	tfm = crypto_alloc_cipher(driver, type, mask);
3243	if (IS_ERR(tfm)) {
3244		printk(KERN_ERR "alg: cipher: Failed to load transform for "
3245		       "%s: %ld\n", driver, PTR_ERR(tfm));
3246		return PTR_ERR(tfm);
3247	}
3248
3249	err = test_cipher(tfm, ENCRYPT, suite->vecs, suite->count);
3250	if (!err)
3251		err = test_cipher(tfm, DECRYPT, suite->vecs, suite->count);
3252
3253	crypto_free_cipher(tfm);
3254	return err;
3255}
3256
3257static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
3258			 u32 type, u32 mask)
3259{
3260	struct crypto_comp *comp;
3261	struct crypto_acomp *acomp;
3262	int err;
3263	u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
3264
3265	if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
3266		acomp = crypto_alloc_acomp(driver, type, mask);
3267		if (IS_ERR(acomp)) {
3268			pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
3269			       driver, PTR_ERR(acomp));
3270			return PTR_ERR(acomp);
3271		}
3272		err = test_acomp(acomp, desc->suite.comp.comp.vecs,
3273				 desc->suite.comp.decomp.vecs,
3274				 desc->suite.comp.comp.count,
3275				 desc->suite.comp.decomp.count);
3276		crypto_free_acomp(acomp);
3277	} else {
3278		comp = crypto_alloc_comp(driver, type, mask);
3279		if (IS_ERR(comp)) {
3280			pr_err("alg: comp: Failed to load transform for %s: %ld\n",
3281			       driver, PTR_ERR(comp));
3282			return PTR_ERR(comp);
3283		}
3284
3285		err = test_comp(comp, desc->suite.comp.comp.vecs,
3286				desc->suite.comp.decomp.vecs,
3287				desc->suite.comp.comp.count,
3288				desc->suite.comp.decomp.count);
3289
3290		crypto_free_comp(comp);
3291	}
3292	return err;
3293}
3294
3295static int alg_test_crc32c(const struct alg_test_desc *desc,
3296			   const char *driver, u32 type, u32 mask)
3297{
3298	struct crypto_shash *tfm;
3299	__le32 val;
3300	int err;
3301
3302	err = alg_test_hash(desc, driver, type, mask);
3303	if (err)
3304		return err;
3305
3306	tfm = crypto_alloc_shash(driver, type, mask);
3307	if (IS_ERR(tfm)) {
3308		if (PTR_ERR(tfm) == -ENOENT) {
3309			/*
3310			 * This crc32c implementation is only available through
3311			 * ahash API, not the shash API, so the remaining part
3312			 * of the test is not applicable to it.
3313			 */
3314			return 0;
3315		}
3316		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
3317		       "%ld\n", driver, PTR_ERR(tfm));
3318		return PTR_ERR(tfm);
3319	}
3320
3321	do {
3322		SHASH_DESC_ON_STACK(shash, tfm);
3323		u32 *ctx = (u32 *)shash_desc_ctx(shash);
3324
3325		shash->tfm = tfm;
3326
3327		*ctx = 420553207;
3328		err = crypto_shash_final(shash, (u8 *)&val);
3329		if (err) {
3330			printk(KERN_ERR "alg: crc32c: Operation failed for "
3331			       "%s: %d\n", driver, err);
3332			break;
3333		}
3334
3335		if (val != cpu_to_le32(~420553207)) {
3336			pr_err("alg: crc32c: Test failed for %s: %u\n",
3337			       driver, le32_to_cpu(val));
3338			err = -EINVAL;
3339		}
3340	} while (0);
3341
3342	crypto_free_shash(tfm);
3343
3344	return err;
3345}
3346
3347static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
3348			  u32 type, u32 mask)
3349{
3350	struct crypto_rng *rng;
3351	int err;
3352
3353	rng = crypto_alloc_rng(driver, type, mask);
3354	if (IS_ERR(rng)) {
3355		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
3356		       "%ld\n", driver, PTR_ERR(rng));
3357		return PTR_ERR(rng);
3358	}
3359
3360	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
3361
3362	crypto_free_rng(rng);
3363
 
 
3364	return err;
3365}
3366
3367
/*
 * drbg_cavs_test - run one NIST CAVS known-answer test against a DRBG.
 * @test:	the test vector (entropy, personalization, additional data,
 *		expected output)
 * @pr:		non-zero for prediction-resistance vectors, which feed fresh
 *		test entropy into each generate call
 * @driver:	DRBG driver name
 * @type/@mask:	crypto API allocation flags
 *
 * The CAVS procedure is: instantiate with known entropy and personalization
 * string, generate twice (each with its own additional input, and for PR
 * vectors its own entropy), then compare the second output block against
 * the expected value.
 *
 * Return: 0 on success, negative errno on failure, or the (non-zero)
 * memcmp result if the final output mismatches.
 */
static int drbg_cavs_test(const struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	/* Instantiate with the vector's entropy and personalization string. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call; PR vectors get fresh entropy (entpra). */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl,	&test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/* Second generate call; only this output is compared. */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}
3434
3435
3436static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
3437			 u32 type, u32 mask)
3438{
3439	int err = 0;
3440	int pr = 0;
3441	int i = 0;
3442	const struct drbg_testvec *template = desc->suite.drbg.vecs;
3443	unsigned int tcount = desc->suite.drbg.count;
3444
3445	if (0 == memcmp(driver, "drbg_pr_", 8))
3446		pr = 1;
3447
3448	for (i = 0; i < tcount; i++) {
3449		err = drbg_cavs_test(&template[i], pr, driver, type, mask);
3450		if (err) {
3451			printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
3452			       i, driver);
3453			err = -EINVAL;
3454			break;
3455		}
3456	}
3457	return err;
3458
 
 
 
 
 
 
 
3459}
3460
/*
 * do_test_kpp - run one key-agreement (KPP) test vector.
 * @tfm:	KPP transform under test
 * @vec:	the test vector
 * @alg:	algorithm name, used in error messages
 *
 * Acting as "party A", this sets the secret, generates A's public key, and
 * computes the shared secret from B's public key.  For vectors with fixed
 * keys (!vec->genkey) both the public key and the shared secret are
 * compared against expected values.  For generated-key vectors, party B's
 * side is also computed (using A's public key) and the two shared secrets
 * must agree.
 *
 * Return: 0 on success, negative errno on failure.
 */
static int do_test_kpp(struct crypto_kpp *tfm, const struct kpp_testvec *vec,
		       const char *alg)
{
	struct kpp_request *req;
	void *input_buf = NULL;
	void *output_buf = NULL;
	void *a_public = NULL;
	void *a_ss = NULL;
	void *shared_secret = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max;
	int err = -ENOMEM;
	struct scatterlist src, dst;

	req = kpp_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		return err;

	crypto_init_wait(&wait);

	err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
	if (err < 0)
		goto free_req;

	out_len_max = crypto_kpp_maxsize(tfm);
	output_buf = kzalloc(out_len_max, GFP_KERNEL);
	if (!output_buf) {
		err = -ENOMEM;
		goto free_req;
	}

	/* Use appropriate parameter as base */
	kpp_request_set_input(req, NULL, 0);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);

	/* Compute party A's public key */
	err = crypto_wait_req(crypto_kpp_generate_public_key(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
		       alg, err);
		goto free_output;
	}

	if (vec->genkey) {
		/* Save party A's public key */
		a_public = kmemdup(sg_virt(req->dst), out_len_max, GFP_KERNEL);
		if (!a_public) {
			err = -ENOMEM;
			goto free_output;
		}
	} else {
		/* Verify calculated public key */
		if (memcmp(vec->expected_a_public, sg_virt(req->dst),
			   vec->expected_a_public_size)) {
			pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
			       alg);
			err = -EINVAL;
			goto free_output;
		}
	}

	/* Calculate shared secret key by using counter part (b) public key. */
	input_buf = kmemdup(vec->b_public, vec->b_public_size, GFP_KERNEL);
	if (!input_buf) {
		err = -ENOMEM;
		goto free_output;
	}

	sg_init_one(&src, input_buf, vec->b_public_size);
	sg_init_one(&dst, output_buf, out_len_max);
	kpp_request_set_input(req, &src, vec->b_public_size);
	kpp_request_set_output(req, &dst, out_len_max);
	kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				 crypto_req_done, &wait);
	err = crypto_wait_req(crypto_kpp_compute_shared_secret(req), &wait);
	if (err) {
		pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
		       alg, err);
		goto free_all;
	}

	if (vec->genkey) {
		/* Save the shared secret obtained by party A */
		a_ss = kmemdup(sg_virt(req->dst), vec->expected_ss_size, GFP_KERNEL);
		if (!a_ss) {
			err = -ENOMEM;
			goto free_all;
		}

		/*
		 * Calculate party B's shared secret by using party A's
		 * public key.
		 */
		err = crypto_kpp_set_secret(tfm, vec->b_secret,
					    vec->b_secret_size);
		if (err < 0)
			goto free_all;

		sg_init_one(&src, a_public, vec->expected_a_public_size);
		sg_init_one(&dst, output_buf, out_len_max);
		kpp_request_set_input(req, &src, vec->expected_a_public_size);
		kpp_request_set_output(req, &dst, out_len_max);
		kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					 crypto_req_done, &wait);
		err = crypto_wait_req(crypto_kpp_compute_shared_secret(req),
				      &wait);
		if (err) {
			pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
			       alg, err);
			goto free_all;
		}

		/* B's output (in req->dst) must match A's saved secret. */
		shared_secret = a_ss;
	} else {
		shared_secret = (void *)vec->expected_ss;
	}

	/*
	 * verify shared secret from which the user will derive
	 * secret key by executing whatever hash it has chosen
	 */
	if (memcmp(shared_secret, sg_virt(req->dst),
		   vec->expected_ss_size)) {
		pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
		       alg);
		err = -EINVAL;
	}

free_all:
	kfree(a_ss);
	kfree(input_buf);
free_output:
	kfree(a_public);
	kfree(output_buf);
free_req:
	kpp_request_free(req);
	return err;
}
3602
3603static int test_kpp(struct crypto_kpp *tfm, const char *alg,
3604		    const struct kpp_testvec *vecs, unsigned int tcount)
3605{
3606	int ret, i;
3607
3608	for (i = 0; i < tcount; i++) {
3609		ret = do_test_kpp(tfm, vecs++, alg);
3610		if (ret) {
3611			pr_err("alg: %s: test failed on vector %d, err=%d\n",
3612			       alg, i + 1, ret);
3613			return ret;
3614		}
3615	}
3616	return 0;
3617}
3618
3619static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
3620			u32 type, u32 mask)
3621{
3622	struct crypto_kpp *tfm;
3623	int err = 0;
3624
3625	tfm = crypto_alloc_kpp(driver, type, mask);
3626	if (IS_ERR(tfm)) {
3627		pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
3628		       driver, PTR_ERR(tfm));
3629		return PTR_ERR(tfm);
3630	}
3631	if (desc->suite.kpp.vecs)
3632		err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
3633			       desc->suite.kpp.count);
3634
3635	crypto_free_kpp(tfm);
3636	return err;
3637}
 
 
3638
3639static u8 *test_pack_u32(u8 *dst, u32 val)
3640{
3641	memcpy(dst, &val, sizeof(val));
3642	return dst + sizeof(val);
3643}
3644
/*
 * test_akcipher_one - run one akcipher (public-key) test vector.
 * @tfm:	akcipher transform under test
 * @vecs:	the test vector
 *
 * For encryption vectors: encrypt vecs->m and compare against vecs->c,
 * then (if a private key is available) decrypt vecs->c and compare against
 * vecs->m.  For signature vectors (vecs->siggen_sigver_test) the roles of
 * m and c are swapped so the same code path first verifies the signature
 * and then generates one.
 *
 * Return: 0 on success, negative errno on failure.
 */
static int test_akcipher_one(struct crypto_akcipher *tfm,
			     const struct akcipher_testvec *vecs)
{
	char *xbuf[XBUFSIZE];
	struct akcipher_request *req;
	void *outbuf_enc = NULL;
	void *outbuf_dec = NULL;
	struct crypto_wait wait;
	unsigned int out_len_max, out_len = 0;
	int err = -ENOMEM;
	struct scatterlist src, dst, src_tab[3];
	const char *m, *c;
	unsigned int m_size, c_size;
	const char *op;
	u8 *key, *ptr;

	if (testmgr_alloc_buf(xbuf))
		return err;

	req = akcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req)
		goto free_xbuf;

	crypto_init_wait(&wait);

	/* Key blob layout: key || algo (u32) || param_len (u32) || params. */
	key = kmalloc(vecs->key_len + sizeof(u32) * 2 + vecs->param_len,
		      GFP_KERNEL);
	if (!key)
		goto free_xbuf;
	memcpy(key, vecs->key, vecs->key_len);
	ptr = key + vecs->key_len;
	ptr = test_pack_u32(ptr, vecs->algo);
	ptr = test_pack_u32(ptr, vecs->param_len);
	memcpy(ptr, vecs->params, vecs->param_len);

	if (vecs->public_key_vec)
		err = crypto_akcipher_set_pub_key(tfm, key, vecs->key_len);
	else
		err = crypto_akcipher_set_priv_key(tfm, key, vecs->key_len);
	if (err)
		goto free_req;

	/*
	 * First run test which do not require a private key, such as
	 * encrypt or verify.
	 */
	err = -ENOMEM;
	out_len_max = crypto_akcipher_maxsize(tfm);
	outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_enc)
		goto free_req;

	if (!vecs->siggen_sigver_test) {
		m = vecs->m;
		m_size = vecs->m_size;
		c = vecs->c;
		c_size = vecs->c_size;
		op = "encrypt";
	} else {
		/* Swap args so we could keep plaintext (digest)
		 * in vecs->m, and cooked signature in vecs->c.
		 */
		m = vecs->c; /* signature */
		m_size = vecs->c_size;
		c = vecs->m; /* digest */
		c_size = vecs->m_size;
		op = "verify";
	}

	if (WARN_ON(m_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], m, m_size);

	/* Split the message across two SG entries to exercise SG walking. */
	sg_init_table(src_tab, 3);
	sg_set_buf(&src_tab[0], xbuf[0], 8);
	sg_set_buf(&src_tab[1], xbuf[0] + 8, m_size - 8);
	if (vecs->siggen_sigver_test) {
		if (WARN_ON(c_size > PAGE_SIZE))
			goto free_all;
		memcpy(xbuf[1], c, c_size);
		sg_set_buf(&src_tab[2], xbuf[1], c_size);
		akcipher_request_set_crypt(req, src_tab, NULL, m_size, c_size);
	} else {
		sg_init_one(&dst, outbuf_enc, out_len_max);
		akcipher_request_set_crypt(req, src_tab, &dst, m_size,
					   out_len_max);
	}
	akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature verification */
			      crypto_akcipher_verify(req) :
			      /* Run asymmetric encrypt */
			      crypto_akcipher_encrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	if (!vecs->siggen_sigver_test) {
		if (req->dst_len != c_size) {
			pr_err("alg: akcipher: %s test failed. Invalid output len\n",
			       op);
			err = -EINVAL;
			goto free_all;
		}
		/* verify that encrypted message is equal to expected */
		if (memcmp(c, outbuf_enc, c_size) != 0) {
			pr_err("alg: akcipher: %s test failed. Invalid output\n",
			       op);
			hexdump(outbuf_enc, c_size);
			err = -EINVAL;
			goto free_all;
		}
	}

	/*
	 * Don't invoke (decrypt or sign) test which require a private key
	 * for vectors with only a public key.
	 */
	if (vecs->public_key_vec) {
		err = 0;
		goto free_all;
	}
	outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
	if (!outbuf_dec) {
		err = -ENOMEM;
		goto free_all;
	}

	op = vecs->siggen_sigver_test ? "sign" : "decrypt";
	if (WARN_ON(c_size > PAGE_SIZE))
		goto free_all;
	memcpy(xbuf[0], c, c_size);

	sg_init_one(&src, xbuf[0], c_size);
	sg_init_one(&dst, outbuf_dec, out_len_max);
	crypto_init_wait(&wait);
	akcipher_request_set_crypt(req, &src, &dst, c_size, out_len_max);

	err = crypto_wait_req(vecs->siggen_sigver_test ?
			      /* Run asymmetric signature generation */
			      crypto_akcipher_sign(req) :
			      /* Run asymmetric decrypt */
			      crypto_akcipher_decrypt(req), &wait);
	if (err) {
		pr_err("alg: akcipher: %s test failed. err %d\n", op, err);
		goto free_all;
	}
	out_len = req->dst_len;
	if (out_len < m_size) {
		pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
		       op, out_len);
		err = -EINVAL;
		goto free_all;
	}
	/* verify that decrypted message is equal to the original msg */
	if (memchr_inv(outbuf_dec, 0, out_len - m_size) ||
	    memcmp(m, outbuf_dec + out_len - m_size, m_size)) {
		pr_err("alg: akcipher: %s test failed. Invalid output\n", op);
		hexdump(outbuf_dec, out_len);
		err = -EINVAL;
	}
free_all:
	kfree(outbuf_dec);
	kfree(outbuf_enc);
free_req:
	akcipher_request_free(req);
	kfree(key);
free_xbuf:
	testmgr_free_buf(xbuf);
	return err;
}
3818
3819static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
3820			 const struct akcipher_testvec *vecs,
3821			 unsigned int tcount)
3822{
3823	const char *algo =
3824		crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
3825	int ret, i;
3826
3827	for (i = 0; i < tcount; i++) {
3828		ret = test_akcipher_one(tfm, vecs++);
3829		if (!ret)
3830			continue;
3831
3832		pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
3833		       i + 1, algo, ret);
3834		return ret;
 
 
3835	}
3836	return 0;
3837}
3838
3839static int alg_test_akcipher(const struct alg_test_desc *desc,
3840			     const char *driver, u32 type, u32 mask)
3841{
3842	struct crypto_akcipher *tfm;
3843	int err = 0;
3844
3845	tfm = crypto_alloc_akcipher(driver, type, mask);
3846	if (IS_ERR(tfm)) {
3847		pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
3848		       driver, PTR_ERR(tfm));
3849		return PTR_ERR(tfm);
3850	}
3851	if (desc->suite.akcipher.vecs)
3852		err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
3853				    desc->suite.akcipher.count);
3854
3855	crypto_free_akcipher(tfm);
3856	return err;
3857}
3858
/*
 * alg_test_null - placeholder test for algorithms registered without test
 * vectors (e.g. hardware-keyed or template variants); always passes.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
3864
/* Build a test vector suite descriptor from a static array: pointer + count. */
#define __VECS(tv)	{ .vecs = tv, .count = ARRAY_SIZE(tv) }
3866
3867/* Please keep this list sorted by algorithm name. */
3868static const struct alg_test_desc alg_test_descs[] = {
3869	{
3870		.alg = "adiantum(xchacha12,aes)",
3871		.generic_driver = "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
3872		.test = alg_test_skcipher,
3873		.suite = {
3874			.cipher = __VECS(adiantum_xchacha12_aes_tv_template)
3875		},
3876	}, {
3877		.alg = "adiantum(xchacha20,aes)",
3878		.generic_driver = "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
3879		.test = alg_test_skcipher,
3880		.suite = {
3881			.cipher = __VECS(adiantum_xchacha20_aes_tv_template)
3882		},
3883	}, {
3884		.alg = "aegis128",
3885		.test = alg_test_aead,
3886		.suite = {
3887			.aead = __VECS(aegis128_tv_template)
3888		}
3889	}, {
3890		.alg = "ansi_cprng",
3891		.test = alg_test_cprng,
3892		.suite = {
3893			.cprng = __VECS(ansi_cprng_aes_tv_template)
3894		}
3895	}, {
3896		.alg = "authenc(hmac(md5),ecb(cipher_null))",
3897		.test = alg_test_aead,
3898		.suite = {
3899			.aead = __VECS(hmac_md5_ecb_cipher_null_tv_template)
3900		}
3901	}, {
3902		.alg = "authenc(hmac(sha1),cbc(aes))",
3903		.test = alg_test_aead,
 
 
 
3904		.fips_allowed = 1,
3905		.suite = {
3906			.aead = __VECS(hmac_sha1_aes_cbc_tv_temp)
3907		}
3908	}, {
3909		.alg = "authenc(hmac(sha1),cbc(des))",
3910		.test = alg_test_aead,
3911		.suite = {
3912			.aead = __VECS(hmac_sha1_des_cbc_tv_temp)
3913		}
3914	}, {
3915		.alg = "authenc(hmac(sha1),cbc(des3_ede))",
3916		.test = alg_test_aead,
3917		.fips_allowed = 1,
3918		.suite = {
3919			.aead = __VECS(hmac_sha1_des3_ede_cbc_tv_temp)
3920		}
3921	}, {
3922		.alg = "authenc(hmac(sha1),ctr(aes))",
3923		.test = alg_test_null,
3924		.fips_allowed = 1,
3925	}, {
3926		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
3927		.test = alg_test_aead,
3928		.suite = {
3929			.aead = __VECS(hmac_sha1_ecb_cipher_null_tv_temp)
3930		}
3931	}, {
3932		.alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
3933		.test = alg_test_null,
3934		.fips_allowed = 1,
3935	}, {
3936		.alg = "authenc(hmac(sha224),cbc(des))",
3937		.test = alg_test_aead,
3938		.suite = {
3939			.aead = __VECS(hmac_sha224_des_cbc_tv_temp)
3940		}
3941	}, {
3942		.alg = "authenc(hmac(sha224),cbc(des3_ede))",
3943		.test = alg_test_aead,
3944		.fips_allowed = 1,
3945		.suite = {
3946			.aead = __VECS(hmac_sha224_des3_ede_cbc_tv_temp)
3947		}
3948	}, {
3949		.alg = "authenc(hmac(sha256),cbc(aes))",
3950		.test = alg_test_aead,
 
 
 
3951		.fips_allowed = 1,
3952		.suite = {
3953			.aead = __VECS(hmac_sha256_aes_cbc_tv_temp)
3954		}
3955	}, {
3956		.alg = "authenc(hmac(sha256),cbc(des))",
3957		.test = alg_test_aead,
3958		.suite = {
3959			.aead = __VECS(hmac_sha256_des_cbc_tv_temp)
3960		}
3961	}, {
3962		.alg = "authenc(hmac(sha256),cbc(des3_ede))",
3963		.test = alg_test_aead,
3964		.fips_allowed = 1,
3965		.suite = {
3966			.aead = __VECS(hmac_sha256_des3_ede_cbc_tv_temp)
3967		}
3968	}, {
3969		.alg = "authenc(hmac(sha256),ctr(aes))",
3970		.test = alg_test_null,
3971		.fips_allowed = 1,
3972	}, {
3973		.alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
3974		.test = alg_test_null,
3975		.fips_allowed = 1,
3976	}, {
3977		.alg = "authenc(hmac(sha384),cbc(des))",
3978		.test = alg_test_aead,
3979		.suite = {
3980			.aead = __VECS(hmac_sha384_des_cbc_tv_temp)
3981		}
3982	}, {
3983		.alg = "authenc(hmac(sha384),cbc(des3_ede))",
3984		.test = alg_test_aead,
3985		.fips_allowed = 1,
3986		.suite = {
3987			.aead = __VECS(hmac_sha384_des3_ede_cbc_tv_temp)
3988		}
3989	}, {
3990		.alg = "authenc(hmac(sha384),ctr(aes))",
3991		.test = alg_test_null,
3992		.fips_allowed = 1,
3993	}, {
3994		.alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
 
 
 
3995		.test = alg_test_null,
3996		.fips_allowed = 1,
3997	}, {
3998		.alg = "authenc(hmac(sha512),cbc(aes))",
 
3999		.fips_allowed = 1,
 
 
 
 
 
 
 
 
4000		.test = alg_test_aead,
 
4001		.suite = {
4002			.aead = __VECS(hmac_sha512_aes_cbc_tv_temp)
 
 
 
 
 
 
 
 
 
4003		}
4004	}, {
4005		.alg = "authenc(hmac(sha512),cbc(des))",
4006		.test = alg_test_aead,
 
4007		.suite = {
4008			.aead = __VECS(hmac_sha512_des_cbc_tv_temp)
 
 
 
 
 
4009		}
4010	}, {
4011		.alg = "authenc(hmac(sha512),cbc(des3_ede))",
4012		.test = alg_test_aead,
4013		.fips_allowed = 1,
4014		.suite = {
4015			.aead = __VECS(hmac_sha512_des3_ede_cbc_tv_temp)
 
 
 
 
 
 
 
 
 
4016		}
4017	}, {
4018		.alg = "authenc(hmac(sha512),ctr(aes))",
4019		.test = alg_test_null,
4020		.fips_allowed = 1,
 
 
 
 
 
 
 
 
4021	}, {
4022		.alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
4023		.test = alg_test_null,
4024		.fips_allowed = 1,
 
 
 
 
 
 
 
 
4025	}, {
4026		.alg = "cbc(aes)",
4027		.test = alg_test_skcipher,
4028		.fips_allowed = 1,
4029		.suite = {
4030			.cipher = __VECS(aes_cbc_tv_template)
4031		},
 
 
 
 
 
 
 
 
 
4032	}, {
4033		.alg = "cbc(anubis)",
4034		.test = alg_test_skcipher,
4035		.suite = {
4036			.cipher = __VECS(anubis_cbc_tv_template)
4037		},
 
 
 
 
 
 
 
 
 
4038	}, {
4039		.alg = "cbc(blowfish)",
4040		.test = alg_test_skcipher,
4041		.suite = {
4042			.cipher = __VECS(bf_cbc_tv_template)
4043		},
 
 
 
 
 
 
 
 
 
4044	}, {
4045		.alg = "cbc(camellia)",
4046		.test = alg_test_skcipher,
4047		.suite = {
4048			.cipher = __VECS(camellia_cbc_tv_template)
4049		},
 
 
 
 
 
 
 
 
 
4050	}, {
4051		.alg = "cbc(cast5)",
4052		.test = alg_test_skcipher,
4053		.suite = {
4054			.cipher = __VECS(cast5_cbc_tv_template)
4055		},
 
 
 
 
 
 
 
 
 
4056	}, {
4057		.alg = "cbc(cast6)",
4058		.test = alg_test_skcipher,
4059		.suite = {
4060			.cipher = __VECS(cast6_cbc_tv_template)
4061		},
 
 
 
 
 
 
 
 
 
4062	}, {
4063		.alg = "cbc(des)",
4064		.test = alg_test_skcipher,
4065		.suite = {
4066			.cipher = __VECS(des_cbc_tv_template)
4067		},
 
 
 
 
 
 
 
 
 
4068	}, {
4069		.alg = "cbc(des3_ede)",
4070		.test = alg_test_skcipher,
4071		.fips_allowed = 1,
4072		.suite = {
4073			.cipher = __VECS(des3_ede_cbc_tv_template)
4074		},
4075	}, {
4076		/* Same as cbc(aes) except the key is stored in
4077		 * hardware secure memory which we reference by index
4078		 */
4079		.alg = "cbc(paes)",
4080		.test = alg_test_null,
4081		.fips_allowed = 1,
4082	}, {
4083		/* Same as cbc(sm4) except the key is stored in
4084		 * hardware secure memory which we reference by index
4085		 */
4086		.alg = "cbc(psm4)",
4087		.test = alg_test_null,
4088	}, {
4089		.alg = "cbc(serpent)",
4090		.test = alg_test_skcipher,
4091		.suite = {
4092			.cipher = __VECS(serpent_cbc_tv_template)
4093		},
4094	}, {
4095		.alg = "cbc(sm4)",
4096		.test = alg_test_skcipher,
4097		.suite = {
4098			.cipher = __VECS(sm4_cbc_tv_template)
 
 
 
4099		}
4100	}, {
4101		.alg = "cbc(twofish)",
4102		.test = alg_test_skcipher,
4103		.suite = {
4104			.cipher = __VECS(tf_cbc_tv_template)
4105		},
4106	}, {
4107		.alg = "cbcmac(aes)",
4108		.fips_allowed = 1,
4109		.test = alg_test_hash,
4110		.suite = {
4111			.hash = __VECS(aes_cbcmac_tv_template)
 
 
4112		}
4113	}, {
4114		.alg = "ccm(aes)",
4115		.generic_driver = "ccm_base(ctr(aes-generic),cbcmac(aes-generic))",
4116		.test = alg_test_aead,
4117		.fips_allowed = 1,
4118		.suite = {
4119			.aead = __VECS(aes_ccm_tv_template)
 
 
 
 
 
 
 
 
 
4120		}
4121	}, {
4122		.alg = "cfb(aes)",
4123		.test = alg_test_skcipher,
4124		.fips_allowed = 1,
4125		.suite = {
4126			.cipher = __VECS(aes_cfb_tv_template)
4127		},
4128	}, {
4129		.alg = "chacha20",
4130		.test = alg_test_skcipher,
4131		.suite = {
4132			.cipher = __VECS(chacha20_tv_template)
4133		},
4134	}, {
4135		.alg = "cmac(aes)",
4136		.fips_allowed = 1,
4137		.test = alg_test_hash,
4138		.suite = {
4139			.hash = __VECS(aes_cmac128_tv_template)
 
 
 
4140		}
4141	}, {
4142		.alg = "cmac(des3_ede)",
4143		.fips_allowed = 1,
4144		.test = alg_test_hash,
4145		.suite = {
4146			.hash = __VECS(des3_ede_cmac64_tv_template)
 
 
 
4147		}
4148	}, {
4149		.alg = "compress_null",
4150		.test = alg_test_null,
4151	}, {
4152		.alg = "crc32",
4153		.test = alg_test_hash,
4154		.fips_allowed = 1,
4155		.suite = {
4156			.hash = __VECS(crc32_tv_template)
4157		}
4158	}, {
4159		.alg = "crc32c",
4160		.test = alg_test_crc32c,
4161		.fips_allowed = 1,
4162		.suite = {
4163			.hash = __VECS(crc32c_tv_template)
 
 
 
4164		}
4165	}, {
4166		.alg = "crct10dif",
4167		.test = alg_test_hash,
4168		.fips_allowed = 1,
4169		.suite = {
4170			.hash = __VECS(crct10dif_tv_template)
 
 
 
4171		}
4172	}, {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4173		.alg = "ctr(aes)",
4174		.test = alg_test_skcipher,
4175		.fips_allowed = 1,
4176		.suite = {
4177			.cipher = __VECS(aes_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4178		}
4179	}, {
4180		.alg = "ctr(blowfish)",
4181		.test = alg_test_skcipher,
4182		.suite = {
4183			.cipher = __VECS(bf_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4184		}
4185	}, {
4186		.alg = "ctr(camellia)",
4187		.test = alg_test_skcipher,
4188		.suite = {
4189			.cipher = __VECS(camellia_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4190		}
4191	}, {
4192		.alg = "ctr(cast5)",
4193		.test = alg_test_skcipher,
4194		.suite = {
4195			.cipher = __VECS(cast5_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4196		}
4197	}, {
4198		.alg = "ctr(cast6)",
4199		.test = alg_test_skcipher,
4200		.suite = {
4201			.cipher = __VECS(cast6_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4202		}
4203	}, {
4204		.alg = "ctr(des)",
4205		.test = alg_test_skcipher,
4206		.suite = {
4207			.cipher = __VECS(des_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4208		}
4209	}, {
4210		.alg = "ctr(des3_ede)",
4211		.test = alg_test_skcipher,
4212		.fips_allowed = 1,
4213		.suite = {
4214			.cipher = __VECS(des3_ede_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4215		}
4216	}, {
4217		/* Same as ctr(aes) except the key is stored in
4218		 * hardware secure memory which we reference by index
4219		 */
4220		.alg = "ctr(paes)",
4221		.test = alg_test_null,
4222		.fips_allowed = 1,
4223	}, {
4224
4225		/* Same as ctr(sm4) except the key is stored in
4226		 * hardware secure memory which we reference by index
4227		 */
4228		.alg = "ctr(psm4)",
4229		.test = alg_test_null,
4230	}, {
4231		.alg = "ctr(serpent)",
4232		.test = alg_test_skcipher,
4233		.suite = {
4234			.cipher = __VECS(serpent_ctr_tv_template)
4235		}
4236	}, {
4237		.alg = "ctr(sm4)",
4238		.test = alg_test_skcipher,
4239		.suite = {
4240			.cipher = __VECS(sm4_ctr_tv_template)
 
 
 
4241		}
4242	}, {
4243		.alg = "ctr(twofish)",
4244		.test = alg_test_skcipher,
4245		.suite = {
4246			.cipher = __VECS(tf_ctr_tv_template)
 
 
 
 
 
 
 
 
 
4247		}
4248	}, {
4249		.alg = "cts(cbc(aes))",
4250		.test = alg_test_skcipher,
4251		.fips_allowed = 1,
4252		.suite = {
4253			.cipher = __VECS(cts_mode_tv_template)
 
 
 
 
 
 
 
 
 
4254		}
4255	}, {
4256		/* Same as cts(cbc((aes)) except the key is stored in
4257		 * hardware secure memory which we reference by index
4258		 */
4259		.alg = "cts(cbc(paes))",
4260		.test = alg_test_null,
4261		.fips_allowed = 1,
4262	}, {
4263		.alg = "deflate",
4264		.test = alg_test_comp,
4265		.fips_allowed = 1,
4266		.suite = {
4267			.comp = {
4268				.comp = __VECS(deflate_comp_tv_template),
4269				.decomp = __VECS(deflate_decomp_tv_template)
 
 
 
 
 
 
4270			}
4271		}
4272	}, {
4273		.alg = "dh",
4274		.test = alg_test_kpp,
4275		.fips_allowed = 1,
4276		.suite = {
4277			.kpp = __VECS(dh_tv_template)
4278		}
4279	}, {
4280		.alg = "digest_null",
4281		.test = alg_test_null,
4282	}, {
4283		.alg = "drbg_nopr_ctr_aes128",
4284		.test = alg_test_drbg,
4285		.fips_allowed = 1,
4286		.suite = {
4287			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
4288		}
4289	}, {
4290		.alg = "drbg_nopr_ctr_aes192",
4291		.test = alg_test_drbg,
4292		.fips_allowed = 1,
4293		.suite = {
4294			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
4295		}
4296	}, {
4297		.alg = "drbg_nopr_ctr_aes256",
4298		.test = alg_test_drbg,
4299		.fips_allowed = 1,
4300		.suite = {
4301			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
4302		}
4303	}, {
4304		/*
4305		 * There is no need to specifically test the DRBG with every
4306		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
4307		 */
4308		.alg = "drbg_nopr_hmac_sha1",
4309		.fips_allowed = 1,
4310		.test = alg_test_null,
4311	}, {
4312		.alg = "drbg_nopr_hmac_sha256",
4313		.test = alg_test_drbg,
4314		.fips_allowed = 1,
4315		.suite = {
4316			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
4317		}
4318	}, {
4319		/* covered by drbg_nopr_hmac_sha256 test */
4320		.alg = "drbg_nopr_hmac_sha384",
4321		.fips_allowed = 1,
4322		.test = alg_test_null,
4323	}, {
4324		.alg = "drbg_nopr_hmac_sha512",
4325		.test = alg_test_null,
4326		.fips_allowed = 1,
4327	}, {
4328		.alg = "drbg_nopr_sha1",
4329		.fips_allowed = 1,
4330		.test = alg_test_null,
4331	}, {
4332		.alg = "drbg_nopr_sha256",
4333		.test = alg_test_drbg,
4334		.fips_allowed = 1,
4335		.suite = {
4336			.drbg = __VECS(drbg_nopr_sha256_tv_template)
4337		}
4338	}, {
4339		/* covered by drbg_nopr_sha256 test */
4340		.alg = "drbg_nopr_sha384",
4341		.fips_allowed = 1,
4342		.test = alg_test_null,
4343	}, {
4344		.alg = "drbg_nopr_sha512",
4345		.fips_allowed = 1,
4346		.test = alg_test_null,
4347	}, {
4348		.alg = "drbg_pr_ctr_aes128",
4349		.test = alg_test_drbg,
4350		.fips_allowed = 1,
4351		.suite = {
4352			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
4353		}
4354	}, {
4355		/* covered by drbg_pr_ctr_aes128 test */
4356		.alg = "drbg_pr_ctr_aes192",
4357		.fips_allowed = 1,
4358		.test = alg_test_null,
4359	}, {
4360		.alg = "drbg_pr_ctr_aes256",
4361		.fips_allowed = 1,
4362		.test = alg_test_null,
4363	}, {
4364		.alg = "drbg_pr_hmac_sha1",
4365		.fips_allowed = 1,
4366		.test = alg_test_null,
4367	}, {
4368		.alg = "drbg_pr_hmac_sha256",
4369		.test = alg_test_drbg,
4370		.fips_allowed = 1,
4371		.suite = {
4372			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
4373		}
4374	}, {
4375		/* covered by drbg_pr_hmac_sha256 test */
4376		.alg = "drbg_pr_hmac_sha384",
4377		.fips_allowed = 1,
4378		.test = alg_test_null,
4379	}, {
4380		.alg = "drbg_pr_hmac_sha512",
4381		.test = alg_test_null,
4382		.fips_allowed = 1,
4383	}, {
4384		.alg = "drbg_pr_sha1",
4385		.fips_allowed = 1,
4386		.test = alg_test_null,
4387	}, {
4388		.alg = "drbg_pr_sha256",
4389		.test = alg_test_drbg,
4390		.fips_allowed = 1,
4391		.suite = {
4392			.drbg = __VECS(drbg_pr_sha256_tv_template)
4393		}
4394	}, {
4395		/* covered by drbg_pr_sha256 test */
4396		.alg = "drbg_pr_sha384",
4397		.fips_allowed = 1,
4398		.test = alg_test_null,
4399	}, {
4400		.alg = "drbg_pr_sha512",
4401		.fips_allowed = 1,
4402		.test = alg_test_null,
4403	}, {
4404		.alg = "ecb(aes)",
4405		.test = alg_test_skcipher,
4406		.fips_allowed = 1,
4407		.suite = {
4408			.cipher = __VECS(aes_tv_template)
 
 
 
 
 
 
 
 
 
4409		}
4410	}, {
4411		.alg = "ecb(anubis)",
4412		.test = alg_test_skcipher,
4413		.suite = {
4414			.cipher = __VECS(anubis_tv_template)
 
 
 
 
 
 
 
 
 
4415		}
4416	}, {
4417		.alg = "ecb(arc4)",
4418		.generic_driver = "ecb(arc4)-generic",
4419		.test = alg_test_skcipher,
4420		.suite = {
4421			.cipher = __VECS(arc4_tv_template)
 
 
 
 
 
 
 
 
 
4422		}
4423	}, {
4424		.alg = "ecb(blowfish)",
4425		.test = alg_test_skcipher,
4426		.suite = {
4427			.cipher = __VECS(bf_tv_template)
 
 
 
 
 
 
 
 
 
4428		}
4429	}, {
4430		.alg = "ecb(camellia)",
4431		.test = alg_test_skcipher,
4432		.suite = {
4433			.cipher = __VECS(camellia_tv_template)
 
 
 
 
 
 
 
 
 
4434		}
4435	}, {
4436		.alg = "ecb(cast5)",
4437		.test = alg_test_skcipher,
4438		.suite = {
4439			.cipher = __VECS(cast5_tv_template)
 
 
 
 
 
 
 
 
 
4440		}
4441	}, {
4442		.alg = "ecb(cast6)",
4443		.test = alg_test_skcipher,
4444		.suite = {
4445			.cipher = __VECS(cast6_tv_template)
 
 
 
 
 
 
 
 
 
4446		}
4447	}, {
4448		.alg = "ecb(cipher_null)",
4449		.test = alg_test_null,
4450		.fips_allowed = 1,
4451	}, {
4452		.alg = "ecb(des)",
4453		.test = alg_test_skcipher,
 
4454		.suite = {
4455			.cipher = __VECS(des_tv_template)
 
 
 
 
 
 
 
 
 
4456		}
4457	}, {
4458		.alg = "ecb(des3_ede)",
4459		.test = alg_test_skcipher,
4460		.fips_allowed = 1,
4461		.suite = {
4462			.cipher = __VECS(des3_ede_tv_template)
 
 
 
 
 
 
 
 
 
4463		}
4464	}, {
4465		.alg = "ecb(fcrypt)",
4466		.test = alg_test_skcipher,
4467		.suite = {
4468			.cipher = {
4469				.vecs = fcrypt_pcbc_tv_template,
4470				.count = 1
 
 
 
 
 
 
4471			}
4472		}
4473	}, {
4474		.alg = "ecb(khazad)",
4475		.test = alg_test_skcipher,
4476		.suite = {
4477			.cipher = __VECS(khazad_tv_template)
 
 
 
 
 
 
 
 
 
4478		}
4479	}, {
4480		/* Same as ecb(aes) except the key is stored in
4481		 * hardware secure memory which we reference by index
4482		 */
4483		.alg = "ecb(paes)",
4484		.test = alg_test_null,
4485		.fips_allowed = 1,
4486	}, {
4487		.alg = "ecb(seed)",
4488		.test = alg_test_skcipher,
4489		.suite = {
4490			.cipher = __VECS(seed_tv_template)
 
 
 
 
 
 
 
 
 
4491		}
4492	}, {
4493		.alg = "ecb(serpent)",
4494		.test = alg_test_skcipher,
4495		.suite = {
4496			.cipher = __VECS(serpent_tv_template)
4497		}
4498	}, {
4499		.alg = "ecb(sm4)",
4500		.test = alg_test_skcipher,
4501		.suite = {
4502			.cipher = __VECS(sm4_tv_template)
 
 
 
4503		}
4504	}, {
4505		.alg = "ecb(tea)",
4506		.test = alg_test_skcipher,
4507		.suite = {
4508			.cipher = __VECS(tea_tv_template)
 
 
 
 
 
 
 
 
 
4509		}
4510	}, {
4511		.alg = "ecb(tnepres)",
4512		.test = alg_test_skcipher,
4513		.suite = {
4514			.cipher = __VECS(tnepres_tv_template)
 
 
 
 
 
 
 
 
 
4515		}
4516	}, {
4517		.alg = "ecb(twofish)",
4518		.test = alg_test_skcipher,
4519		.suite = {
4520			.cipher = __VECS(tf_tv_template)
 
 
 
 
 
 
 
 
 
4521		}
4522	}, {
4523		.alg = "ecb(xeta)",
4524		.test = alg_test_skcipher,
4525		.suite = {
4526			.cipher = __VECS(xeta_tv_template)
 
 
 
 
 
 
 
 
 
4527		}
4528	}, {
4529		.alg = "ecb(xtea)",
4530		.test = alg_test_skcipher,
4531		.suite = {
4532			.cipher = __VECS(xtea_tv_template)
4533		}
4534	}, {
4535		.alg = "ecdh",
4536		.test = alg_test_kpp,
4537		.fips_allowed = 1,
4538		.suite = {
4539			.kpp = __VECS(ecdh_tv_template)
4540		}
4541	}, {
4542		.alg = "ecrdsa",
4543		.test = alg_test_akcipher,
4544		.suite = {
4545			.akcipher = __VECS(ecrdsa_tv_template)
4546		}
4547	}, {
4548		.alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
4549		.test = alg_test_aead,
4550		.fips_allowed = 1,
4551		.suite = {
4552			.aead = __VECS(essiv_hmac_sha256_aes_cbc_tv_temp)
4553		}
4554	}, {
4555		.alg = "essiv(cbc(aes),sha256)",
4556		.test = alg_test_skcipher,
4557		.fips_allowed = 1,
4558		.suite = {
4559			.cipher = __VECS(essiv_aes_cbc_tv_template)
 
 
4560		}
4561	}, {
4562		.alg = "gcm(aes)",
4563		.generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
4564		.test = alg_test_aead,
4565		.fips_allowed = 1,
4566		.suite = {
4567			.aead = __VECS(aes_gcm_tv_template)
 
 
 
4568		}
4569	}, {
4570		.alg = "ghash",
4571		.test = alg_test_hash,
4572		.fips_allowed = 1,
4573		.suite = {
4574			.hash = __VECS(ghash_tv_template)
 
 
 
4575		}
4576	}, {
4577		.alg = "hmac(md5)",
4578		.test = alg_test_hash,
4579		.suite = {
4580			.hash = __VECS(hmac_md5_tv_template)
 
 
 
4581		}
4582	}, {
4583		.alg = "hmac(rmd128)",
4584		.test = alg_test_hash,
4585		.suite = {
4586			.hash = __VECS(hmac_rmd128_tv_template)
 
 
 
4587		}
4588	}, {
4589		.alg = "hmac(rmd160)",
4590		.test = alg_test_hash,
4591		.suite = {
4592			.hash = __VECS(hmac_rmd160_tv_template)
 
 
 
4593		}
4594	}, {
4595		.alg = "hmac(sha1)",
4596		.test = alg_test_hash,
4597		.fips_allowed = 1,
4598		.suite = {
4599			.hash = __VECS(hmac_sha1_tv_template)
 
 
 
4600		}
4601	}, {
4602		.alg = "hmac(sha224)",
4603		.test = alg_test_hash,
4604		.fips_allowed = 1,
4605		.suite = {
4606			.hash = __VECS(hmac_sha224_tv_template)
 
 
 
4607		}
4608	}, {
4609		.alg = "hmac(sha256)",
4610		.test = alg_test_hash,
4611		.fips_allowed = 1,
4612		.suite = {
4613			.hash = __VECS(hmac_sha256_tv_template)
4614		}
4615	}, {
4616		.alg = "hmac(sha3-224)",
4617		.test = alg_test_hash,
4618		.fips_allowed = 1,
4619		.suite = {
4620			.hash = __VECS(hmac_sha3_224_tv_template)
4621		}
4622	}, {
4623		.alg = "hmac(sha3-256)",
4624		.test = alg_test_hash,
4625		.fips_allowed = 1,
4626		.suite = {
4627			.hash = __VECS(hmac_sha3_256_tv_template)
4628		}
4629	}, {
4630		.alg = "hmac(sha3-384)",
4631		.test = alg_test_hash,
4632		.fips_allowed = 1,
4633		.suite = {
4634			.hash = __VECS(hmac_sha3_384_tv_template)
4635		}
4636	}, {
4637		.alg = "hmac(sha3-512)",
4638		.test = alg_test_hash,
4639		.fips_allowed = 1,
4640		.suite = {
4641			.hash = __VECS(hmac_sha3_512_tv_template)
4642		}
4643	}, {
4644		.alg = "hmac(sha384)",
4645		.test = alg_test_hash,
4646		.fips_allowed = 1,
4647		.suite = {
4648			.hash = __VECS(hmac_sha384_tv_template)
 
 
 
4649		}
4650	}, {
4651		.alg = "hmac(sha512)",
4652		.test = alg_test_hash,
4653		.fips_allowed = 1,
4654		.suite = {
4655			.hash = __VECS(hmac_sha512_tv_template)
4656		}
4657	}, {
4658		.alg = "hmac(streebog256)",
4659		.test = alg_test_hash,
4660		.suite = {
4661			.hash = __VECS(hmac_streebog256_tv_template)
4662		}
4663	}, {
4664		.alg = "hmac(streebog512)",
4665		.test = alg_test_hash,
4666		.suite = {
4667			.hash = __VECS(hmac_streebog512_tv_template)
4668		}
4669	}, {
4670		.alg = "jitterentropy_rng",
4671		.fips_allowed = 1,
4672		.test = alg_test_null,
4673	}, {
4674		.alg = "kw(aes)",
4675		.test = alg_test_skcipher,
4676		.fips_allowed = 1,
4677		.suite = {
4678			.cipher = __VECS(aes_kw_tv_template)
4679		}
4680	}, {
4681		.alg = "lrw(aes)",
4682		.generic_driver = "lrw(ecb(aes-generic))",
4683		.test = alg_test_skcipher,
4684		.suite = {
4685			.cipher = __VECS(aes_lrw_tv_template)
 
 
 
 
 
 
 
 
 
4686		}
4687	}, {
4688		.alg = "lrw(camellia)",
4689		.generic_driver = "lrw(ecb(camellia-generic))",
4690		.test = alg_test_skcipher,
4691		.suite = {
4692			.cipher = __VECS(camellia_lrw_tv_template)
 
 
 
 
 
 
 
 
 
4693		}
4694	}, {
4695		.alg = "lrw(cast6)",
4696		.generic_driver = "lrw(ecb(cast6-generic))",
4697		.test = alg_test_skcipher,
4698		.suite = {
4699			.cipher = __VECS(cast6_lrw_tv_template)
 
 
 
 
 
 
 
 
 
4700		}
4701	}, {
4702		.alg = "lrw(serpent)",
4703		.generic_driver = "lrw(ecb(serpent-generic))",
4704		.test = alg_test_skcipher,
4705		.suite = {
4706			.cipher = __VECS(serpent_lrw_tv_template)
 
 
 
 
 
 
 
 
 
4707		}
4708	}, {
4709		.alg = "lrw(twofish)",
4710		.generic_driver = "lrw(ecb(twofish-generic))",
4711		.test = alg_test_skcipher,
4712		.suite = {
4713			.cipher = __VECS(tf_lrw_tv_template)
4714		}
4715	}, {
4716		.alg = "lz4",
4717		.test = alg_test_comp,
4718		.fips_allowed = 1,
4719		.suite = {
4720			.comp = {
4721				.comp = __VECS(lz4_comp_tv_template),
4722				.decomp = __VECS(lz4_decomp_tv_template)
4723			}
4724		}
4725	}, {
4726		.alg = "lz4hc",
4727		.test = alg_test_comp,
4728		.fips_allowed = 1,
4729		.suite = {
4730			.comp = {
4731				.comp = __VECS(lz4hc_comp_tv_template),
4732				.decomp = __VECS(lz4hc_decomp_tv_template)
4733			}
4734		}
4735	}, {
4736		.alg = "lzo",
4737		.test = alg_test_comp,
4738		.fips_allowed = 1,
4739		.suite = {
4740			.comp = {
4741				.comp = __VECS(lzo_comp_tv_template),
4742				.decomp = __VECS(lzo_decomp_tv_template)
4743			}
4744		}
4745	}, {
4746		.alg = "lzo-rle",
4747		.test = alg_test_comp,
4748		.fips_allowed = 1,
4749		.suite = {
4750			.comp = {
4751				.comp = __VECS(lzorle_comp_tv_template),
4752				.decomp = __VECS(lzorle_decomp_tv_template)
4753			}
4754		}
4755	}, {
4756		.alg = "md4",
4757		.test = alg_test_hash,
4758		.suite = {
4759			.hash = __VECS(md4_tv_template)
 
 
 
4760		}
4761	}, {
4762		.alg = "md5",
4763		.test = alg_test_hash,
4764		.suite = {
4765			.hash = __VECS(md5_tv_template)
 
 
 
4766		}
4767	}, {
4768		.alg = "michael_mic",
4769		.test = alg_test_hash,
4770		.suite = {
4771			.hash = __VECS(michael_mic_tv_template)
4772		}
4773	}, {
4774		.alg = "nhpoly1305",
4775		.test = alg_test_hash,
4776		.suite = {
4777			.hash = __VECS(nhpoly1305_tv_template)
4778		}
4779	}, {
4780		.alg = "ofb(aes)",
4781		.test = alg_test_skcipher,
4782		.fips_allowed = 1,
4783		.suite = {
4784			.cipher = __VECS(aes_ofb_tv_template)
 
 
 
 
 
 
 
 
 
4785		}
4786	}, {
4787		/* Same as ofb(aes) except the key is stored in
4788		 * hardware secure memory which we reference by index
4789		 */
4790		.alg = "ofb(paes)",
4791		.test = alg_test_null,
4792		.fips_allowed = 1,
4793	}, {
4794		.alg = "pcbc(fcrypt)",
4795		.test = alg_test_skcipher,
4796		.suite = {
4797			.cipher = __VECS(fcrypt_pcbc_tv_template)
4798		}
4799	}, {
4800		.alg = "pkcs1pad(rsa,sha224)",
4801		.test = alg_test_null,
4802		.fips_allowed = 1,
4803	}, {
4804		.alg = "pkcs1pad(rsa,sha256)",
4805		.test = alg_test_akcipher,
4806		.fips_allowed = 1,
4807		.suite = {
4808			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
4809		}
4810	}, {
4811		.alg = "pkcs1pad(rsa,sha384)",
4812		.test = alg_test_null,
4813		.fips_allowed = 1,
4814	}, {
4815		.alg = "pkcs1pad(rsa,sha512)",
4816		.test = alg_test_null,
4817		.fips_allowed = 1,
4818	}, {
4819		.alg = "poly1305",
4820		.test = alg_test_hash,
4821		.suite = {
4822			.hash = __VECS(poly1305_tv_template)
4823		}
4824	}, {
4825		.alg = "rfc3686(ctr(aes))",
4826		.test = alg_test_skcipher,
4827		.fips_allowed = 1,
4828		.suite = {
4829			.cipher = __VECS(aes_ctr_rfc3686_tv_template)
 
 
 
 
 
 
 
 
 
4830		}
4831	}, {
4832		.alg = "rfc4106(gcm(aes))",
4833		.generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
4834		.test = alg_test_aead,
4835		.fips_allowed = 1,
4836		.suite = {
4837			.aead = __VECS(aes_gcm_rfc4106_tv_template)
 
 
 
 
 
 
 
 
 
4838		}
4839	}, {
4840		.alg = "rfc4309(ccm(aes))",
4841		.generic_driver = "rfc4309(ccm_base(ctr(aes-generic),cbcmac(aes-generic)))",
4842		.test = alg_test_aead,
4843		.fips_allowed = 1,
4844		.suite = {
4845			.aead = __VECS(aes_ccm_rfc4309_tv_template)
 
 
 
 
 
 
 
 
 
4846		}
4847	}, {
4848		.alg = "rfc4543(gcm(aes))",
4849		.generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
4850		.test = alg_test_aead,
4851		.suite = {
4852			.aead = __VECS(aes_gcm_rfc4543_tv_template)
4853		}
4854	}, {
4855		.alg = "rfc7539(chacha20,poly1305)",
4856		.test = alg_test_aead,
4857		.suite = {
4858			.aead = __VECS(rfc7539_tv_template)
4859		}
4860	}, {
4861		.alg = "rfc7539esp(chacha20,poly1305)",
4862		.test = alg_test_aead,
4863		.suite = {
4864			.aead = __VECS(rfc7539esp_tv_template)
4865		}
4866	}, {
4867		.alg = "rmd128",
4868		.test = alg_test_hash,
4869		.suite = {
4870			.hash = __VECS(rmd128_tv_template)
 
 
 
4871		}
4872	}, {
4873		.alg = "rmd160",
4874		.test = alg_test_hash,
4875		.suite = {
4876			.hash = __VECS(rmd160_tv_template)
 
 
 
4877		}
4878	}, {
4879		.alg = "rmd256",
4880		.test = alg_test_hash,
4881		.suite = {
4882			.hash = __VECS(rmd256_tv_template)
 
 
 
4883		}
4884	}, {
4885		.alg = "rmd320",
4886		.test = alg_test_hash,
4887		.suite = {
4888			.hash = __VECS(rmd320_tv_template)
4889		}
4890	}, {
4891		.alg = "rsa",
4892		.test = alg_test_akcipher,
4893		.fips_allowed = 1,
4894		.suite = {
4895			.akcipher = __VECS(rsa_tv_template)
4896		}
4897	}, {
4898		.alg = "salsa20",
4899		.test = alg_test_skcipher,
4900		.suite = {
4901			.cipher = __VECS(salsa20_stream_tv_template)
 
 
 
 
 
4902		}
4903	}, {
4904		.alg = "sha1",
4905		.test = alg_test_hash,
4906		.fips_allowed = 1,
4907		.suite = {
4908			.hash = __VECS(sha1_tv_template)
 
 
 
4909		}
4910	}, {
4911		.alg = "sha224",
4912		.test = alg_test_hash,
4913		.fips_allowed = 1,
4914		.suite = {
4915			.hash = __VECS(sha224_tv_template)
 
 
 
4916		}
4917	}, {
4918		.alg = "sha256",
4919		.test = alg_test_hash,
4920		.fips_allowed = 1,
4921		.suite = {
4922			.hash = __VECS(sha256_tv_template)
4923		}
4924	}, {
4925		.alg = "sha3-224",
4926		.test = alg_test_hash,
4927		.fips_allowed = 1,
4928		.suite = {
4929			.hash = __VECS(sha3_224_tv_template)
4930		}
4931	}, {
4932		.alg = "sha3-256",
4933		.test = alg_test_hash,
4934		.fips_allowed = 1,
4935		.suite = {
4936			.hash = __VECS(sha3_256_tv_template)
4937		}
4938	}, {
4939		.alg = "sha3-384",
4940		.test = alg_test_hash,
4941		.fips_allowed = 1,
4942		.suite = {
4943			.hash = __VECS(sha3_384_tv_template)
4944		}
4945	}, {
4946		.alg = "sha3-512",
4947		.test = alg_test_hash,
4948		.fips_allowed = 1,
4949		.suite = {
4950			.hash = __VECS(sha3_512_tv_template)
4951		}
4952	}, {
4953		.alg = "sha384",
4954		.test = alg_test_hash,
4955		.fips_allowed = 1,
4956		.suite = {
4957			.hash = __VECS(sha384_tv_template)
 
 
 
4958		}
4959	}, {
4960		.alg = "sha512",
4961		.test = alg_test_hash,
4962		.fips_allowed = 1,
4963		.suite = {
4964			.hash = __VECS(sha512_tv_template)
4965		}
4966	}, {
4967		.alg = "sm3",
4968		.test = alg_test_hash,
4969		.suite = {
4970			.hash = __VECS(sm3_tv_template)
4971		}
4972	}, {
4973		.alg = "streebog256",
4974		.test = alg_test_hash,
4975		.suite = {
4976			.hash = __VECS(streebog256_tv_template)
4977		}
4978	}, {
4979		.alg = "streebog512",
4980		.test = alg_test_hash,
4981		.suite = {
4982			.hash = __VECS(streebog512_tv_template)
4983		}
4984	}, {
4985		.alg = "tgr128",
4986		.test = alg_test_hash,
4987		.suite = {
4988			.hash = __VECS(tgr128_tv_template)
 
 
 
4989		}
4990	}, {
4991		.alg = "tgr160",
4992		.test = alg_test_hash,
4993		.suite = {
4994			.hash = __VECS(tgr160_tv_template)
 
 
 
4995		}
4996	}, {
4997		.alg = "tgr192",
4998		.test = alg_test_hash,
4999		.suite = {
5000			.hash = __VECS(tgr192_tv_template)
 
 
 
5001		}
5002	}, {
5003		.alg = "vmac64(aes)",
5004		.test = alg_test_hash,
5005		.suite = {
5006			.hash = __VECS(vmac64_aes_tv_template)
 
 
 
5007		}
5008	}, {
5009		.alg = "wp256",
5010		.test = alg_test_hash,
5011		.suite = {
5012			.hash = __VECS(wp256_tv_template)
 
 
 
5013		}
5014	}, {
5015		.alg = "wp384",
5016		.test = alg_test_hash,
5017		.suite = {
5018			.hash = __VECS(wp384_tv_template)
 
 
 
5019		}
5020	}, {
5021		.alg = "wp512",
5022		.test = alg_test_hash,
5023		.suite = {
5024			.hash = __VECS(wp512_tv_template)
 
 
 
5025		}
5026	}, {
5027		.alg = "xcbc(aes)",
5028		.test = alg_test_hash,
5029		.suite = {
5030			.hash = __VECS(aes_xcbc128_tv_template)
 
 
 
5031		}
5032	}, {
5033		.alg = "xchacha12",
5034		.test = alg_test_skcipher,
5035		.suite = {
5036			.cipher = __VECS(xchacha12_tv_template)
5037		},
5038	}, {
5039		.alg = "xchacha20",
5040		.test = alg_test_skcipher,
5041		.suite = {
5042			.cipher = __VECS(xchacha20_tv_template)
5043		},
5044	}, {
5045		.alg = "xts(aes)",
5046		.generic_driver = "xts(ecb(aes-generic))",
5047		.test = alg_test_skcipher,
5048		.fips_allowed = 1,
5049		.suite = {
5050			.cipher = __VECS(aes_xts_tv_template)
 
 
 
 
 
 
 
 
 
5051		}
5052	}, {
5053		.alg = "xts(camellia)",
5054		.generic_driver = "xts(ecb(camellia-generic))",
5055		.test = alg_test_skcipher,
5056		.suite = {
5057			.cipher = __VECS(camellia_xts_tv_template)
 
 
 
 
 
 
 
 
 
5058		}
5059	}, {
5060		.alg = "xts(cast6)",
5061		.generic_driver = "xts(ecb(cast6-generic))",
5062		.test = alg_test_skcipher,
5063		.suite = {
5064			.cipher = __VECS(cast6_xts_tv_template)
 
 
 
 
 
 
 
 
 
5065		}
5066	}, {
5067		/* Same as xts(aes) except the key is stored in
5068		 * hardware secure memory which we reference by index
5069		 */
5070		.alg = "xts(paes)",
5071		.test = alg_test_null,
5072		.fips_allowed = 1,
5073	}, {
5074		.alg = "xts(serpent)",
5075		.generic_driver = "xts(ecb(serpent-generic))",
5076		.test = alg_test_skcipher,
5077		.suite = {
5078			.cipher = __VECS(serpent_xts_tv_template)
 
 
 
 
 
 
 
 
 
5079		}
5080	}, {
5081		.alg = "xts(twofish)",
5082		.generic_driver = "xts(ecb(twofish-generic))",
5083		.test = alg_test_skcipher,
5084		.suite = {
5085			.cipher = __VECS(tf_xts_tv_template)
5086		}
5087	}, {
5088		.alg = "xts4096(paes)",
5089		.test = alg_test_null,
5090		.fips_allowed = 1,
5091	}, {
5092		.alg = "xts512(paes)",
5093		.test = alg_test_null,
5094		.fips_allowed = 1,
5095	}, {
5096		.alg = "xxhash64",
5097		.test = alg_test_hash,
5098		.fips_allowed = 1,
5099		.suite = {
5100			.hash = __VECS(xxhash64_tv_template)
5101		}
5102	}, {
5103		.alg = "zlib-deflate",
5104		.test = alg_test_comp,
5105		.fips_allowed = 1,
5106		.suite = {
5107			.comp = {
5108				.comp = __VECS(zlib_deflate_comp_tv_template),
5109				.decomp = __VECS(zlib_deflate_decomp_tv_template)
5110			}
5111		}
5112	}, {
5113		.alg = "zstd",
5114		.test = alg_test_comp,
5115		.fips_allowed = 1,
5116		.suite = {
5117			.comp = {
5118				.comp = __VECS(zstd_comp_tv_template),
5119				.decomp = __VECS(zstd_decomp_tv_template)
 
 
 
 
 
 
5120			}
5121		}
5122	}
5123};
5124
5125static void alg_check_test_descs_order(void)
 
 
5126{
5127	int i;
5128
 
 
 
 
 
 
5129	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
5130		int diff = strcmp(alg_test_descs[i - 1].alg,
5131				  alg_test_descs[i].alg);
5132
5133		if (WARN_ON(diff > 0)) {
5134			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
5135				alg_test_descs[i - 1].alg,
5136				alg_test_descs[i].alg);
5137		}
5138
5139		if (WARN_ON(diff == 0)) {
5140			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
5141				alg_test_descs[i].alg);
5142		}
5143	}
5144}
5145
5146static void alg_check_testvec_configs(void)
5147{
5148	int i;
5149
5150	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
5151		WARN_ON(!valid_testvec_config(
5152				&default_cipher_testvec_configs[i]));
5153
5154	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
5155		WARN_ON(!valid_testvec_config(
5156				&default_hash_testvec_configs[i]));
5157}
5158
/* One-time initialization, invoked via DO_ONCE() on the first alg_test() call. */
static void testmgr_onetime_init(void)
{
	/* Verify testmgr's own tables before any algorithm is tested. */
	alg_check_test_descs_order();
	alg_check_testvec_configs();

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	pr_warn("alg: extra crypto tests enabled.  This is intended for developer use only.\n");
#endif
}
5168
5169static int alg_find_test(const char *alg)
5170{
5171	int start = 0;
5172	int end = ARRAY_SIZE(alg_test_descs);
5173
5174	while (start < end) {
5175		int i = (start + end) / 2;
5176		int diff = strcmp(alg_test_descs[i].alg, alg);
5177
5178		if (diff > 0) {
5179			end = i;
5180			continue;
5181		}
5182
5183		if (diff < 0) {
5184			start = i + 1;
5185			continue;
5186		}
5187
5188		return i;
5189	}
5190
5191	return -1;
5192}
5193
/**
 * alg_test() - run the self-tests for an algorithm implementation
 * @driver: name of the specific implementation, e.g. "aes-generic"
 * @alg: name of the algorithm, e.g. "aes"
 * @type: algorithm type flags
 * @mask: mask of type flag bits to consider
 *
 * Looks up the applicable test suite(s) by name and runs them.  Returns 0
 * on success or when no tests are registered for the algorithm, and a
 * nonzero value on failure.  When a test fails and fips_enabled or
 * panic_on_fail is set, the kernel panics instead of returning.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/*
	 * The "notests" module parameter can skip the self-tests, except in
	 * FIPS mode where running them is mandatory.
	 */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/* Bare (single-block) ciphers are tested via their "ecb(alg)"
		 * entry in alg_test_descs[]. */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/*
	 * Tests may be registered under the algorithm name, the driver name,
	 * or both; run every entry that matches.
	 */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i) /* don't run the same entry twice */
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (rc && (fips_enabled || panic_on_fail)) {
		fips_fail_notify();
		panic("alg: self-tests for %s (%s) failed in %s mode!\n",
		      driver, alg, fips_enabled ? "fips" : "panic_on_fail");
	}

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
5260
5261#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
5262
5263EXPORT_SYMBOL_GPL(alg_test);
v3.15
 
   1/*
   2 * Algorithm testing framework and tests.
   3 *
   4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
   5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
   6 * Copyright (c) 2007 Nokia Siemens Networks
   7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 
   8 *
   9 * Updated RFC4106 AES-GCM testing.
  10 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
  11 *             Adrian Hoban <adrian.hoban@intel.com>
  12 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
  13 *             Tadeusz Struk (tadeusz.struk@intel.com)
  14 *    Copyright (c) 2010, Intel Corporation.
  15 *
  16 * This program is free software; you can redistribute it and/or modify it
  17 * under the terms of the GNU General Public License as published by the Free
  18 * Software Foundation; either version 2 of the License, or (at your option)
  19 * any later version.
  20 *
  21 */
  22
 
  23#include <crypto/hash.h>
 
  24#include <linux/err.h>
 
  25#include <linux/module.h>
 
 
  26#include <linux/scatterlist.h>
  27#include <linux/slab.h>
  28#include <linux/string.h>
  29#include <crypto/rng.h>
 
 
 
 
 
  30
  31#include "internal.h"
  32
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
  33#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
  34
  35/* a perfect nop */
  36int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
  37{
  38	return 0;
  39}
  40
  41#else
  42
  43#include "testmgr.h"
  44
  45/*
  46 * Need slab memory for testing (size in number of pages).
  47 */
  48#define XBUFSIZE	8
  49
  50/*
  51 * Indexes into the xbuf to simulate cross-page access.
  52 */
  53#define IDX1		32
  54#define IDX2		32400
  55#define IDX3		1
  56#define IDX4		8193
  57#define IDX5		22222
  58#define IDX6		17101
  59#define IDX7		27333
  60#define IDX8		3000
  61
  62/*
  63* Used by test_cipher()
  64*/
  65#define ENCRYPT 1
  66#define DECRYPT 0
  67
/* Completion context used to wait for asynchronous crypto requests. */
struct tcrypt_result {
	struct completion completion;	/* signalled by tcrypt_complete() */
	int err;			/* final status of the request */
};

/* AEAD test vectors, split into encryption and decryption sets. */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Block/stream cipher test vectors, encryption and decryption sets. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Compression test vectors, compression and decompression sets. */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Partial (pcomp) compression test vectors. */
struct pcomp_test_suite {
	struct {
		struct pcomp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Hash/digest test vectors. */
struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

/* Deterministic RNG (CPRNG) test vectors. */
struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

/* One entry in the alg_test_descs[] table: maps an algorithm name to its
 * test routine and test-vector suite.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct pcomp_test_suite pcomp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
	} suite;
};

/* Byte offsets into the xbuf pages used to build cross-page scatterlists. */
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
 128
/* Dump @len bytes of @buf to the kernel log (16 bytes/line, offsets shown),
 * used when a test vector comparison fails.
 */
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}
 135
/* Async request completion callback: record the final status and wake the
 * waiter.  An -EINPROGRESS notification only means the request was accepted
 * by the backlog, so it is ignored -- the real completion comes later.
 */
static void tcrypt_complete(struct crypto_async_request *req, int err)
{
	struct tcrypt_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}
 146
 147static int testmgr_alloc_buf(char *buf[XBUFSIZE])
 148{
 149	int i;
 150
 151	for (i = 0; i < XBUFSIZE; i++) {
 152		buf[i] = (void *)__get_free_page(GFP_KERNEL);
 153		if (!buf[i])
 154			goto err_free_buf;
 155	}
 156
 157	return 0;
 158
 159err_free_buf:
 160	while (i-- > 0)
 161		free_page((unsigned long)buf[i]);
 162
 163	return -ENOMEM;
 164}
 165
 166static void testmgr_free_buf(char *buf[XBUFSIZE])
 
 
 
 
 
 167{
 168	int i;
 169
 170	for (i = 0; i < XBUFSIZE; i++)
 171		free_page((unsigned long)buf[i]);
 172}
 173
/* Wait for one (possibly asynchronous) hash operation to finish.
 * @ret is the value returned by the crypto_ahash_*() call: if it indicates
 * the request was queued (-EINPROGRESS) or backlogged (-EBUSY), sleep until
 * tcrypt_complete() fires and return the recorded status; otherwise pass
 * the synchronous result straight through.  The completion is re-armed so
 * the same tcrypt_result can be reused for the next operation.
 */
static int do_one_async_hash_op(struct ahash_request *req,
				struct tcrypt_result *tr,
				int ret)
{
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible(&tr->completion);
		if (!ret)
			ret = tr->err;
		reinit_completion(&tr->completion);
	}
	return ret;
}
 186
 187static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 188		       unsigned int tcount, bool use_digest,
 189		       const int align_offset)
 190{
 191	const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
 192	unsigned int i, j, k, temp;
 193	struct scatterlist sg[8];
 194	char result[64];
 195	struct ahash_request *req;
 196	struct tcrypt_result tresult;
 197	void *hash_buff;
 198	char *xbuf[XBUFSIZE];
 199	int ret = -ENOMEM;
 
 
 
 
 
 
 
 
 
 200
 201	if (testmgr_alloc_buf(xbuf))
 202		goto out_nobuf;
 
 
 
 
 
 
 203
 204	init_completion(&tresult.completion);
 
 205
 206	req = ahash_request_alloc(tfm, GFP_KERNEL);
 207	if (!req) {
 208		printk(KERN_ERR "alg: hash: Failed to allocate request for "
 209		       "%s\n", algo);
 210		goto out_noreq;
 
 
 
 
 
 
 
 211	}
 212	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 213				   tcrypt_complete, &tresult);
 214
 215	j = 0;
 216	for (i = 0; i < tcount; i++) {
 217		if (template[i].np)
 218			continue;
 
 
 
 
 
 
 
 
 
 
 
 219
 220		ret = -EINVAL;
 221		if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
 222			goto out;
 
 
 
 
 223
 224		j++;
 225		memset(result, 0, 64);
 
 
 226
 227		hash_buff = xbuf[0];
 228		hash_buff += align_offset;
 
 
 229
 230		memcpy(hash_buff, template[i].plaintext, template[i].psize);
 231		sg_init_one(&sg[0], hash_buff, template[i].psize);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 232
 233		if (template[i].ksize) {
 234			crypto_ahash_clear_flags(tfm, ~0);
 235			ret = crypto_ahash_setkey(tfm, template[i].key,
 236						  template[i].ksize);
 237			if (ret) {
 238				printk(KERN_ERR "alg: hash: setkey failed on "
 239				       "test %d for %s: ret=%d\n", j, algo,
 240				       -ret);
 241				goto out;
 242			}
 
 
 
 
 
 
 
 243		}
 
 
 
 
 
 
 
 244
 245		ahash_request_set_crypt(req, sg, result, template[i].psize);
 246		if (use_digest) {
 247			ret = do_one_async_hash_op(req, &tresult,
 248						   crypto_ahash_digest(req));
 249			if (ret) {
 250				pr_err("alg: hash: digest failed on test %d "
 251				       "for %s: ret=%d\n", j, algo, -ret);
 252				goto out;
 253			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 254		} else {
 255			ret = do_one_async_hash_op(req, &tresult,
 256						   crypto_ahash_init(req));
 257			if (ret) {
 258				pr_err("alt: hash: init failed on test %d "
 259				       "for %s: ret=%d\n", j, algo, -ret);
 260				goto out;
 261			}
 262			ret = do_one_async_hash_op(req, &tresult,
 263						   crypto_ahash_update(req));
 264			if (ret) {
 265				pr_err("alt: hash: update failed on test %d "
 266				       "for %s: ret=%d\n", j, algo, -ret);
 267				goto out;
 268			}
 269			ret = do_one_async_hash_op(req, &tresult,
 270						   crypto_ahash_final(req));
 271			if (ret) {
 272				pr_err("alt: hash: final failed on test %d "
 273				       "for %s: ret=%d\n", j, algo, -ret);
 274				goto out;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 275			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 276		}
 
 277
 278		if (memcmp(result, template[i].digest,
 279			   crypto_ahash_digestsize(tfm))) {
 280			printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
 281			       j, algo);
 282			hexdump(result, crypto_ahash_digestsize(tfm));
 283			ret = -EINVAL;
 284			goto out;
 285		}
 286	}
 
 
 
 
 
 
 
 
 
 
 
 287
 288	j = 0;
 289	for (i = 0; i < tcount; i++) {
 290		/* alignment tests are only done with continuous buffers */
 291		if (align_offset != 0)
 
 
 
 
 
 
 
 
 
 292			break;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 293
 294		if (template[i].np) {
 295			j++;
 296			memset(result, 0, 64);
 
 
 
 297
 298			temp = 0;
 299			sg_init_table(sg, template[i].np);
 300			ret = -EINVAL;
 301			for (k = 0; k < template[i].np; k++) {
 302				if (WARN_ON(offset_in_page(IDX[k]) +
 303					    template[i].tap[k] > PAGE_SIZE))
 304					goto out;
 305				sg_set_buf(&sg[k],
 306					   memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
 307						  offset_in_page(IDX[k]),
 308						  template[i].plaintext + temp,
 309						  template[i].tap[k]),
 310					   template[i].tap[k]);
 311				temp += template[i].tap[k];
 312			}
 313
 314			if (template[i].ksize) {
 315				crypto_ahash_clear_flags(tfm, ~0);
 316				ret = crypto_ahash_setkey(tfm, template[i].key,
 317							  template[i].ksize);
 318
 319				if (ret) {
 320					printk(KERN_ERR "alg: hash: setkey "
 321					       "failed on chunking test %d "
 322					       "for %s: ret=%d\n", j, algo,
 323					       -ret);
 324					goto out;
 325				}
 326			}
 327
 328			ahash_request_set_crypt(req, sg, result,
 329						template[i].psize);
 330			ret = crypto_ahash_digest(req);
 331			switch (ret) {
 
 
 
 
 
 
 
 
 332			case 0:
 
 
 
 
 333				break;
 334			case -EINPROGRESS:
 335			case -EBUSY:
 336				ret = wait_for_completion_interruptible(
 337					&tresult.completion);
 338				if (!ret && !(ret = tresult.err)) {
 339					reinit_completion(&tresult.completion);
 340					break;
 341				}
 342				/* fall through */
 343			default:
 344				printk(KERN_ERR "alg: hash: digest failed "
 345				       "on chunking test %d for %s: "
 346				       "ret=%d\n", j, algo, -ret);
 347				goto out;
 348			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 349
 350			if (memcmp(result, template[i].digest,
 351				   crypto_ahash_digestsize(tfm))) {
 352				printk(KERN_ERR "alg: hash: Chunking test %d "
 353				       "failed for %s\n", j, algo);
 354				hexdump(result, crypto_ahash_digestsize(tfm));
 355				ret = -EINVAL;
 356				goto out;
 357			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 358		}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 359	}
 360
 361	ret = 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 362
 363out:
 364	ahash_request_free(req);
 365out_noreq:
 366	testmgr_free_buf(xbuf);
 367out_nobuf:
 368	return ret;
 369}
 370
 371static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
 372		     unsigned int tcount, bool use_digest)
 
 
 
 
 
 
 373{
 374	unsigned int alignmask;
 375	int ret;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 376
 377	ret = __test_hash(tfm, template, tcount, use_digest, 0);
 378	if (ret)
 379		return ret;
 
 
 
 
 
 380
 381	/* test unaligned buffers, check with one byte offset */
 382	ret = __test_hash(tfm, template, tcount, use_digest, 1);
 383	if (ret)
 384		return ret;
 385
 386	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 387	if (alignmask) {
 388		/* Check if alignment mask for tfm is correctly set. */
 389		ret = __test_hash(tfm, template, tcount, use_digest,
 390				  alignmask + 1);
 391		if (ret)
 392			return ret;
 393	}
 394
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 395	return 0;
 396}
 397
/*
 * __test_aead - run AEAD test vectors against one transform.
 * @tfm: the AEAD transform under test
 * @enc: ENCRYPT or DECRYPT
 * @template: array of test vectors
 * @tcount: number of entries in @template
 * @diff_dst: if true, encrypt/decrypt into a separate output buffer
 * @align_offset: byte offset applied to buffers to test misaligned I/O
 *	(chunked tests are skipped when nonzero)
 *
 * Pass one handles contiguous vectors; pass two replays the chunked vectors
 * (template[i].np != 0) through cross-page scatterlists and additionally
 * checks for writes past the expected output (buffer corruption).
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_aead(struct crypto_aead *tfm, int enc,
		       struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *asg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct tcrypt_result result;
	unsigned int authsize;
	void *input;
	void *output;
	void *assoc;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	asg = &sg[8];
	sgout = &asg[8];

	/* log-message decorations: destination mode and direction */
	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	/* Pass 1: contiguous (non-chunked) vectors. */
	for (i = 0, j = 0; i < tcount; i++) {
		if (!template[i].np) {
			j++;

			/* some templates have no input data but they will
			 * touch input
			 */
			input = xbuf[0];
			input += align_offset;
			assoc = axbuf[0];

			ret = -EINVAL;
			if (WARN_ON(align_offset + template[i].ilen >
				    PAGE_SIZE || template[i].alen > PAGE_SIZE))
				goto out;

			memcpy(input, template[i].input, template[i].ilen);
			memcpy(assoc, template[i].assoc, template[i].alen);
			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);

			key = template[i].key;

			/* a vector marked .fail expects setkey to reject it */
			ret = crypto_aead_setkey(tfm, key,
						 template[i].klen);
			if (!ret == template[i].fail) {
				pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
				       d, j, algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			/* tag length is the ciphertext/plaintext size delta */
			authsize = abs(template[i].rlen - template[i].ilen);
			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
				       d, authsize, j, algo);
				goto out;
			}

			if (diff_dst) {
				output = xoutbuf[0];
				output += align_offset;
				sg_init_one(&sg[0], input, template[i].ilen);
				sg_init_one(&sgout[0], output,
					    template[i].rlen);
			} else {
				/* in-place: leave room for the appended tag */
				sg_init_one(&sg[0], input,
					    template[i].ilen +
						(enc ? authsize : 0));
				output = input;
			}

			sg_init_one(&asg[0], assoc, template[i].alen);

			aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					       template[i].ilen, iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
					       d, e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					reinit_completion(&result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
				       d, e, j, algo, -ret);
				goto out;
			}

			q = output;
			if (memcmp(q, template[i].result, template[i].rlen)) {
				pr_err("alg: aead%s: Test %d failed on %s for %s\n",
				       d, j, e, algo);
				hexdump(q, template[i].rlen);
				ret = -EINVAL;
				goto out;
			}
		}
	}

	/* Pass 2: chunked vectors via cross-page scatterlists. */
	for (i = 0, j = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (template[i].np) {
			j++;

			if (template[i].iv)
				memcpy(iv, template[i].iv, MAX_IVLEN);
			else
				memset(iv, 0, MAX_IVLEN);

			crypto_aead_clear_flags(tfm, ~0);
			if (template[i].wk)
				crypto_aead_set_flags(
					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
			key = template[i].key;

			ret = crypto_aead_setkey(tfm, key, template[i].klen);
			if (!ret == template[i].fail) {
				pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
				       d, j, algo, crypto_aead_get_flags(tfm));
				goto out;
			} else if (ret)
				continue;

			authsize = abs(template[i].rlen - template[i].ilen);

			ret = -EINVAL;
			sg_init_table(sg, template[i].np);
			if (diff_dst)
				sg_init_table(sgout, template[i].np);
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].tap[k] > PAGE_SIZE))
					goto out;

				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memcpy(q, template[i].input + temp,
				       template[i].tap[k]);

				sg_set_buf(&sg[k], q, template[i].tap[k]);

				if (diff_dst) {
					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);

					memset(q, 0, template[i].tap[k]);

					sg_set_buf(&sgout[k], q,
						   template[i].tap[k]);
				}

				/* zero the byte after the chunk (plus the tag
				 * on the final chunk when encrypting) so that
				 * overruns can be detected afterwards
				 */
				n = template[i].tap[k];
				if (k == template[i].np - 1 && enc)
					n += authsize;
				if (offset_in_page(q) + n < PAGE_SIZE)
					q[n] = 0;

				temp += template[i].tap[k];
			}

			ret = crypto_aead_setauthsize(tfm, authsize);
			if (ret) {
				pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
				       d, authsize, j, algo);
				goto out;
			}

			if (enc) {
				/* the last chunk must also hold the tag */
				if (WARN_ON(sg[k - 1].offset +
					    sg[k - 1].length + authsize >
					    PAGE_SIZE)) {
					ret = -EINVAL;
					goto out;
				}

				if (diff_dst)
					sgout[k - 1].length += authsize;
				else
					sg[k - 1].length += authsize;
			}

			sg_init_table(asg, template[i].anp);
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].anp; k++) {
				if (WARN_ON(offset_in_page(IDX[k]) +
					    template[i].atap[k] > PAGE_SIZE))
					goto out;
				sg_set_buf(&asg[k],
					   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
						  offset_in_page(IDX[k]),
						  template[i].assoc + temp,
						  template[i].atap[k]),
					   template[i].atap[k]);
				temp += template[i].atap[k];
			}

			aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					       template[i].ilen,
					       iv);

			aead_request_set_assoc(req, asg, template[i].alen);

			ret = enc ?
				crypto_aead_encrypt(req) :
				crypto_aead_decrypt(req);

			switch (ret) {
			case 0:
				if (template[i].novrfy) {
					/* verification was supposed to fail */
					pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
					       d, e, j, algo);
					/* so really, we got a bad message */
					ret = -EBADMSG;
					goto out;
				}
				break;
			case -EINPROGRESS:
			case -EBUSY:
				ret = wait_for_completion_interruptible(
					&result.completion);
				if (!ret && !(ret = result.err)) {
					reinit_completion(&result.completion);
					break;
				}
			case -EBADMSG:
				if (template[i].novrfy)
					/* verification failure was expected */
					continue;
				/* fall through */
			default:
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
				       d, e, j, algo, -ret);
				goto out;
			}

			/* verify each chunk of the result, then look for
			 * stray bytes written past it
			 */
			ret = -EINVAL;
			for (k = 0, temp = 0; k < template[i].np; k++) {
				if (diff_dst)
					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);
				else
					q = xbuf[IDX[k] >> PAGE_SHIFT] +
					    offset_in_page(IDX[k]);

				n = template[i].tap[k];
				if (k == template[i].np - 1)
					n += enc ? authsize : -authsize;

				if (memcmp(q, template[i].result + temp, n)) {
					pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
					       d, j, e, k, algo);
					hexdump(q, n);
					goto out;
				}

				q += n;
				if (k == template[i].np - 1 && !enc) {
					if (!diff_dst &&
						memcmp(q, template[i].input +
						      temp + n, authsize))
						n = authsize;
					else
						n = 0;
				} else {
					for (n = 0; offset_in_page(q + n) &&
						    q[n]; n++)
						;
				}
				if (n) {
					pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
					       d, j, e, k, algo, n);
					hexdump(q, n);
					goto out;
				}

				temp += template[i].tap[k];
			}
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	return ret;
}
 772
 773static int test_aead(struct crypto_aead *tfm, int enc,
 774		     struct aead_testvec *template, unsigned int tcount)
 775{
 776	unsigned int alignmask;
 777	int ret;
 
 
 
 778
 779	/* test 'dst == src' case */
 780	ret = __test_aead(tfm, enc, template, tcount, false, 0);
 781	if (ret)
 782		return ret;
 783
 784	/* test 'dst != src' case */
 785	ret = __test_aead(tfm, enc, template, tcount, true, 0);
 786	if (ret)
 787		return ret;
 
 
 788
 789	/* test unaligned buffers, check with one byte offset */
 790	ret = __test_aead(tfm, enc, template, tcount, true, 1);
 791	if (ret)
 792		return ret;
 
 
 
 793
 794	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
 795	if (alignmask) {
 796		/* Check if alignment mask for tfm is correctly set. */
 797		ret = __test_aead(tfm, enc, template, tcount, true,
 798				  alignmask + 1);
 799		if (ret)
 800			return ret;
 801	}
 802
 803	return 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 804}
 805
/*
 * test_cipher - run single-block cipher test vectors against a transform.
 * @tfm: the cipher transform under test
 * @enc: ENCRYPT or DECRYPT
 * @template: array of test vectors (chunked vectors, .np != 0, are skipped
 *	since single-block ciphers take no scatterlists)
 * @tcount: number of entries in @template
 *
 * Each vector is processed one block at a time in place and the output is
 * compared against the expected result.  Returns 0 on success or a negative
 * errno on the first failure.
 */
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       struct cipher_testvec *template, unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
	        e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, template[i].input, template[i].ilen);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		/* a vector marked .fail expects setkey to reject it */
		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (!ret == template[i].fail) {
			printk(KERN_ERR "alg: cipher: setkey failed "
			       "on test %d for %s: flags=%x\n", j,
			       algo, crypto_cipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* process the vector one cipher block at a time, in place */
		for (k = 0; k < template[i].ilen;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
 880
 881static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
 882			   struct cipher_testvec *template, unsigned int tcount,
 883			   const bool diff_dst, const int align_offset)
 
 
 
 884{
 885	const char *algo =
 886		crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
 887	unsigned int i, j, k, n, temp;
 888	char *q;
 889	struct ablkcipher_request *req;
 890	struct scatterlist sg[8];
 891	struct scatterlist sgout[8];
 892	const char *e, *d;
 893	struct tcrypt_result result;
 894	void *data;
 895	char iv[MAX_IVLEN];
 896	char *xbuf[XBUFSIZE];
 897	char *xoutbuf[XBUFSIZE];
 898	int ret = -ENOMEM;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 899
 900	if (testmgr_alloc_buf(xbuf))
 901		goto out_nobuf;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 902
 903	if (diff_dst && testmgr_alloc_buf(xoutbuf))
 904		goto out_nooutbuf;
 
 
 
 
 
 
 
 
 905
 906	if (diff_dst)
 907		d = "-ddst";
 908	else
 909		d = "";
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 910
 911	if (enc == ENCRYPT)
 912	        e = "encryption";
 913	else
 914		e = "decryption";
 
 
 
 
 
 
 
 
 
 915
 916	init_completion(&result.completion);
 
 
 
 
 
 
 
 
 
 
 
 
 917
 918	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
 919	if (!req) {
 920		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
 921		       d, algo);
 922		goto out;
 
 923	}
 924
 925	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
 926					tcrypt_complete, &result);
 927
 928	j = 0;
 929	for (i = 0; i < tcount; i++) {
 930		if (template[i].iv)
 931			memcpy(iv, template[i].iv, MAX_IVLEN);
 932		else
 933			memset(iv, 0, MAX_IVLEN);
 
 
 
 
 
 
 
 
 934
 935		if (!(template[i].np) || (template[i].also_non_np)) {
 936			j++;
 
 
 
 
 
 937
 938			ret = -EINVAL;
 939			if (WARN_ON(align_offset + template[i].ilen >
 940				    PAGE_SIZE))
 941				goto out;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 942
 943			data = xbuf[0];
 944			data += align_offset;
 945			memcpy(data, template[i].input, template[i].ilen);
 946
 947			crypto_ablkcipher_clear_flags(tfm, ~0);
 948			if (template[i].wk)
 949				crypto_ablkcipher_set_flags(
 950					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
 951
 952			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
 953						       template[i].klen);
 954			if (!ret == template[i].fail) {
 955				pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
 956				       d, j, algo,
 957				       crypto_ablkcipher_get_flags(tfm));
 958				goto out;
 959			} else if (ret)
 960				continue;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 961
 962			sg_init_one(&sg[0], data, template[i].ilen);
 963			if (diff_dst) {
 964				data = xoutbuf[0];
 965				data += align_offset;
 966				sg_init_one(&sgout[0], data, template[i].ilen);
 967			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 968
 969			ablkcipher_request_set_crypt(req, sg,
 970						     (diff_dst) ? sgout : sg,
 971						     template[i].ilen, iv);
 972			ret = enc ?
 973				crypto_ablkcipher_encrypt(req) :
 974				crypto_ablkcipher_decrypt(req);
 975
 976			switch (ret) {
 977			case 0:
 978				break;
 979			case -EINPROGRESS:
 980			case -EBUSY:
 981				ret = wait_for_completion_interruptible(
 982					&result.completion);
 983				if (!ret && !((ret = result.err))) {
 984					reinit_completion(&result.completion);
 985					break;
 986				}
 987				/* fall through */
 988			default:
 989				pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
 990				       d, e, j, algo, -ret);
 991				goto out;
 992			}
 993
 994			q = data;
 995			if (memcmp(q, template[i].result, template[i].rlen)) {
 996				pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
 997				       d, j, e, algo);
 998				hexdump(q, template[i].rlen);
 999				ret = -EINVAL;
1000				goto out;
1001			}
1002		}
1003	}
1004
1005	j = 0;
1006	for (i = 0; i < tcount; i++) {
1007		/* alignment tests are only done with continuous buffers */
1008		if (align_offset != 0)
1009			break;
1010
1011		if (template[i].iv)
1012			memcpy(iv, template[i].iv, MAX_IVLEN);
1013		else
1014			memset(iv, 0, MAX_IVLEN);
 
 
 
 
 
 
 
 
1015
1016		if (template[i].np) {
1017			j++;
 
 
 
1018
1019			crypto_ablkcipher_clear_flags(tfm, ~0);
1020			if (template[i].wk)
1021				crypto_ablkcipher_set_flags(
1022					tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1023
1024			ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1025						       template[i].klen);
1026			if (!ret == template[i].fail) {
1027				pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1028				       d, j, algo,
1029				       crypto_ablkcipher_get_flags(tfm));
1030				goto out;
1031			} else if (ret)
1032				continue;
1033
1034			temp = 0;
1035			ret = -EINVAL;
1036			sg_init_table(sg, template[i].np);
1037			if (diff_dst)
1038				sg_init_table(sgout, template[i].np);
1039			for (k = 0; k < template[i].np; k++) {
1040				if (WARN_ON(offset_in_page(IDX[k]) +
1041					    template[i].tap[k] > PAGE_SIZE))
1042					goto out;
1043
1044				q = xbuf[IDX[k] >> PAGE_SHIFT] +
1045				    offset_in_page(IDX[k]);
1046
1047				memcpy(q, template[i].input + temp,
1048				       template[i].tap[k]);
1049
1050				if (offset_in_page(q) + template[i].tap[k] <
1051				    PAGE_SIZE)
1052					q[template[i].tap[k]] = 0;
1053
1054				sg_set_buf(&sg[k], q, template[i].tap[k]);
1055				if (diff_dst) {
1056					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1057					    offset_in_page(IDX[k]);
1058
1059					sg_set_buf(&sgout[k], q,
1060						   template[i].tap[k]);
1061
1062					memset(q, 0, template[i].tap[k]);
1063					if (offset_in_page(q) +
1064					    template[i].tap[k] < PAGE_SIZE)
1065						q[template[i].tap[k]] = 0;
1066				}
1067
1068				temp += template[i].tap[k];
1069			}
 
 
 
 
1070
1071			ablkcipher_request_set_crypt(req, sg,
1072					(diff_dst) ? sgout : sg,
1073					template[i].ilen, iv);
1074
1075			ret = enc ?
1076				crypto_ablkcipher_encrypt(req) :
1077				crypto_ablkcipher_decrypt(req);
1078
1079			switch (ret) {
1080			case 0:
1081				break;
1082			case -EINPROGRESS:
1083			case -EBUSY:
1084				ret = wait_for_completion_interruptible(
1085					&result.completion);
1086				if (!ret && !((ret = result.err))) {
1087					reinit_completion(&result.completion);
1088					break;
1089				}
1090				/* fall through */
1091			default:
1092				pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1093				       d, e, j, algo, -ret);
1094				goto out;
1095			}
1096
1097			temp = 0;
1098			ret = -EINVAL;
1099			for (k = 0; k < template[i].np; k++) {
1100				if (diff_dst)
1101					q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1102					    offset_in_page(IDX[k]);
1103				else
1104					q = xbuf[IDX[k] >> PAGE_SHIFT] +
1105					    offset_in_page(IDX[k]);
1106
1107				if (memcmp(q, template[i].result + temp,
1108					   template[i].tap[k])) {
1109					pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1110					       d, j, e, k, algo);
1111					hexdump(q, template[i].tap[k]);
1112					goto out;
1113				}
1114
1115				q += template[i].tap[k];
1116				for (n = 0; offset_in_page(q + n) && q[n]; n++)
1117					;
1118				if (n) {
1119					pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1120					       d, j, e, k, algo, n);
1121					hexdump(q, n);
1122					goto out;
1123				}
1124				temp += template[i].tap[k];
1125			}
1126		}
1127	}
1128
1129	ret = 0;
 
 
 
1130
 
 
 
 
 
 
 
 
 
 
 
1131out:
1132	ablkcipher_request_free(req);
1133	if (diff_dst)
1134		testmgr_free_buf(xoutbuf);
1135out_nooutbuf:
1136	testmgr_free_buf(xbuf);
1137out_nobuf:
1138	return ret;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1139}
1140
1141static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1142			 struct cipher_testvec *template, unsigned int tcount)
1143{
1144	unsigned int alignmask;
1145	int ret;
 
 
 
1146
1147	/* test 'dst == src' case */
1148	ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1149	if (ret)
1150		return ret;
1151
1152	/* test 'dst != src' case */
1153	ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1154	if (ret)
1155		return ret;
 
 
1156
1157	/* test unaligned buffers, check with one byte offset */
1158	ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1159	if (ret)
1160		return ret;
 
 
 
1161
1162	alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1163	if (alignmask) {
1164		/* Check if alignment mask for tfm is correctly set. */
1165		ret = __test_skcipher(tfm, enc, template, tcount, true,
1166				      alignmask + 1);
1167		if (ret)
1168			return ret;
1169	}
1170
1171	return 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1172}
1173
1174static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1175		     struct comp_testvec *dtemplate, int ctcount, int dtcount)
 
 
1176{
1177	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
 
1178	unsigned int i;
1179	char result[COMP_BUF_SIZE];
1180	int ret;
1181
 
 
 
 
 
 
 
 
 
 
1182	for (i = 0; i < ctcount; i++) {
1183		int ilen;
1184		unsigned int dlen = COMP_BUF_SIZE;
1185
1186		memset(result, 0, sizeof (result));
 
1187
1188		ilen = ctemplate[i].inlen;
1189		ret = crypto_comp_compress(tfm, ctemplate[i].input,
1190		                           ilen, result, &dlen);
1191		if (ret) {
1192			printk(KERN_ERR "alg: comp: compression failed "
1193			       "on test %d for %s: ret=%d\n", i + 1, algo,
1194			       -ret);
1195			goto out;
1196		}
1197
1198		if (dlen != ctemplate[i].outlen) {
 
 
 
 
 
 
 
 
 
 
1199			printk(KERN_ERR "alg: comp: Compression test %d "
1200			       "failed for %s: output len = %d\n", i + 1, algo,
1201			       dlen);
1202			ret = -EINVAL;
1203			goto out;
1204		}
1205
1206		if (memcmp(result, ctemplate[i].output, dlen)) {
1207			printk(KERN_ERR "alg: comp: Compression test %d "
1208			       "failed for %s\n", i + 1, algo);
1209			hexdump(result, dlen);
 
1210			ret = -EINVAL;
1211			goto out;
1212		}
1213	}
1214
1215	for (i = 0; i < dtcount; i++) {
1216		int ilen;
1217		unsigned int dlen = COMP_BUF_SIZE;
1218
1219		memset(result, 0, sizeof (result));
1220
1221		ilen = dtemplate[i].inlen;
1222		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1223		                             ilen, result, &dlen);
1224		if (ret) {
1225			printk(KERN_ERR "alg: comp: decompression failed "
1226			       "on test %d for %s: ret=%d\n", i + 1, algo,
1227			       -ret);
1228			goto out;
1229		}
1230
1231		if (dlen != dtemplate[i].outlen) {
1232			printk(KERN_ERR "alg: comp: Decompression test %d "
1233			       "failed for %s: output len = %d\n", i + 1, algo,
1234			       dlen);
1235			ret = -EINVAL;
1236			goto out;
1237		}
1238
1239		if (memcmp(result, dtemplate[i].output, dlen)) {
1240			printk(KERN_ERR "alg: comp: Decompression test %d "
1241			       "failed for %s\n", i + 1, algo);
1242			hexdump(result, dlen);
1243			ret = -EINVAL;
1244			goto out;
1245		}
1246	}
1247
1248	ret = 0;
1249
1250out:
 
 
1251	return ret;
1252}
1253
/*
 * Exercise the partial (streaming) compression interface.  Each vector is
 * fed in two halves of input, with only half of the expected output space
 * available at first, to force the implementation through its incremental
 * update path.  The remaining output space is supplied before final().
 * Returns 0 on success or a negative errno on the first failure.
 */
static int test_pcomp(struct crypto_pcomp *tfm,
		      struct pcomp_testvec *ctemplate,
		      struct pcomp_testvec *dtemplate, int ctcount,
		      int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
	unsigned int i;
	char result[COMP_BUF_SIZE];
	int res;

	/* Streaming compression vectors. */
	for (i = 0; i < ctcount; i++) {
		struct comp_request req;
		/* bytes reported produced by update()/final() return values */
		unsigned int produced = 0;

		res = crypto_compress_setup(tfm, ctemplate[i].params,
					    ctemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: compression setup failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		res = crypto_compress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: compression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		/* First pass: half the input, half the expected output room. */
		req.next_in = ctemplate[i].input;
		req.avail_in = ctemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = ctemplate[i].outlen / 2;

		/*
		 * -EAGAIN with all input consumed means "need more output
		 * space", which is expected here; anything else is fatal.
		 */
		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (ctemplate[i].inlen + 1) / 2;

		res = crypto_compress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: compression update failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;

		res = crypto_compress_final(tfm, &req);
		if (res < 0) {
			pr_err("alg: pcomp: compression final failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		produced += res;

		/* Total output consumed must match the template exactly. */
		if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "output len = %d (expected %d)\n", i + 1, algo,
			       COMP_BUF_SIZE - req.avail_out,
			       ctemplate[i].outlen);
			return -EINVAL;
		}

		/* Cross-check against the sum of the reported return values. */
		if (produced != ctemplate[i].outlen) {
			pr_err("alg: comp: Compression test %d failed for %s: "
			       "returned len = %u (expected %d)\n", i + 1,
			       algo, produced, ctemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
			pr_err("alg: pcomp: Compression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, ctemplate[i].outlen);
			return -EINVAL;
		}
	}

	/* Streaming decompression vectors — same split-feeding scheme. */
	for (i = 0; i < dtcount; i++) {
		struct comp_request req;
		unsigned int produced = 0;

		res = crypto_decompress_setup(tfm, dtemplate[i].params,
					      dtemplate[i].paramsize);
		if (res) {
			pr_err("alg: pcomp: decompression setup failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		res = crypto_decompress_init(tfm);
		if (res) {
			pr_err("alg: pcomp: decompression init failed on test "
			       "%d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}

		memset(result, 0, sizeof(result));

		req.next_in = dtemplate[i].input;
		req.avail_in = dtemplate[i].inlen / 2;
		req.next_out = result;
		req.avail_out = dtemplate[i].outlen / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Add remaining input data */
		req.avail_in += (dtemplate[i].inlen + 1) / 2;

		res = crypto_decompress_update(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression update failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		/* Provide remaining output space */
		req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;

		/*
		 * NOTE(review): unlike the compression path above, final()
		 * here tolerates -EAGAIN when all input was consumed —
		 * presumably intentional for decompressors, but confirm.
		 */
		res = crypto_decompress_final(tfm, &req);
		if (res < 0 && (res != -EAGAIN || req.avail_in)) {
			pr_err("alg: pcomp: decompression final failed on "
			       "test %d for %s: error=%d\n", i + 1, algo, res);
			return res;
		}
		if (res > 0)
			produced += res;

		if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: output len = %d (expected %d)\n", i + 1,
			       algo, COMP_BUF_SIZE - req.avail_out,
			       dtemplate[i].outlen);
			return -EINVAL;
		}

		if (produced != dtemplate[i].outlen) {
			pr_err("alg: comp: Decompression test %d failed for "
			       "%s: returned len = %u (expected %d)\n", i + 1,
			       algo, produced, dtemplate[i].outlen);
			return -EINVAL;
		}

		if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
			pr_err("alg: pcomp: Decompression test %d failed for "
			       "%s\n", i + 1, algo);
			hexdump(result, dtemplate[i].outlen);
			return -EINVAL;
		}
	}

	return 0;
}
1429
1430
1431static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1432		      unsigned int tcount)
1433{
1434	const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1435	int err = 0, i, j, seedsize;
1436	u8 *seed;
1437	char result[32];
1438
1439	seedsize = crypto_rng_seedsize(tfm);
1440
1441	seed = kmalloc(seedsize, GFP_KERNEL);
1442	if (!seed) {
1443		printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1444		       "for %s\n", algo);
1445		return -ENOMEM;
1446	}
1447
1448	for (i = 0; i < tcount; i++) {
1449		memset(result, 0, 32);
1450
1451		memcpy(seed, template[i].v, template[i].vlen);
1452		memcpy(seed + template[i].vlen, template[i].key,
1453		       template[i].klen);
1454		memcpy(seed + template[i].vlen + template[i].klen,
1455		       template[i].dt, template[i].dtlen);
1456
1457		err = crypto_rng_reset(tfm, seed, seedsize);
1458		if (err) {
1459			printk(KERN_ERR "alg: cprng: Failed to reset rng "
1460			       "for %s\n", algo);
1461			goto out;
1462		}
1463
1464		for (j = 0; j < template[i].loops; j++) {
1465			err = crypto_rng_get_bytes(tfm, result,
1466						   template[i].rlen);
1467			if (err != template[i].rlen) {
1468				printk(KERN_ERR "alg: cprng: Failed to obtain "
1469				       "the correct amount of random data for "
1470				       "%s (requested %d, got %d)\n", algo,
1471				       template[i].rlen, err);
1472				goto out;
1473			}
1474		}
1475
1476		err = memcmp(result, template[i].result,
1477			     template[i].rlen);
1478		if (err) {
1479			printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1480			       i, algo);
1481			hexdump(result, template[i].rlen);
1482			err = -EINVAL;
1483			goto out;
1484		}
1485	}
1486
1487out:
1488	kfree(seed);
1489	return err;
1490}
1491
1492static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1493			 u32 type, u32 mask)
1494{
1495	struct crypto_aead *tfm;
1496	int err = 0;
 
1497
1498	tfm = crypto_alloc_aead(driver, type, mask);
1499	if (IS_ERR(tfm)) {
1500		printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1501		       "%ld\n", driver, PTR_ERR(tfm));
1502		return PTR_ERR(tfm);
1503	}
1504
1505	if (desc->suite.aead.enc.vecs) {
1506		err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1507				desc->suite.aead.enc.count);
1508		if (err)
1509			goto out;
1510	}
 
 
 
 
 
 
 
 
 
1511
1512	if (!err && desc->suite.aead.dec.vecs)
1513		err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1514				desc->suite.aead.dec.count);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1515
1516out:
1517	crypto_free_aead(tfm);
1518	return err;
1519}
1520
1521static int alg_test_cipher(const struct alg_test_desc *desc,
1522			   const char *driver, u32 type, u32 mask)
1523{
1524	struct crypto_cipher *tfm;
1525	int err = 0;
 
 
 
 
 
1526
1527	tfm = crypto_alloc_cipher(driver, type, mask);
1528	if (IS_ERR(tfm)) {
1529		printk(KERN_ERR "alg: cipher: Failed to load transform for "
1530		       "%s: %ld\n", driver, PTR_ERR(tfm));
 
 
 
 
 
 
 
 
1531		return PTR_ERR(tfm);
1532	}
1533
1534	if (desc->suite.cipher.enc.vecs) {
1535		err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1536				  desc->suite.cipher.enc.count);
1537		if (err)
1538			goto out;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1539	}
1540
1541	if (desc->suite.cipher.dec.vecs)
1542		err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1543				  desc->suite.cipher.dec.count);
1544
1545out:
1546	crypto_free_cipher(tfm);
1547	return err;
1548}
1549
1550static int alg_test_skcipher(const struct alg_test_desc *desc,
1551			     const char *driver, u32 type, u32 mask)
 
1552{
1553	struct crypto_ablkcipher *tfm;
1554	int err = 0;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1555
1556	tfm = crypto_alloc_ablkcipher(driver, type, mask);
1557	if (IS_ERR(tfm)) {
1558		printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1559		       "%s: %ld\n", driver, PTR_ERR(tfm));
1560		return PTR_ERR(tfm);
 
 
 
 
 
 
 
 
1561	}
1562
1563	if (desc->suite.cipher.enc.vecs) {
1564		err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1565				    desc->suite.cipher.enc.count);
1566		if (err)
1567			goto out;
 
 
 
 
 
 
 
 
1568	}
1569
1570	if (desc->suite.cipher.dec.vecs)
1571		err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1572				    desc->suite.cipher.dec.count);
1573
1574out:
1575	crypto_free_ablkcipher(tfm);
1576	return err;
 
1577}
1578
1579static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
 
1580			 u32 type, u32 mask)
1581{
1582	struct crypto_comp *tfm;
1583	int err;
 
 
 
 
 
 
1584
1585	tfm = crypto_alloc_comp(driver, type, mask);
1586	if (IS_ERR(tfm)) {
1587		printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1588		       "%ld\n", driver, PTR_ERR(tfm));
1589		return PTR_ERR(tfm);
 
 
 
1590	}
 
1591
1592	err = test_comp(tfm, desc->suite.comp.comp.vecs,
1593			desc->suite.comp.decomp.vecs,
1594			desc->suite.comp.comp.count,
1595			desc->suite.comp.decomp.count);
1596
1597	crypto_free_comp(tfm);
1598	return err;
1599}
1600
1601static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1602			  u32 type, u32 mask)
1603{
1604	struct crypto_pcomp *tfm;
1605	int err;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1606
1607	tfm = crypto_alloc_pcomp(driver, type, mask);
1608	if (IS_ERR(tfm)) {
1609		pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1610		       driver, PTR_ERR(tfm));
1611		return PTR_ERR(tfm);
1612	}
1613
1614	err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1615			 desc->suite.pcomp.decomp.vecs,
1616			 desc->suite.pcomp.comp.count,
1617			 desc->suite.pcomp.decomp.count);
 
 
 
 
 
 
1618
1619	crypto_free_pcomp(tfm);
 
 
 
 
 
 
 
1620	return err;
1621}
1622
1623static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1624			 u32 type, u32 mask)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1625{
1626	struct crypto_ahash *tfm;
1627	int err;
1628
1629	tfm = crypto_alloc_ahash(driver, type, mask);
1630	if (IS_ERR(tfm)) {
1631		printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1632		       "%ld\n", driver, PTR_ERR(tfm));
1633		return PTR_ERR(tfm);
1634	}
 
 
 
1635
1636	err = test_hash(tfm, desc->suite.hash.vecs,
1637			desc->suite.hash.count, true);
1638	if (!err)
1639		err = test_hash(tfm, desc->suite.hash.vecs,
1640				desc->suite.hash.count, false);
1641
1642	crypto_free_ahash(tfm);
1643	return err;
 
 
1644}
1645
/*
 * Test crc32c: first run the generic hash vectors, then poke the shash
 * descriptor's internal CRC state directly and check that final()
 * produces the bitwise complement of that state (crc32c's final step).
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	/* Standard known-answer vectors via the ahash interface. */
	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		/*
		 * On-stack descriptor with a VLA context sized for this
		 * tfm (crypto_shash_descsize() is only known at runtime).
		 */
		struct {
			struct shash_desc shash;
			char ctx[crypto_shash_descsize(tfm)];
		} sdesc;

		sdesc.shash.tfm = tfm;
		sdesc.shash.flags = 0;

		/*
		 * Plant an arbitrary partial-CRC value directly in the
		 * descriptor context.  NOTE(review): le32_to_cpu() on a
		 * host-order constant looks inverted (cpu_to_le32 would be
		 * expected if the ctx is little-endian) — the comparison
		 * below uses the raw constant, so confirm big-endian
		 * behavior.
		 */
		*(u32 *)sdesc.ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		/* final() must emit the complement of the planted state. */
		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
1694
1695static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1696			  u32 type, u32 mask)
 
1697{
1698	struct crypto_rng *rng;
1699	int err;
 
 
 
 
 
 
1700
1701	rng = crypto_alloc_rng(driver, type, mask);
1702	if (IS_ERR(rng)) {
1703		printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1704		       "%ld\n", driver, PTR_ERR(rng));
1705		return PTR_ERR(rng);
1706	}
 
 
1707
1708	err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
 
 
 
 
1709
1710	crypto_free_rng(rng);
 
 
 
 
 
 
 
 
1711
 
1712	return err;
1713}
1714
/*
 * No-op test used for algorithm entries that carry no vectors of their
 * own (e.g. internal helper implementations); always reports success.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
1720
 
 
1721/* Please keep this list sorted by algorithm name. */
1722static const struct alg_test_desc alg_test_descs[] = {
1723	{
1724		.alg = "__cbc-cast5-avx",
1725		.test = alg_test_null,
 
 
 
 
1726	}, {
1727		.alg = "__cbc-cast6-avx",
1728		.test = alg_test_null,
 
 
 
 
1729	}, {
1730		.alg = "__cbc-serpent-avx",
1731		.test = alg_test_null,
 
 
 
1732	}, {
1733		.alg = "__cbc-serpent-avx2",
1734		.test = alg_test_null,
 
 
 
1735	}, {
1736		.alg = "__cbc-serpent-sse2",
1737		.test = alg_test_null,
 
 
 
1738	}, {
1739		.alg = "__cbc-twofish-avx",
1740		.test = alg_test_null,
1741	}, {
1742		.alg = "__driver-cbc-aes-aesni",
1743		.test = alg_test_null,
1744		.fips_allowed = 1,
 
 
 
1745	}, {
1746		.alg = "__driver-cbc-camellia-aesni",
1747		.test = alg_test_null,
 
 
 
1748	}, {
1749		.alg = "__driver-cbc-camellia-aesni-avx2",
1750		.test = alg_test_null,
 
 
 
 
1751	}, {
1752		.alg = "__driver-cbc-cast5-avx",
1753		.test = alg_test_null,
 
1754	}, {
1755		.alg = "__driver-cbc-cast6-avx",
1756		.test = alg_test_null,
 
 
 
1757	}, {
1758		.alg = "__driver-cbc-serpent-avx",
1759		.test = alg_test_null,
 
1760	}, {
1761		.alg = "__driver-cbc-serpent-avx2",
1762		.test = alg_test_null,
 
 
 
1763	}, {
1764		.alg = "__driver-cbc-serpent-sse2",
1765		.test = alg_test_null,
 
 
 
 
1766	}, {
1767		.alg = "__driver-cbc-twofish-avx",
1768		.test = alg_test_null,
1769	}, {
1770		.alg = "__driver-ecb-aes-aesni",
1771		.test = alg_test_null,
1772		.fips_allowed = 1,
 
 
 
1773	}, {
1774		.alg = "__driver-ecb-camellia-aesni",
1775		.test = alg_test_null,
 
 
 
1776	}, {
1777		.alg = "__driver-ecb-camellia-aesni-avx2",
1778		.test = alg_test_null,
 
 
 
 
1779	}, {
1780		.alg = "__driver-ecb-cast5-avx",
1781		.test = alg_test_null,
 
1782	}, {
1783		.alg = "__driver-ecb-cast6-avx",
1784		.test = alg_test_null,
 
1785	}, {
1786		.alg = "__driver-ecb-serpent-avx",
1787		.test = alg_test_null,
 
 
 
1788	}, {
1789		.alg = "__driver-ecb-serpent-avx2",
1790		.test = alg_test_null,
 
 
 
 
1791	}, {
1792		.alg = "__driver-ecb-serpent-sse2",
1793		.test = alg_test_null,
 
1794	}, {
1795		.alg = "__driver-ecb-twofish-avx",
1796		.test = alg_test_null,
1797	}, {
1798		.alg = "__ghash-pclmulqdqni",
1799		.test = alg_test_null,
1800		.fips_allowed = 1,
1801	}, {
1802		.alg = "ansi_cprng",
1803		.test = alg_test_cprng,
1804		.fips_allowed = 1,
1805		.suite = {
1806			.cprng = {
1807				.vecs = ansi_cprng_aes_tv_template,
1808				.count = ANSI_CPRNG_AES_TEST_VECTORS
1809			}
1810		}
1811	}, {
1812		.alg = "authenc(hmac(md5),ecb(cipher_null))",
1813		.test = alg_test_aead,
1814		.fips_allowed = 1,
1815		.suite = {
1816			.aead = {
1817				.enc = {
1818					.vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
1819					.count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1820				},
1821				.dec = {
1822					.vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
1823					.count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1824				}
1825			}
1826		}
1827	}, {
1828		.alg = "authenc(hmac(sha1),cbc(aes))",
1829		.test = alg_test_aead,
1830		.fips_allowed = 1,
1831		.suite = {
1832			.aead = {
1833				.enc = {
1834					.vecs = hmac_sha1_aes_cbc_enc_tv_template,
1835					.count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1836				}
1837			}
1838		}
1839	}, {
1840		.alg = "authenc(hmac(sha1),ecb(cipher_null))",
1841		.test = alg_test_aead,
1842		.fips_allowed = 1,
1843		.suite = {
1844			.aead = {
1845				.enc = {
1846					.vecs = hmac_sha1_ecb_cipher_null_enc_tv_template,
1847					.count = HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VECTORS
1848				},
1849				.dec = {
1850					.vecs = hmac_sha1_ecb_cipher_null_dec_tv_template,
1851					.count = HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VECTORS
1852				}
1853			}
1854		}
1855	}, {
1856		.alg = "authenc(hmac(sha256),cbc(aes))",
1857		.test = alg_test_aead,
1858		.fips_allowed = 1,
1859		.suite = {
1860			.aead = {
1861				.enc = {
1862					.vecs = hmac_sha256_aes_cbc_enc_tv_template,
1863					.count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1864				}
1865			}
1866		}
1867	}, {
1868		.alg = "authenc(hmac(sha512),cbc(aes))",
1869		.test = alg_test_aead,
1870		.fips_allowed = 1,
1871		.suite = {
1872			.aead = {
1873				.enc = {
1874					.vecs = hmac_sha512_aes_cbc_enc_tv_template,
1875					.count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
1876				}
1877			}
1878		}
1879	}, {
1880		.alg = "cbc(aes)",
1881		.test = alg_test_skcipher,
1882		.fips_allowed = 1,
1883		.suite = {
1884			.cipher = {
1885				.enc = {
1886					.vecs = aes_cbc_enc_tv_template,
1887					.count = AES_CBC_ENC_TEST_VECTORS
1888				},
1889				.dec = {
1890					.vecs = aes_cbc_dec_tv_template,
1891					.count = AES_CBC_DEC_TEST_VECTORS
1892				}
1893			}
1894		}
1895	}, {
1896		.alg = "cbc(anubis)",
1897		.test = alg_test_skcipher,
1898		.suite = {
1899			.cipher = {
1900				.enc = {
1901					.vecs = anubis_cbc_enc_tv_template,
1902					.count = ANUBIS_CBC_ENC_TEST_VECTORS
1903				},
1904				.dec = {
1905					.vecs = anubis_cbc_dec_tv_template,
1906					.count = ANUBIS_CBC_DEC_TEST_VECTORS
1907				}
1908			}
1909		}
1910	}, {
1911		.alg = "cbc(blowfish)",
1912		.test = alg_test_skcipher,
1913		.suite = {
1914			.cipher = {
1915				.enc = {
1916					.vecs = bf_cbc_enc_tv_template,
1917					.count = BF_CBC_ENC_TEST_VECTORS
1918				},
1919				.dec = {
1920					.vecs = bf_cbc_dec_tv_template,
1921					.count = BF_CBC_DEC_TEST_VECTORS
1922				}
1923			}
1924		}
1925	}, {
1926		.alg = "cbc(camellia)",
1927		.test = alg_test_skcipher,
1928		.suite = {
1929			.cipher = {
1930				.enc = {
1931					.vecs = camellia_cbc_enc_tv_template,
1932					.count = CAMELLIA_CBC_ENC_TEST_VECTORS
1933				},
1934				.dec = {
1935					.vecs = camellia_cbc_dec_tv_template,
1936					.count = CAMELLIA_CBC_DEC_TEST_VECTORS
1937				}
1938			}
1939		}
1940	}, {
1941		.alg = "cbc(cast5)",
1942		.test = alg_test_skcipher,
1943		.suite = {
1944			.cipher = {
1945				.enc = {
1946					.vecs = cast5_cbc_enc_tv_template,
1947					.count = CAST5_CBC_ENC_TEST_VECTORS
1948				},
1949				.dec = {
1950					.vecs = cast5_cbc_dec_tv_template,
1951					.count = CAST5_CBC_DEC_TEST_VECTORS
1952				}
1953			}
1954		}
1955	}, {
1956		.alg = "cbc(cast6)",
1957		.test = alg_test_skcipher,
1958		.suite = {
1959			.cipher = {
1960				.enc = {
1961					.vecs = cast6_cbc_enc_tv_template,
1962					.count = CAST6_CBC_ENC_TEST_VECTORS
1963				},
1964				.dec = {
1965					.vecs = cast6_cbc_dec_tv_template,
1966					.count = CAST6_CBC_DEC_TEST_VECTORS
1967				}
1968			}
1969		}
1970	}, {
1971		.alg = "cbc(des)",
1972		.test = alg_test_skcipher,
1973		.suite = {
1974			.cipher = {
1975				.enc = {
1976					.vecs = des_cbc_enc_tv_template,
1977					.count = DES_CBC_ENC_TEST_VECTORS
1978				},
1979				.dec = {
1980					.vecs = des_cbc_dec_tv_template,
1981					.count = DES_CBC_DEC_TEST_VECTORS
1982				}
1983			}
1984		}
1985	}, {
1986		.alg = "cbc(des3_ede)",
1987		.test = alg_test_skcipher,
1988		.fips_allowed = 1,
1989		.suite = {
1990			.cipher = {
1991				.enc = {
1992					.vecs = des3_ede_cbc_enc_tv_template,
1993					.count = DES3_EDE_CBC_ENC_TEST_VECTORS
1994				},
1995				.dec = {
1996					.vecs = des3_ede_cbc_dec_tv_template,
1997					.count = DES3_EDE_CBC_DEC_TEST_VECTORS
1998				}
1999			}
2000		}
 
 
 
 
2001	}, {
2002		.alg = "cbc(serpent)",
2003		.test = alg_test_skcipher,
2004		.suite = {
2005			.cipher = {
2006				.enc = {
2007					.vecs = serpent_cbc_enc_tv_template,
2008					.count = SERPENT_CBC_ENC_TEST_VECTORS
2009				},
2010				.dec = {
2011					.vecs = serpent_cbc_dec_tv_template,
2012					.count = SERPENT_CBC_DEC_TEST_VECTORS
2013				}
2014			}
2015		}
2016	}, {
2017		.alg = "cbc(twofish)",
2018		.test = alg_test_skcipher,
2019		.suite = {
2020			.cipher = {
2021				.enc = {
2022					.vecs = tf_cbc_enc_tv_template,
2023					.count = TF_CBC_ENC_TEST_VECTORS
2024				},
2025				.dec = {
2026					.vecs = tf_cbc_dec_tv_template,
2027					.count = TF_CBC_DEC_TEST_VECTORS
2028				}
2029			}
2030		}
2031	}, {
2032		.alg = "ccm(aes)",
 
2033		.test = alg_test_aead,
2034		.fips_allowed = 1,
2035		.suite = {
2036			.aead = {
2037				.enc = {
2038					.vecs = aes_ccm_enc_tv_template,
2039					.count = AES_CCM_ENC_TEST_VECTORS
2040				},
2041				.dec = {
2042					.vecs = aes_ccm_dec_tv_template,
2043					.count = AES_CCM_DEC_TEST_VECTORS
2044				}
2045			}
2046		}
2047	}, {
 
 
 
 
 
 
 
 
 
 
 
 
 
2048		.alg = "cmac(aes)",
 
2049		.test = alg_test_hash,
2050		.suite = {
2051			.hash = {
2052				.vecs = aes_cmac128_tv_template,
2053				.count = CMAC_AES_TEST_VECTORS
2054			}
2055		}
2056	}, {
2057		.alg = "cmac(des3_ede)",
 
2058		.test = alg_test_hash,
2059		.suite = {
2060			.hash = {
2061				.vecs = des3_ede_cmac64_tv_template,
2062				.count = CMAC_DES3_EDE_TEST_VECTORS
2063			}
2064		}
2065	}, {
2066		.alg = "compress_null",
2067		.test = alg_test_null,
2068	}, {
 
 
 
 
 
 
 
2069		.alg = "crc32c",
2070		.test = alg_test_crc32c,
2071		.fips_allowed = 1,
2072		.suite = {
2073			.hash = {
2074				.vecs = crc32c_tv_template,
2075				.count = CRC32C_TEST_VECTORS
2076			}
2077		}
2078	}, {
2079		.alg = "crct10dif",
2080		.test = alg_test_hash,
2081		.fips_allowed = 1,
2082		.suite = {
2083			.hash = {
2084				.vecs = crct10dif_tv_template,
2085				.count = CRCT10DIF_TEST_VECTORS
2086			}
2087		}
2088	}, {
2089		.alg = "cryptd(__driver-cbc-aes-aesni)",
2090		.test = alg_test_null,
2091		.fips_allowed = 1,
2092	}, {
2093		.alg = "cryptd(__driver-cbc-camellia-aesni)",
2094		.test = alg_test_null,
2095	}, {
2096		.alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2097		.test = alg_test_null,
2098	}, {
2099		.alg = "cryptd(__driver-cbc-serpent-avx2)",
2100		.test = alg_test_null,
2101	}, {
2102		.alg = "cryptd(__driver-ecb-aes-aesni)",
2103		.test = alg_test_null,
2104		.fips_allowed = 1,
2105	}, {
2106		.alg = "cryptd(__driver-ecb-camellia-aesni)",
2107		.test = alg_test_null,
2108	}, {
2109		.alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2110		.test = alg_test_null,
2111	}, {
2112		.alg = "cryptd(__driver-ecb-cast5-avx)",
2113		.test = alg_test_null,
2114	}, {
2115		.alg = "cryptd(__driver-ecb-cast6-avx)",
2116		.test = alg_test_null,
2117	}, {
2118		.alg = "cryptd(__driver-ecb-serpent-avx)",
2119		.test = alg_test_null,
2120	}, {
2121		.alg = "cryptd(__driver-ecb-serpent-avx2)",
2122		.test = alg_test_null,
2123	}, {
2124		.alg = "cryptd(__driver-ecb-serpent-sse2)",
2125		.test = alg_test_null,
2126	}, {
2127		.alg = "cryptd(__driver-ecb-twofish-avx)",
2128		.test = alg_test_null,
2129	}, {
2130		.alg = "cryptd(__driver-gcm-aes-aesni)",
2131		.test = alg_test_null,
2132		.fips_allowed = 1,
2133	}, {
2134		.alg = "cryptd(__ghash-pclmulqdqni)",
2135		.test = alg_test_null,
2136		.fips_allowed = 1,
2137	}, {
2138		.alg = "ctr(aes)",
2139		.test = alg_test_skcipher,
2140		.fips_allowed = 1,
2141		.suite = {
2142			.cipher = {
2143				.enc = {
2144					.vecs = aes_ctr_enc_tv_template,
2145					.count = AES_CTR_ENC_TEST_VECTORS
2146				},
2147				.dec = {
2148					.vecs = aes_ctr_dec_tv_template,
2149					.count = AES_CTR_DEC_TEST_VECTORS
2150				}
2151			}
2152		}
2153	}, {
2154		.alg = "ctr(blowfish)",
2155		.test = alg_test_skcipher,
2156		.suite = {
2157			.cipher = {
2158				.enc = {
2159					.vecs = bf_ctr_enc_tv_template,
2160					.count = BF_CTR_ENC_TEST_VECTORS
2161				},
2162				.dec = {
2163					.vecs = bf_ctr_dec_tv_template,
2164					.count = BF_CTR_DEC_TEST_VECTORS
2165				}
2166			}
2167		}
2168	}, {
2169		.alg = "ctr(camellia)",
2170		.test = alg_test_skcipher,
2171		.suite = {
2172			.cipher = {
2173				.enc = {
2174					.vecs = camellia_ctr_enc_tv_template,
2175					.count = CAMELLIA_CTR_ENC_TEST_VECTORS
2176				},
2177				.dec = {
2178					.vecs = camellia_ctr_dec_tv_template,
2179					.count = CAMELLIA_CTR_DEC_TEST_VECTORS
2180				}
2181			}
2182		}
2183	}, {
2184		.alg = "ctr(cast5)",
2185		.test = alg_test_skcipher,
2186		.suite = {
2187			.cipher = {
2188				.enc = {
2189					.vecs = cast5_ctr_enc_tv_template,
2190					.count = CAST5_CTR_ENC_TEST_VECTORS
2191				},
2192				.dec = {
2193					.vecs = cast5_ctr_dec_tv_template,
2194					.count = CAST5_CTR_DEC_TEST_VECTORS
2195				}
2196			}
2197		}
2198	}, {
2199		.alg = "ctr(cast6)",
2200		.test = alg_test_skcipher,
2201		.suite = {
2202			.cipher = {
2203				.enc = {
2204					.vecs = cast6_ctr_enc_tv_template,
2205					.count = CAST6_CTR_ENC_TEST_VECTORS
2206				},
2207				.dec = {
2208					.vecs = cast6_ctr_dec_tv_template,
2209					.count = CAST6_CTR_DEC_TEST_VECTORS
2210				}
2211			}
2212		}
2213	}, {
2214		.alg = "ctr(des)",
2215		.test = alg_test_skcipher,
2216		.suite = {
2217			.cipher = {
2218				.enc = {
2219					.vecs = des_ctr_enc_tv_template,
2220					.count = DES_CTR_ENC_TEST_VECTORS
2221				},
2222				.dec = {
2223					.vecs = des_ctr_dec_tv_template,
2224					.count = DES_CTR_DEC_TEST_VECTORS
2225				}
2226			}
2227		}
2228	}, {
2229		.alg = "ctr(des3_ede)",
2230		.test = alg_test_skcipher,
 
2231		.suite = {
2232			.cipher = {
2233				.enc = {
2234					.vecs = des3_ede_ctr_enc_tv_template,
2235					.count = DES3_EDE_CTR_ENC_TEST_VECTORS
2236				},
2237				.dec = {
2238					.vecs = des3_ede_ctr_dec_tv_template,
2239					.count = DES3_EDE_CTR_DEC_TEST_VECTORS
2240				}
2241			}
2242		}
2243	}, {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2244		.alg = "ctr(serpent)",
2245		.test = alg_test_skcipher,
2246		.suite = {
2247			.cipher = {
2248				.enc = {
2249					.vecs = serpent_ctr_enc_tv_template,
2250					.count = SERPENT_CTR_ENC_TEST_VECTORS
2251				},
2252				.dec = {
2253					.vecs = serpent_ctr_dec_tv_template,
2254					.count = SERPENT_CTR_DEC_TEST_VECTORS
2255				}
2256			}
2257		}
2258	}, {
2259		.alg = "ctr(twofish)",
2260		.test = alg_test_skcipher,
2261		.suite = {
2262			.cipher = {
2263				.enc = {
2264					.vecs = tf_ctr_enc_tv_template,
2265					.count = TF_CTR_ENC_TEST_VECTORS
2266				},
2267				.dec = {
2268					.vecs = tf_ctr_dec_tv_template,
2269					.count = TF_CTR_DEC_TEST_VECTORS
2270				}
2271			}
2272		}
2273	}, {
2274		.alg = "cts(cbc(aes))",
2275		.test = alg_test_skcipher,
 
2276		.suite = {
2277			.cipher = {
2278				.enc = {
2279					.vecs = cts_mode_enc_tv_template,
2280					.count = CTS_MODE_ENC_TEST_VECTORS
2281				},
2282				.dec = {
2283					.vecs = cts_mode_dec_tv_template,
2284					.count = CTS_MODE_DEC_TEST_VECTORS
2285				}
2286			}
2287		}
2288	}, {
 
 
 
 
 
 
 
2289		.alg = "deflate",
2290		.test = alg_test_comp,
2291		.fips_allowed = 1,
2292		.suite = {
2293			.comp = {
2294				.comp = {
2295					.vecs = deflate_comp_tv_template,
2296					.count = DEFLATE_COMP_TEST_VECTORS
2297				},
2298				.decomp = {
2299					.vecs = deflate_decomp_tv_template,
2300					.count = DEFLATE_DECOMP_TEST_VECTORS
2301				}
2302			}
2303		}
2304	}, {
 
 
 
 
 
 
 
2305		.alg = "digest_null",
2306		.test = alg_test_null,
2307	}, {
2308		.alg = "ecb(__aes-aesni)",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2309		.test = alg_test_null,
 
 
2310		.fips_allowed = 1,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2311	}, {
2312		.alg = "ecb(aes)",
2313		.test = alg_test_skcipher,
2314		.fips_allowed = 1,
2315		.suite = {
2316			.cipher = {
2317				.enc = {
2318					.vecs = aes_enc_tv_template,
2319					.count = AES_ENC_TEST_VECTORS
2320				},
2321				.dec = {
2322					.vecs = aes_dec_tv_template,
2323					.count = AES_DEC_TEST_VECTORS
2324				}
2325			}
2326		}
2327	}, {
2328		.alg = "ecb(anubis)",
2329		.test = alg_test_skcipher,
2330		.suite = {
2331			.cipher = {
2332				.enc = {
2333					.vecs = anubis_enc_tv_template,
2334					.count = ANUBIS_ENC_TEST_VECTORS
2335				},
2336				.dec = {
2337					.vecs = anubis_dec_tv_template,
2338					.count = ANUBIS_DEC_TEST_VECTORS
2339				}
2340			}
2341		}
2342	}, {
2343		.alg = "ecb(arc4)",
 
2344		.test = alg_test_skcipher,
2345		.suite = {
2346			.cipher = {
2347				.enc = {
2348					.vecs = arc4_enc_tv_template,
2349					.count = ARC4_ENC_TEST_VECTORS
2350				},
2351				.dec = {
2352					.vecs = arc4_dec_tv_template,
2353					.count = ARC4_DEC_TEST_VECTORS
2354				}
2355			}
2356		}
2357	}, {
2358		.alg = "ecb(blowfish)",
2359		.test = alg_test_skcipher,
2360		.suite = {
2361			.cipher = {
2362				.enc = {
2363					.vecs = bf_enc_tv_template,
2364					.count = BF_ENC_TEST_VECTORS
2365				},
2366				.dec = {
2367					.vecs = bf_dec_tv_template,
2368					.count = BF_DEC_TEST_VECTORS
2369				}
2370			}
2371		}
2372	}, {
2373		.alg = "ecb(camellia)",
2374		.test = alg_test_skcipher,
2375		.suite = {
2376			.cipher = {
2377				.enc = {
2378					.vecs = camellia_enc_tv_template,
2379					.count = CAMELLIA_ENC_TEST_VECTORS
2380				},
2381				.dec = {
2382					.vecs = camellia_dec_tv_template,
2383					.count = CAMELLIA_DEC_TEST_VECTORS
2384				}
2385			}
2386		}
2387	}, {
2388		.alg = "ecb(cast5)",
2389		.test = alg_test_skcipher,
2390		.suite = {
2391			.cipher = {
2392				.enc = {
2393					.vecs = cast5_enc_tv_template,
2394					.count = CAST5_ENC_TEST_VECTORS
2395				},
2396				.dec = {
2397					.vecs = cast5_dec_tv_template,
2398					.count = CAST5_DEC_TEST_VECTORS
2399				}
2400			}
2401		}
2402	}, {
2403		.alg = "ecb(cast6)",
2404		.test = alg_test_skcipher,
2405		.suite = {
2406			.cipher = {
2407				.enc = {
2408					.vecs = cast6_enc_tv_template,
2409					.count = CAST6_ENC_TEST_VECTORS
2410				},
2411				.dec = {
2412					.vecs = cast6_dec_tv_template,
2413					.count = CAST6_DEC_TEST_VECTORS
2414				}
2415			}
2416		}
2417	}, {
2418		.alg = "ecb(cipher_null)",
2419		.test = alg_test_null,
 
2420	}, {
2421		.alg = "ecb(des)",
2422		.test = alg_test_skcipher,
2423		.fips_allowed = 1,
2424		.suite = {
2425			.cipher = {
2426				.enc = {
2427					.vecs = des_enc_tv_template,
2428					.count = DES_ENC_TEST_VECTORS
2429				},
2430				.dec = {
2431					.vecs = des_dec_tv_template,
2432					.count = DES_DEC_TEST_VECTORS
2433				}
2434			}
2435		}
2436	}, {
2437		.alg = "ecb(des3_ede)",
2438		.test = alg_test_skcipher,
2439		.fips_allowed = 1,
2440		.suite = {
2441			.cipher = {
2442				.enc = {
2443					.vecs = des3_ede_enc_tv_template,
2444					.count = DES3_EDE_ENC_TEST_VECTORS
2445				},
2446				.dec = {
2447					.vecs = des3_ede_dec_tv_template,
2448					.count = DES3_EDE_DEC_TEST_VECTORS
2449				}
2450			}
2451		}
2452	}, {
2453		.alg = "ecb(fcrypt)",
2454		.test = alg_test_skcipher,
2455		.suite = {
2456			.cipher = {
2457				.enc = {
2458					.vecs = fcrypt_pcbc_enc_tv_template,
2459					.count = 1
2460				},
2461				.dec = {
2462					.vecs = fcrypt_pcbc_dec_tv_template,
2463					.count = 1
2464				}
2465			}
2466		}
2467	}, {
2468		.alg = "ecb(khazad)",
2469		.test = alg_test_skcipher,
2470		.suite = {
2471			.cipher = {
2472				.enc = {
2473					.vecs = khazad_enc_tv_template,
2474					.count = KHAZAD_ENC_TEST_VECTORS
2475				},
2476				.dec = {
2477					.vecs = khazad_dec_tv_template,
2478					.count = KHAZAD_DEC_TEST_VECTORS
2479				}
2480			}
2481		}
2482	}, {
 
 
 
 
 
 
 
2483		.alg = "ecb(seed)",
2484		.test = alg_test_skcipher,
2485		.suite = {
2486			.cipher = {
2487				.enc = {
2488					.vecs = seed_enc_tv_template,
2489					.count = SEED_ENC_TEST_VECTORS
2490				},
2491				.dec = {
2492					.vecs = seed_dec_tv_template,
2493					.count = SEED_DEC_TEST_VECTORS
2494				}
2495			}
2496		}
2497	}, {
2498		.alg = "ecb(serpent)",
2499		.test = alg_test_skcipher,
2500		.suite = {
2501			.cipher = {
2502				.enc = {
2503					.vecs = serpent_enc_tv_template,
2504					.count = SERPENT_ENC_TEST_VECTORS
2505				},
2506				.dec = {
2507					.vecs = serpent_dec_tv_template,
2508					.count = SERPENT_DEC_TEST_VECTORS
2509				}
2510			}
2511		}
2512	}, {
2513		.alg = "ecb(tea)",
2514		.test = alg_test_skcipher,
2515		.suite = {
2516			.cipher = {
2517				.enc = {
2518					.vecs = tea_enc_tv_template,
2519					.count = TEA_ENC_TEST_VECTORS
2520				},
2521				.dec = {
2522					.vecs = tea_dec_tv_template,
2523					.count = TEA_DEC_TEST_VECTORS
2524				}
2525			}
2526		}
2527	}, {
2528		.alg = "ecb(tnepres)",
2529		.test = alg_test_skcipher,
2530		.suite = {
2531			.cipher = {
2532				.enc = {
2533					.vecs = tnepres_enc_tv_template,
2534					.count = TNEPRES_ENC_TEST_VECTORS
2535				},
2536				.dec = {
2537					.vecs = tnepres_dec_tv_template,
2538					.count = TNEPRES_DEC_TEST_VECTORS
2539				}
2540			}
2541		}
2542	}, {
2543		.alg = "ecb(twofish)",
2544		.test = alg_test_skcipher,
2545		.suite = {
2546			.cipher = {
2547				.enc = {
2548					.vecs = tf_enc_tv_template,
2549					.count = TF_ENC_TEST_VECTORS
2550				},
2551				.dec = {
2552					.vecs = tf_dec_tv_template,
2553					.count = TF_DEC_TEST_VECTORS
2554				}
2555			}
2556		}
2557	}, {
2558		.alg = "ecb(xeta)",
2559		.test = alg_test_skcipher,
2560		.suite = {
2561			.cipher = {
2562				.enc = {
2563					.vecs = xeta_enc_tv_template,
2564					.count = XETA_ENC_TEST_VECTORS
2565				},
2566				.dec = {
2567					.vecs = xeta_dec_tv_template,
2568					.count = XETA_DEC_TEST_VECTORS
2569				}
2570			}
2571		}
2572	}, {
2573		.alg = "ecb(xtea)",
2574		.test = alg_test_skcipher,
2575		.suite = {
2576			.cipher = {
2577				.enc = {
2578					.vecs = xtea_enc_tv_template,
2579					.count = XTEA_ENC_TEST_VECTORS
2580				},
2581				.dec = {
2582					.vecs = xtea_dec_tv_template,
2583					.count = XTEA_DEC_TEST_VECTORS
2584				}
2585			}
 
 
 
 
2586		}
2587	}, {
2588		.alg = "gcm(aes)",
2589		.test = alg_test_aead,
2590		.fips_allowed = 1,
2591		.suite = {
2592			.aead = {
2593				.enc = {
2594					.vecs = aes_gcm_enc_tv_template,
2595					.count = AES_GCM_ENC_TEST_VECTORS
2596				},
2597				.dec = {
2598					.vecs = aes_gcm_dec_tv_template,
2599					.count = AES_GCM_DEC_TEST_VECTORS
2600				}
2601			}
2602		}
2603	}, {
2604		.alg = "ghash",
2605		.test = alg_test_hash,
 
2606		.fips_allowed = 1,
2607		.suite = {
2608			.hash = {
2609				.vecs = ghash_tv_template,
2610				.count = GHASH_TEST_VECTORS
2611			}
2612		}
2613	}, {
2614		.alg = "hmac(crc32)",
2615		.test = alg_test_hash,
 
2616		.suite = {
2617			.hash = {
2618				.vecs = bfin_crc_tv_template,
2619				.count = BFIN_CRC_TEST_VECTORS
2620			}
2621		}
2622	}, {
2623		.alg = "hmac(md5)",
2624		.test = alg_test_hash,
2625		.suite = {
2626			.hash = {
2627				.vecs = hmac_md5_tv_template,
2628				.count = HMAC_MD5_TEST_VECTORS
2629			}
2630		}
2631	}, {
2632		.alg = "hmac(rmd128)",
2633		.test = alg_test_hash,
2634		.suite = {
2635			.hash = {
2636				.vecs = hmac_rmd128_tv_template,
2637				.count = HMAC_RMD128_TEST_VECTORS
2638			}
2639		}
2640	}, {
2641		.alg = "hmac(rmd160)",
2642		.test = alg_test_hash,
2643		.suite = {
2644			.hash = {
2645				.vecs = hmac_rmd160_tv_template,
2646				.count = HMAC_RMD160_TEST_VECTORS
2647			}
2648		}
2649	}, {
2650		.alg = "hmac(sha1)",
2651		.test = alg_test_hash,
2652		.fips_allowed = 1,
2653		.suite = {
2654			.hash = {
2655				.vecs = hmac_sha1_tv_template,
2656				.count = HMAC_SHA1_TEST_VECTORS
2657			}
2658		}
2659	}, {
2660		.alg = "hmac(sha224)",
2661		.test = alg_test_hash,
2662		.fips_allowed = 1,
2663		.suite = {
2664			.hash = {
2665				.vecs = hmac_sha224_tv_template,
2666				.count = HMAC_SHA224_TEST_VECTORS
2667			}
2668		}
2669	}, {
2670		.alg = "hmac(sha256)",
2671		.test = alg_test_hash,
2672		.fips_allowed = 1,
2673		.suite = {
2674			.hash = {
2675				.vecs = hmac_sha256_tv_template,
2676				.count = HMAC_SHA256_TEST_VECTORS
2677			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2678		}
2679	}, {
2680		.alg = "hmac(sha384)",
2681		.test = alg_test_hash,
2682		.fips_allowed = 1,
2683		.suite = {
2684			.hash = {
2685				.vecs = hmac_sha384_tv_template,
2686				.count = HMAC_SHA384_TEST_VECTORS
2687			}
2688		}
2689	}, {
2690		.alg = "hmac(sha512)",
2691		.test = alg_test_hash,
2692		.fips_allowed = 1,
2693		.suite = {
2694			.hash = {
2695				.vecs = hmac_sha512_tv_template,
2696				.count = HMAC_SHA512_TEST_VECTORS
2697			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2698		}
2699	}, {
2700		.alg = "lrw(aes)",
 
2701		.test = alg_test_skcipher,
2702		.suite = {
2703			.cipher = {
2704				.enc = {
2705					.vecs = aes_lrw_enc_tv_template,
2706					.count = AES_LRW_ENC_TEST_VECTORS
2707				},
2708				.dec = {
2709					.vecs = aes_lrw_dec_tv_template,
2710					.count = AES_LRW_DEC_TEST_VECTORS
2711				}
2712			}
2713		}
2714	}, {
2715		.alg = "lrw(camellia)",
 
2716		.test = alg_test_skcipher,
2717		.suite = {
2718			.cipher = {
2719				.enc = {
2720					.vecs = camellia_lrw_enc_tv_template,
2721					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
2722				},
2723				.dec = {
2724					.vecs = camellia_lrw_dec_tv_template,
2725					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
2726				}
2727			}
2728		}
2729	}, {
2730		.alg = "lrw(cast6)",
 
2731		.test = alg_test_skcipher,
2732		.suite = {
2733			.cipher = {
2734				.enc = {
2735					.vecs = cast6_lrw_enc_tv_template,
2736					.count = CAST6_LRW_ENC_TEST_VECTORS
2737				},
2738				.dec = {
2739					.vecs = cast6_lrw_dec_tv_template,
2740					.count = CAST6_LRW_DEC_TEST_VECTORS
2741				}
2742			}
2743		}
2744	}, {
2745		.alg = "lrw(serpent)",
 
2746		.test = alg_test_skcipher,
2747		.suite = {
2748			.cipher = {
2749				.enc = {
2750					.vecs = serpent_lrw_enc_tv_template,
2751					.count = SERPENT_LRW_ENC_TEST_VECTORS
2752				},
2753				.dec = {
2754					.vecs = serpent_lrw_dec_tv_template,
2755					.count = SERPENT_LRW_DEC_TEST_VECTORS
2756				}
2757			}
2758		}
2759	}, {
2760		.alg = "lrw(twofish)",
 
2761		.test = alg_test_skcipher,
2762		.suite = {
2763			.cipher = {
2764				.enc = {
2765					.vecs = tf_lrw_enc_tv_template,
2766					.count = TF_LRW_ENC_TEST_VECTORS
2767				},
2768				.dec = {
2769					.vecs = tf_lrw_dec_tv_template,
2770					.count = TF_LRW_DEC_TEST_VECTORS
2771				}
 
 
 
 
 
 
 
 
 
 
 
2772			}
2773		}
2774	}, {
2775		.alg = "lzo",
2776		.test = alg_test_comp,
2777		.fips_allowed = 1,
2778		.suite = {
2779			.comp = {
2780				.comp = {
2781					.vecs = lzo_comp_tv_template,
2782					.count = LZO_COMP_TEST_VECTORS
2783				},
2784				.decomp = {
2785					.vecs = lzo_decomp_tv_template,
2786					.count = LZO_DECOMP_TEST_VECTORS
2787				}
 
 
 
 
2788			}
2789		}
2790	}, {
2791		.alg = "md4",
2792		.test = alg_test_hash,
2793		.suite = {
2794			.hash = {
2795				.vecs = md4_tv_template,
2796				.count = MD4_TEST_VECTORS
2797			}
2798		}
2799	}, {
2800		.alg = "md5",
2801		.test = alg_test_hash,
2802		.suite = {
2803			.hash = {
2804				.vecs = md5_tv_template,
2805				.count = MD5_TEST_VECTORS
2806			}
2807		}
2808	}, {
2809		.alg = "michael_mic",
2810		.test = alg_test_hash,
2811		.suite = {
2812			.hash = {
2813				.vecs = michael_mic_tv_template,
2814				.count = MICHAEL_MIC_TEST_VECTORS
2815			}
 
 
 
2816		}
2817	}, {
2818		.alg = "ofb(aes)",
2819		.test = alg_test_skcipher,
2820		.fips_allowed = 1,
2821		.suite = {
2822			.cipher = {
2823				.enc = {
2824					.vecs = aes_ofb_enc_tv_template,
2825					.count = AES_OFB_ENC_TEST_VECTORS
2826				},
2827				.dec = {
2828					.vecs = aes_ofb_dec_tv_template,
2829					.count = AES_OFB_DEC_TEST_VECTORS
2830				}
2831			}
2832		}
2833	}, {
 
 
 
 
 
 
 
2834		.alg = "pcbc(fcrypt)",
2835		.test = alg_test_skcipher,
2836		.suite = {
2837			.cipher = {
2838				.enc = {
2839					.vecs = fcrypt_pcbc_enc_tv_template,
2840					.count = FCRYPT_ENC_TEST_VECTORS
2841				},
2842				.dec = {
2843					.vecs = fcrypt_pcbc_dec_tv_template,
2844					.count = FCRYPT_DEC_TEST_VECTORS
2845				}
2846			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2847		}
2848	}, {
2849		.alg = "rfc3686(ctr(aes))",
2850		.test = alg_test_skcipher,
2851		.fips_allowed = 1,
2852		.suite = {
2853			.cipher = {
2854				.enc = {
2855					.vecs = aes_ctr_rfc3686_enc_tv_template,
2856					.count = AES_CTR_3686_ENC_TEST_VECTORS
2857				},
2858				.dec = {
2859					.vecs = aes_ctr_rfc3686_dec_tv_template,
2860					.count = AES_CTR_3686_DEC_TEST_VECTORS
2861				}
2862			}
2863		}
2864	}, {
2865		.alg = "rfc4106(gcm(aes))",
 
2866		.test = alg_test_aead,
 
2867		.suite = {
2868			.aead = {
2869				.enc = {
2870					.vecs = aes_gcm_rfc4106_enc_tv_template,
2871					.count = AES_GCM_4106_ENC_TEST_VECTORS
2872				},
2873				.dec = {
2874					.vecs = aes_gcm_rfc4106_dec_tv_template,
2875					.count = AES_GCM_4106_DEC_TEST_VECTORS
2876				}
2877			}
2878		}
2879	}, {
2880		.alg = "rfc4309(ccm(aes))",
 
2881		.test = alg_test_aead,
2882		.fips_allowed = 1,
2883		.suite = {
2884			.aead = {
2885				.enc = {
2886					.vecs = aes_ccm_rfc4309_enc_tv_template,
2887					.count = AES_CCM_4309_ENC_TEST_VECTORS
2888				},
2889				.dec = {
2890					.vecs = aes_ccm_rfc4309_dec_tv_template,
2891					.count = AES_CCM_4309_DEC_TEST_VECTORS
2892				}
2893			}
2894		}
2895	}, {
2896		.alg = "rfc4543(gcm(aes))",
 
2897		.test = alg_test_aead,
2898		.suite = {
2899			.aead = {
2900				.enc = {
2901					.vecs = aes_gcm_rfc4543_enc_tv_template,
2902					.count = AES_GCM_4543_ENC_TEST_VECTORS
2903				},
2904				.dec = {
2905					.vecs = aes_gcm_rfc4543_dec_tv_template,
2906					.count = AES_GCM_4543_DEC_TEST_VECTORS
2907				},
2908			}
 
 
 
2909		}
2910	}, {
2911		.alg = "rmd128",
2912		.test = alg_test_hash,
2913		.suite = {
2914			.hash = {
2915				.vecs = rmd128_tv_template,
2916				.count = RMD128_TEST_VECTORS
2917			}
2918		}
2919	}, {
2920		.alg = "rmd160",
2921		.test = alg_test_hash,
2922		.suite = {
2923			.hash = {
2924				.vecs = rmd160_tv_template,
2925				.count = RMD160_TEST_VECTORS
2926			}
2927		}
2928	}, {
2929		.alg = "rmd256",
2930		.test = alg_test_hash,
2931		.suite = {
2932			.hash = {
2933				.vecs = rmd256_tv_template,
2934				.count = RMD256_TEST_VECTORS
2935			}
2936		}
2937	}, {
2938		.alg = "rmd320",
2939		.test = alg_test_hash,
2940		.suite = {
2941			.hash = {
2942				.vecs = rmd320_tv_template,
2943				.count = RMD320_TEST_VECTORS
2944			}
 
 
 
 
2945		}
2946	}, {
2947		.alg = "salsa20",
2948		.test = alg_test_skcipher,
2949		.suite = {
2950			.cipher = {
2951				.enc = {
2952					.vecs = salsa20_stream_enc_tv_template,
2953					.count = SALSA20_STREAM_ENC_TEST_VECTORS
2954				}
2955			}
2956		}
2957	}, {
2958		.alg = "sha1",
2959		.test = alg_test_hash,
2960		.fips_allowed = 1,
2961		.suite = {
2962			.hash = {
2963				.vecs = sha1_tv_template,
2964				.count = SHA1_TEST_VECTORS
2965			}
2966		}
2967	}, {
2968		.alg = "sha224",
2969		.test = alg_test_hash,
2970		.fips_allowed = 1,
2971		.suite = {
2972			.hash = {
2973				.vecs = sha224_tv_template,
2974				.count = SHA224_TEST_VECTORS
2975			}
2976		}
2977	}, {
2978		.alg = "sha256",
2979		.test = alg_test_hash,
2980		.fips_allowed = 1,
2981		.suite = {
2982			.hash = {
2983				.vecs = sha256_tv_template,
2984				.count = SHA256_TEST_VECTORS
2985			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2986		}
2987	}, {
2988		.alg = "sha384",
2989		.test = alg_test_hash,
2990		.fips_allowed = 1,
2991		.suite = {
2992			.hash = {
2993				.vecs = sha384_tv_template,
2994				.count = SHA384_TEST_VECTORS
2995			}
2996		}
2997	}, {
2998		.alg = "sha512",
2999		.test = alg_test_hash,
3000		.fips_allowed = 1,
3001		.suite = {
3002			.hash = {
3003				.vecs = sha512_tv_template,
3004				.count = SHA512_TEST_VECTORS
3005			}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3006		}
3007	}, {
3008		.alg = "tgr128",
3009		.test = alg_test_hash,
3010		.suite = {
3011			.hash = {
3012				.vecs = tgr128_tv_template,
3013				.count = TGR128_TEST_VECTORS
3014			}
3015		}
3016	}, {
3017		.alg = "tgr160",
3018		.test = alg_test_hash,
3019		.suite = {
3020			.hash = {
3021				.vecs = tgr160_tv_template,
3022				.count = TGR160_TEST_VECTORS
3023			}
3024		}
3025	}, {
3026		.alg = "tgr192",
3027		.test = alg_test_hash,
3028		.suite = {
3029			.hash = {
3030				.vecs = tgr192_tv_template,
3031				.count = TGR192_TEST_VECTORS
3032			}
3033		}
3034	}, {
3035		.alg = "vmac(aes)",
3036		.test = alg_test_hash,
3037		.suite = {
3038			.hash = {
3039				.vecs = aes_vmac128_tv_template,
3040				.count = VMAC_AES_TEST_VECTORS
3041			}
3042		}
3043	}, {
3044		.alg = "wp256",
3045		.test = alg_test_hash,
3046		.suite = {
3047			.hash = {
3048				.vecs = wp256_tv_template,
3049				.count = WP256_TEST_VECTORS
3050			}
3051		}
3052	}, {
3053		.alg = "wp384",
3054		.test = alg_test_hash,
3055		.suite = {
3056			.hash = {
3057				.vecs = wp384_tv_template,
3058				.count = WP384_TEST_VECTORS
3059			}
3060		}
3061	}, {
3062		.alg = "wp512",
3063		.test = alg_test_hash,
3064		.suite = {
3065			.hash = {
3066				.vecs = wp512_tv_template,
3067				.count = WP512_TEST_VECTORS
3068			}
3069		}
3070	}, {
3071		.alg = "xcbc(aes)",
3072		.test = alg_test_hash,
3073		.suite = {
3074			.hash = {
3075				.vecs = aes_xcbc128_tv_template,
3076				.count = XCBC_AES_TEST_VECTORS
3077			}
3078		}
3079	}, {
 
 
 
 
 
 
 
 
 
 
 
 
3080		.alg = "xts(aes)",
 
3081		.test = alg_test_skcipher,
3082		.fips_allowed = 1,
3083		.suite = {
3084			.cipher = {
3085				.enc = {
3086					.vecs = aes_xts_enc_tv_template,
3087					.count = AES_XTS_ENC_TEST_VECTORS
3088				},
3089				.dec = {
3090					.vecs = aes_xts_dec_tv_template,
3091					.count = AES_XTS_DEC_TEST_VECTORS
3092				}
3093			}
3094		}
3095	}, {
3096		.alg = "xts(camellia)",
 
3097		.test = alg_test_skcipher,
3098		.suite = {
3099			.cipher = {
3100				.enc = {
3101					.vecs = camellia_xts_enc_tv_template,
3102					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
3103				},
3104				.dec = {
3105					.vecs = camellia_xts_dec_tv_template,
3106					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
3107				}
3108			}
3109		}
3110	}, {
3111		.alg = "xts(cast6)",
 
3112		.test = alg_test_skcipher,
3113		.suite = {
3114			.cipher = {
3115				.enc = {
3116					.vecs = cast6_xts_enc_tv_template,
3117					.count = CAST6_XTS_ENC_TEST_VECTORS
3118				},
3119				.dec = {
3120					.vecs = cast6_xts_dec_tv_template,
3121					.count = CAST6_XTS_DEC_TEST_VECTORS
3122				}
3123			}
3124		}
3125	}, {
 
 
 
 
 
 
 
3126		.alg = "xts(serpent)",
 
3127		.test = alg_test_skcipher,
3128		.suite = {
3129			.cipher = {
3130				.enc = {
3131					.vecs = serpent_xts_enc_tv_template,
3132					.count = SERPENT_XTS_ENC_TEST_VECTORS
3133				},
3134				.dec = {
3135					.vecs = serpent_xts_dec_tv_template,
3136					.count = SERPENT_XTS_DEC_TEST_VECTORS
3137				}
3138			}
3139		}
3140	}, {
3141		.alg = "xts(twofish)",
 
3142		.test = alg_test_skcipher,
3143		.suite = {
3144			.cipher = {
3145				.enc = {
3146					.vecs = tf_xts_enc_tv_template,
3147					.count = TF_XTS_ENC_TEST_VECTORS
3148				},
3149				.dec = {
3150					.vecs = tf_xts_dec_tv_template,
3151					.count = TF_XTS_DEC_TEST_VECTORS
3152				}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3153			}
3154		}
3155	}, {
3156		.alg = "zlib",
3157		.test = alg_test_pcomp,
3158		.fips_allowed = 1,
3159		.suite = {
3160			.pcomp = {
3161				.comp = {
3162					.vecs = zlib_comp_tv_template,
3163					.count = ZLIB_COMP_TEST_VECTORS
3164				},
3165				.decomp = {
3166					.vecs = zlib_decomp_tv_template,
3167					.count = ZLIB_DECOMP_TEST_VECTORS
3168				}
3169			}
3170		}
3171	}
3172};
3173
3174static bool alg_test_descs_checked;
3175
3176static void alg_test_descs_check_order(void)
3177{
3178	int i;
3179
3180	/* only check once */
3181	if (alg_test_descs_checked)
3182		return;
3183
3184	alg_test_descs_checked = true;
3185
3186	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3187		int diff = strcmp(alg_test_descs[i - 1].alg,
3188				  alg_test_descs[i].alg);
3189
3190		if (WARN_ON(diff > 0)) {
3191			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3192				alg_test_descs[i - 1].alg,
3193				alg_test_descs[i].alg);
3194		}
3195
3196		if (WARN_ON(diff == 0)) {
3197			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3198				alg_test_descs[i].alg);
3199		}
3200	}
3201}
3202
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3203static int alg_find_test(const char *alg)
3204{
3205	int start = 0;
3206	int end = ARRAY_SIZE(alg_test_descs);
3207
3208	while (start < end) {
3209		int i = (start + end) / 2;
3210		int diff = strcmp(alg_test_descs[i].alg, alg);
3211
3212		if (diff > 0) {
3213			end = i;
3214			continue;
3215		}
3216
3217		if (diff < 0) {
3218			start = i + 1;
3219			continue;
3220		}
3221
3222		return i;
3223	}
3224
3225	return -1;
3226}
3227
3228int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3229{
3230	int i;
3231	int j;
3232	int rc;
3233
3234	alg_test_descs_check_order();
 
 
 
 
 
3235
3236	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3237		char nalg[CRYPTO_MAX_ALG_NAME];
3238
3239		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3240		    sizeof(nalg))
3241			return -ENAMETOOLONG;
3242
3243		i = alg_find_test(nalg);
3244		if (i < 0)
3245			goto notest;
3246
3247		if (fips_enabled && !alg_test_descs[i].fips_allowed)
3248			goto non_fips_alg;
3249
3250		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3251		goto test_done;
3252	}
3253
3254	i = alg_find_test(alg);
3255	j = alg_find_test(driver);
3256	if (i < 0 && j < 0)
3257		goto notest;
3258
3259	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3260			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
3261		goto non_fips_alg;
3262
3263	rc = 0;
3264	if (i >= 0)
3265		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3266					     type, mask);
3267	if (j >= 0 && j != i)
3268		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3269					     type, mask);
3270
3271test_done:
3272	if (fips_enabled && rc)
3273		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
 
 
 
3274
3275	if (fips_enabled && !rc)
3276		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
3277		       driver, alg);
3278
3279	return rc;
3280
3281notest:
3282	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3283	return 0;
3284non_fips_alg:
3285	return -EINVAL;
3286}
3287
3288#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3289
3290EXPORT_SYMBOL_GPL(alg_test);