Loading...
1/*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23#include <crypto/aead.h>
24#include <crypto/hash.h>
25#include <crypto/skcipher.h>
26#include <linux/err.h>
27#include <linux/fips.h>
28#include <linux/module.h>
29#include <linux/scatterlist.h>
30#include <linux/slab.h>
31#include <linux/string.h>
32#include <crypto/rng.h>
33#include <crypto/drbg.h>
34#include <crypto/akcipher.h>
35
36#include "internal.h"
37
38#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
39
40/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	/* Self-tests are compiled out; report success for every algorithm. */
	return 0;
}
45
46#else
47
48#include "testmgr.h"
49
50/*
51 * Need slab memory for testing (size in number of pages).
52 */
53#define XBUFSIZE 8
54
55/*
56 * Indexes into the xbuf to simulate cross-page access.
57 */
58#define IDX1 32
59#define IDX2 32400
60#define IDX3 1
61#define IDX4 8193
62#define IDX5 22222
63#define IDX6 17101
64#define IDX7 27333
65#define IDX8 3000
66
67/*
68* Used by test_cipher()
69*/
70#define ENCRYPT 1
71#define DECRYPT 0
72
/* Completion tracking for asynchronous crypto requests. */
struct tcrypt_result {
	struct completion completion;	/* signalled by tcrypt_complete() */
	int err;			/* final status of the async request */
};

/* AEAD vectors, split into encryption and decryption sets. */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Block/stream cipher vectors, split into encryption and decryption sets. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Compression vectors, split into compress and decompress sets. */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Message digest / keyed hash vectors. */
struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

/* Deterministic RNG (ANSI CPRNG) vectors. */
struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

/* SP800-90A DRBG vectors. */
struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

/* Asymmetric cipher (e.g. RSA) vectors. */
struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

/*
 * One entry in the algorithm test table: the algorithm name, the
 * test driver to run, and the test vectors for that algorithm type.
 */
struct alg_test_desc {
	const char *alg;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
	} suite;
};

/* Scatter offsets used to exercise cross-page access (see IDX1..IDX8). */
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
137
138static void hexdump(unsigned char *buf, unsigned int len)
139{
140 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
141 16, 1,
142 buf, len, false);
143}
144
145static void tcrypt_complete(struct crypto_async_request *req, int err)
146{
147 struct tcrypt_result *res = req->data;
148
149 if (err == -EINPROGRESS)
150 return;
151
152 res->err = err;
153 complete(&res->completion);
154}
155
156static int testmgr_alloc_buf(char *buf[XBUFSIZE])
157{
158 int i;
159
160 for (i = 0; i < XBUFSIZE; i++) {
161 buf[i] = (void *)__get_free_page(GFP_KERNEL);
162 if (!buf[i])
163 goto err_free_buf;
164 }
165
166 return 0;
167
168err_free_buf:
169 while (i-- > 0)
170 free_page((unsigned long)buf[i]);
171
172 return -ENOMEM;
173}
174
175static void testmgr_free_buf(char *buf[XBUFSIZE])
176{
177 int i;
178
179 for (i = 0; i < XBUFSIZE; i++)
180 free_page((unsigned long)buf[i]);
181}
182
183static int wait_async_op(struct tcrypt_result *tr, int ret)
184{
185 if (ret == -EINPROGRESS || ret == -EBUSY) {
186 wait_for_completion(&tr->completion);
187 reinit_completion(&tr->completion);
188 ret = tr->err;
189 }
190 return ret;
191}
192
193static int ahash_partial_update(struct ahash_request **preq,
194 struct crypto_ahash *tfm, struct hash_testvec *template,
195 void *hash_buff, int k, int temp, struct scatterlist *sg,
196 const char *algo, char *result, struct tcrypt_result *tresult)
197{
198 char *state;
199 struct ahash_request *req;
200 int statesize, ret = -EINVAL;
201
202 req = *preq;
203 statesize = crypto_ahash_statesize(
204 crypto_ahash_reqtfm(req));
205 state = kmalloc(statesize, GFP_KERNEL);
206 if (!state) {
207 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
208 goto out_nostate;
209 }
210 ret = crypto_ahash_export(req, state);
211 if (ret) {
212 pr_err("alt: hash: Failed to export() for %s\n", algo);
213 goto out;
214 }
215 ahash_request_free(req);
216 req = ahash_request_alloc(tfm, GFP_KERNEL);
217 if (!req) {
218 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
219 goto out_noreq;
220 }
221 ahash_request_set_callback(req,
222 CRYPTO_TFM_REQ_MAY_BACKLOG,
223 tcrypt_complete, tresult);
224
225 memcpy(hash_buff, template->plaintext + temp,
226 template->tap[k]);
227 sg_init_one(&sg[0], hash_buff, template->tap[k]);
228 ahash_request_set_crypt(req, sg, result, template->tap[k]);
229 ret = crypto_ahash_import(req, state);
230 if (ret) {
231 pr_err("alg: hash: Failed to import() for %s\n", algo);
232 goto out;
233 }
234 ret = wait_async_op(tresult, crypto_ahash_update(req));
235 if (ret)
236 goto out;
237 *preq = req;
238 ret = 0;
239 goto out_noreq;
240out:
241 ahash_request_free(req);
242out_noreq:
243 kfree(state);
244out_nostate:
245 return ret;
246}
247
248static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
249 unsigned int tcount, bool use_digest,
250 const int align_offset)
251{
252 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
253 unsigned int i, j, k, temp;
254 struct scatterlist sg[8];
255 char *result;
256 char *key;
257 struct ahash_request *req;
258 struct tcrypt_result tresult;
259 void *hash_buff;
260 char *xbuf[XBUFSIZE];
261 int ret = -ENOMEM;
262
263 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
264 if (!result)
265 return ret;
266 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
267 if (!key)
268 goto out_nobuf;
269 if (testmgr_alloc_buf(xbuf))
270 goto out_nobuf;
271
272 init_completion(&tresult.completion);
273
274 req = ahash_request_alloc(tfm, GFP_KERNEL);
275 if (!req) {
276 printk(KERN_ERR "alg: hash: Failed to allocate request for "
277 "%s\n", algo);
278 goto out_noreq;
279 }
280 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
281 tcrypt_complete, &tresult);
282
283 j = 0;
284 for (i = 0; i < tcount; i++) {
285 if (template[i].np)
286 continue;
287
288 ret = -EINVAL;
289 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
290 goto out;
291
292 j++;
293 memset(result, 0, MAX_DIGEST_SIZE);
294
295 hash_buff = xbuf[0];
296 hash_buff += align_offset;
297
298 memcpy(hash_buff, template[i].plaintext, template[i].psize);
299 sg_init_one(&sg[0], hash_buff, template[i].psize);
300
301 if (template[i].ksize) {
302 crypto_ahash_clear_flags(tfm, ~0);
303 if (template[i].ksize > MAX_KEYLEN) {
304 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
305 j, algo, template[i].ksize, MAX_KEYLEN);
306 ret = -EINVAL;
307 goto out;
308 }
309 memcpy(key, template[i].key, template[i].ksize);
310 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
311 if (ret) {
312 printk(KERN_ERR "alg: hash: setkey failed on "
313 "test %d for %s: ret=%d\n", j, algo,
314 -ret);
315 goto out;
316 }
317 }
318
319 ahash_request_set_crypt(req, sg, result, template[i].psize);
320 if (use_digest) {
321 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
322 if (ret) {
323 pr_err("alg: hash: digest failed on test %d "
324 "for %s: ret=%d\n", j, algo, -ret);
325 goto out;
326 }
327 } else {
328 ret = wait_async_op(&tresult, crypto_ahash_init(req));
329 if (ret) {
330 pr_err("alt: hash: init failed on test %d "
331 "for %s: ret=%d\n", j, algo, -ret);
332 goto out;
333 }
334 ret = wait_async_op(&tresult, crypto_ahash_update(req));
335 if (ret) {
336 pr_err("alt: hash: update failed on test %d "
337 "for %s: ret=%d\n", j, algo, -ret);
338 goto out;
339 }
340 ret = wait_async_op(&tresult, crypto_ahash_final(req));
341 if (ret) {
342 pr_err("alt: hash: final failed on test %d "
343 "for %s: ret=%d\n", j, algo, -ret);
344 goto out;
345 }
346 }
347
348 if (memcmp(result, template[i].digest,
349 crypto_ahash_digestsize(tfm))) {
350 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
351 j, algo);
352 hexdump(result, crypto_ahash_digestsize(tfm));
353 ret = -EINVAL;
354 goto out;
355 }
356 }
357
358 j = 0;
359 for (i = 0; i < tcount; i++) {
360 /* alignment tests are only done with continuous buffers */
361 if (align_offset != 0)
362 break;
363
364 if (!template[i].np)
365 continue;
366
367 j++;
368 memset(result, 0, MAX_DIGEST_SIZE);
369
370 temp = 0;
371 sg_init_table(sg, template[i].np);
372 ret = -EINVAL;
373 for (k = 0; k < template[i].np; k++) {
374 if (WARN_ON(offset_in_page(IDX[k]) +
375 template[i].tap[k] > PAGE_SIZE))
376 goto out;
377 sg_set_buf(&sg[k],
378 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
379 offset_in_page(IDX[k]),
380 template[i].plaintext + temp,
381 template[i].tap[k]),
382 template[i].tap[k]);
383 temp += template[i].tap[k];
384 }
385
386 if (template[i].ksize) {
387 if (template[i].ksize > MAX_KEYLEN) {
388 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
389 j, algo, template[i].ksize, MAX_KEYLEN);
390 ret = -EINVAL;
391 goto out;
392 }
393 crypto_ahash_clear_flags(tfm, ~0);
394 memcpy(key, template[i].key, template[i].ksize);
395 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
396
397 if (ret) {
398 printk(KERN_ERR "alg: hash: setkey "
399 "failed on chunking test %d "
400 "for %s: ret=%d\n", j, algo, -ret);
401 goto out;
402 }
403 }
404
405 ahash_request_set_crypt(req, sg, result, template[i].psize);
406 ret = crypto_ahash_digest(req);
407 switch (ret) {
408 case 0:
409 break;
410 case -EINPROGRESS:
411 case -EBUSY:
412 wait_for_completion(&tresult.completion);
413 reinit_completion(&tresult.completion);
414 ret = tresult.err;
415 if (!ret)
416 break;
417 /* fall through */
418 default:
419 printk(KERN_ERR "alg: hash: digest failed "
420 "on chunking test %d for %s: "
421 "ret=%d\n", j, algo, -ret);
422 goto out;
423 }
424
425 if (memcmp(result, template[i].digest,
426 crypto_ahash_digestsize(tfm))) {
427 printk(KERN_ERR "alg: hash: Chunking test %d "
428 "failed for %s\n", j, algo);
429 hexdump(result, crypto_ahash_digestsize(tfm));
430 ret = -EINVAL;
431 goto out;
432 }
433 }
434
435 /* partial update exercise */
436 j = 0;
437 for (i = 0; i < tcount; i++) {
438 /* alignment tests are only done with continuous buffers */
439 if (align_offset != 0)
440 break;
441
442 if (template[i].np < 2)
443 continue;
444
445 j++;
446 memset(result, 0, MAX_DIGEST_SIZE);
447
448 ret = -EINVAL;
449 hash_buff = xbuf[0];
450 memcpy(hash_buff, template[i].plaintext,
451 template[i].tap[0]);
452 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
453
454 if (template[i].ksize) {
455 crypto_ahash_clear_flags(tfm, ~0);
456 if (template[i].ksize > MAX_KEYLEN) {
457 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
458 j, algo, template[i].ksize, MAX_KEYLEN);
459 ret = -EINVAL;
460 goto out;
461 }
462 memcpy(key, template[i].key, template[i].ksize);
463 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
464 if (ret) {
465 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
466 j, algo, -ret);
467 goto out;
468 }
469 }
470
471 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
472 ret = wait_async_op(&tresult, crypto_ahash_init(req));
473 if (ret) {
474 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
475 j, algo, -ret);
476 goto out;
477 }
478 ret = wait_async_op(&tresult, crypto_ahash_update(req));
479 if (ret) {
480 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
481 j, algo, -ret);
482 goto out;
483 }
484
485 temp = template[i].tap[0];
486 for (k = 1; k < template[i].np; k++) {
487 ret = ahash_partial_update(&req, tfm, &template[i],
488 hash_buff, k, temp, &sg[0], algo, result,
489 &tresult);
490 if (ret) {
491 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
492 j, algo, -ret);
493 goto out_noreq;
494 }
495 temp += template[i].tap[k];
496 }
497 ret = wait_async_op(&tresult, crypto_ahash_final(req));
498 if (ret) {
499 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
500 j, algo, -ret);
501 goto out;
502 }
503 if (memcmp(result, template[i].digest,
504 crypto_ahash_digestsize(tfm))) {
505 pr_err("alg: hash: Partial Test %d failed for %s\n",
506 j, algo);
507 hexdump(result, crypto_ahash_digestsize(tfm));
508 ret = -EINVAL;
509 goto out;
510 }
511 }
512
513 ret = 0;
514
515out:
516 ahash_request_free(req);
517out_noreq:
518 testmgr_free_buf(xbuf);
519out_nobuf:
520 kfree(key);
521 kfree(result);
522 return ret;
523}
524
525static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
526 unsigned int tcount, bool use_digest)
527{
528 unsigned int alignmask;
529 int ret;
530
531 ret = __test_hash(tfm, template, tcount, use_digest, 0);
532 if (ret)
533 return ret;
534
535 /* test unaligned buffers, check with one byte offset */
536 ret = __test_hash(tfm, template, tcount, use_digest, 1);
537 if (ret)
538 return ret;
539
540 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
541 if (alignmask) {
542 /* Check if alignment mask for tfm is correctly set. */
543 ret = __test_hash(tfm, template, tcount, use_digest,
544 alignmask + 1);
545 if (ret)
546 return ret;
547 }
548
549 return 0;
550}
551
/*
 * Run the AEAD vectors in @template[0..tcount) against @tfm for one
 * direction (@enc is ENCRYPT or DECRYPT).
 *
 * Two passes: linear vectors (np == 0) from a single source buffer at
 * @align_offset, then chunked vectors (np != 0) whose associated data
 * and payload are scattered across pages via IDX[] (skipped when
 * @align_offset != 0).  When @diff_dst is set, output goes to a
 * separate scatterlist instead of in-place.
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_aead(struct crypto_aead *tfm, int enc,
		       struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct tcrypt_result result;
	unsigned int authsize, iv_len;
	void *input;
	void *output;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];	/* second half of the allocation (diff_dst only) */

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	iv_len = crypto_aead_ivsize(tfm);

	/* Pass 1: linear (non-chunked) vectors. */
	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		input = xbuf[0];
		input += align_offset;
		assoc = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen >
			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(input, template[i].input, template[i].ilen);
		memcpy(assoc, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		/* setkey must fail exactly when the vector says it should */
		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (!ret == template[i].fail) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* tag length is the difference between result and input */
		authsize = abs(template[i].rlen - template[i].ilen);
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		/* sg[0] = assoc data (if any), sg[k] = payload (+tag room) */
		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assoc, template[i].alen);
		sg_set_buf(&sg[k], input,
			   template[i].ilen + (enc ? authsize : 0));
		output = input;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assoc, template[i].alen);

			output = xoutbuf[0];
			output += align_offset;
			sg_set_buf(&sgout[k], output,
				   template[i].rlen + (enc ? 0 : authsize));
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through on error: -EBADMSG or default */
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = output;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: chunked vectors scattered across pages. */
	for (i = 0, j = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (!ret == template[i].fail) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);

		ret = -EINVAL;
		/* anp chunks of assoc data followed by np chunks of payload */
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, template[i].input + temp, template[i].tap[k]);
			sg_set_buf(&sg[template[i].anp + k],
				   q, template[i].tap[k]);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, template[i].tap[k]);

				sg_set_buf(&sgout[template[i].anp + k],
					   q, template[i].tap[k]);
			}

			/* zero a guard byte after the chunk (checked later) */
			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += template[i].tap[k];
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			/* last payload chunk must also hold the auth tag */
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through on error: -EBADMSG or default */
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* verify each output chunk, plus guard bytes past it */
		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1)
				n += enc ? authsize : -authsize;

			if (memcmp(q, template[i].result + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			q += n;
			if (k == template[i].np - 1 && !enc) {
				/* in-place decrypt leaves the tag behind */
				if (!diff_dst &&
				    memcmp(q, template[i].input +
					   temp + n, authsize))
					n = authsize;
				else
					n = 0;
			} else {
				/* count stray non-zero bytes after the chunk */
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}
942
943static int test_aead(struct crypto_aead *tfm, int enc,
944 struct aead_testvec *template, unsigned int tcount)
945{
946 unsigned int alignmask;
947 int ret;
948
949 /* test 'dst == src' case */
950 ret = __test_aead(tfm, enc, template, tcount, false, 0);
951 if (ret)
952 return ret;
953
954 /* test 'dst != src' case */
955 ret = __test_aead(tfm, enc, template, tcount, true, 0);
956 if (ret)
957 return ret;
958
959 /* test unaligned buffers, check with one byte offset */
960 ret = __test_aead(tfm, enc, template, tcount, true, 1);
961 if (ret)
962 return ret;
963
964 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
965 if (alignmask) {
966 /* Check if alignment mask for tfm is correctly set. */
967 ret = __test_aead(tfm, enc, template, tcount, true,
968 alignmask + 1);
969 if (ret)
970 return ret;
971 }
972
973 return 0;
974}
975
976static int test_cipher(struct crypto_cipher *tfm, int enc,
977 struct cipher_testvec *template, unsigned int tcount)
978{
979 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
980 unsigned int i, j, k;
981 char *q;
982 const char *e;
983 void *data;
984 char *xbuf[XBUFSIZE];
985 int ret = -ENOMEM;
986
987 if (testmgr_alloc_buf(xbuf))
988 goto out_nobuf;
989
990 if (enc == ENCRYPT)
991 e = "encryption";
992 else
993 e = "decryption";
994
995 j = 0;
996 for (i = 0; i < tcount; i++) {
997 if (template[i].np)
998 continue;
999
1000 j++;
1001
1002 ret = -EINVAL;
1003 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1004 goto out;
1005
1006 data = xbuf[0];
1007 memcpy(data, template[i].input, template[i].ilen);
1008
1009 crypto_cipher_clear_flags(tfm, ~0);
1010 if (template[i].wk)
1011 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1012
1013 ret = crypto_cipher_setkey(tfm, template[i].key,
1014 template[i].klen);
1015 if (!ret == template[i].fail) {
1016 printk(KERN_ERR "alg: cipher: setkey failed "
1017 "on test %d for %s: flags=%x\n", j,
1018 algo, crypto_cipher_get_flags(tfm));
1019 goto out;
1020 } else if (ret)
1021 continue;
1022
1023 for (k = 0; k < template[i].ilen;
1024 k += crypto_cipher_blocksize(tfm)) {
1025 if (enc)
1026 crypto_cipher_encrypt_one(tfm, data + k,
1027 data + k);
1028 else
1029 crypto_cipher_decrypt_one(tfm, data + k,
1030 data + k);
1031 }
1032
1033 q = data;
1034 if (memcmp(q, template[i].result, template[i].rlen)) {
1035 printk(KERN_ERR "alg: cipher: Test %d failed "
1036 "on %s for %s\n", j, e, algo);
1037 hexdump(q, template[i].rlen);
1038 ret = -EINVAL;
1039 goto out;
1040 }
1041 }
1042
1043 ret = 0;
1044
1045out:
1046 testmgr_free_buf(xbuf);
1047out_nobuf:
1048 return ret;
1049}
1050
/*
 * Run the skcipher vectors in @template[0..tcount) against @tfm for one
 * direction (@enc is ENCRYPT or DECRYPT).
 *
 * Two passes: linear vectors (np == 0, or np != 0 with also_non_np set)
 * processed from a single buffer at @align_offset, then chunked vectors
 * (np != 0) scattered across pages via IDX[] (skipped when
 * @align_offset != 0).  When @diff_dst is set, output goes to a
 * separate scatterlist instead of in-place.
 *
 * Returns 0 on success or a negative errno on the first failure.
 */
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   struct cipher_testvec *template, unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      tcrypt_complete, &result);

	/* Pass 1: linear (non-chunked) vectors. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, template[i].input, template[i].ilen);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		/* setkey must fail exactly when the vector says it should */
		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (!ret == template[i].fail) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].ilen);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].ilen);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}

		/* some modes (e.g. CBC) define the IV left in the buffer */
		if (template[i].iv_out &&
		    memcmp(iv, template[i].iv_out,
			   crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* Pass 2: chunked vectors scattered across pages. */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (!ret == template[i].fail) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, template[i].input + temp, template[i].tap[k]);

			/* zero a guard byte after the chunk (checked later) */
			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);

		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		/* verify each output chunk, plus guard bytes past it */
		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, template[i].result + temp,
				   template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			q += template[i].tap[k];
			/* count stray non-zero bytes after the chunk */
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
1308
1309static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1310 struct cipher_testvec *template, unsigned int tcount)
1311{
1312 unsigned int alignmask;
1313 int ret;
1314
1315 /* test 'dst == src' case */
1316 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1317 if (ret)
1318 return ret;
1319
1320 /* test 'dst != src' case */
1321 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1322 if (ret)
1323 return ret;
1324
1325 /* test unaligned buffers, check with one byte offset */
1326 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1327 if (ret)
1328 return ret;
1329
1330 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1331 if (alignmask) {
1332 /* Check if alignment mask for tfm is correctly set. */
1333 ret = __test_skcipher(tfm, enc, template, tcount, true,
1334 alignmask + 1);
1335 if (ret)
1336 return ret;
1337 }
1338
1339 return 0;
1340}
1341
1342static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1343 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1344{
1345 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1346 unsigned int i;
1347 char result[COMP_BUF_SIZE];
1348 int ret;
1349
1350 for (i = 0; i < ctcount; i++) {
1351 int ilen;
1352 unsigned int dlen = COMP_BUF_SIZE;
1353
1354 memset(result, 0, sizeof (result));
1355
1356 ilen = ctemplate[i].inlen;
1357 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1358 ilen, result, &dlen);
1359 if (ret) {
1360 printk(KERN_ERR "alg: comp: compression failed "
1361 "on test %d for %s: ret=%d\n", i + 1, algo,
1362 -ret);
1363 goto out;
1364 }
1365
1366 if (dlen != ctemplate[i].outlen) {
1367 printk(KERN_ERR "alg: comp: Compression test %d "
1368 "failed for %s: output len = %d\n", i + 1, algo,
1369 dlen);
1370 ret = -EINVAL;
1371 goto out;
1372 }
1373
1374 if (memcmp(result, ctemplate[i].output, dlen)) {
1375 printk(KERN_ERR "alg: comp: Compression test %d "
1376 "failed for %s\n", i + 1, algo);
1377 hexdump(result, dlen);
1378 ret = -EINVAL;
1379 goto out;
1380 }
1381 }
1382
1383 for (i = 0; i < dtcount; i++) {
1384 int ilen;
1385 unsigned int dlen = COMP_BUF_SIZE;
1386
1387 memset(result, 0, sizeof (result));
1388
1389 ilen = dtemplate[i].inlen;
1390 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1391 ilen, result, &dlen);
1392 if (ret) {
1393 printk(KERN_ERR "alg: comp: decompression failed "
1394 "on test %d for %s: ret=%d\n", i + 1, algo,
1395 -ret);
1396 goto out;
1397 }
1398
1399 if (dlen != dtemplate[i].outlen) {
1400 printk(KERN_ERR "alg: comp: Decompression test %d "
1401 "failed for %s: output len = %d\n", i + 1, algo,
1402 dlen);
1403 ret = -EINVAL;
1404 goto out;
1405 }
1406
1407 if (memcmp(result, dtemplate[i].output, dlen)) {
1408 printk(KERN_ERR "alg: comp: Decompression test %d "
1409 "failed for %s\n", i + 1, algo);
1410 hexdump(result, dlen);
1411 ret = -EINVAL;
1412 goto out;
1413 }
1414 }
1415
1416 ret = 0;
1417
1418out:
1419 return ret;
1420}
1421
1422static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1423 unsigned int tcount)
1424{
1425 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1426 int err = 0, i, j, seedsize;
1427 u8 *seed;
1428 char result[32];
1429
1430 seedsize = crypto_rng_seedsize(tfm);
1431
1432 seed = kmalloc(seedsize, GFP_KERNEL);
1433 if (!seed) {
1434 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1435 "for %s\n", algo);
1436 return -ENOMEM;
1437 }
1438
1439 for (i = 0; i < tcount; i++) {
1440 memset(result, 0, 32);
1441
1442 memcpy(seed, template[i].v, template[i].vlen);
1443 memcpy(seed + template[i].vlen, template[i].key,
1444 template[i].klen);
1445 memcpy(seed + template[i].vlen + template[i].klen,
1446 template[i].dt, template[i].dtlen);
1447
1448 err = crypto_rng_reset(tfm, seed, seedsize);
1449 if (err) {
1450 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1451 "for %s\n", algo);
1452 goto out;
1453 }
1454
1455 for (j = 0; j < template[i].loops; j++) {
1456 err = crypto_rng_get_bytes(tfm, result,
1457 template[i].rlen);
1458 if (err < 0) {
1459 printk(KERN_ERR "alg: cprng: Failed to obtain "
1460 "the correct amount of random data for "
1461 "%s (requested %d)\n", algo,
1462 template[i].rlen);
1463 goto out;
1464 }
1465 }
1466
1467 err = memcmp(result, template[i].result,
1468 template[i].rlen);
1469 if (err) {
1470 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1471 i, algo);
1472 hexdump(result, template[i].rlen);
1473 err = -EINVAL;
1474 goto out;
1475 }
1476 }
1477
1478out:
1479 kfree(seed);
1480 return err;
1481}
1482
1483static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1484 u32 type, u32 mask)
1485{
1486 struct crypto_aead *tfm;
1487 int err = 0;
1488
1489 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1490 if (IS_ERR(tfm)) {
1491 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1492 "%ld\n", driver, PTR_ERR(tfm));
1493 return PTR_ERR(tfm);
1494 }
1495
1496 if (desc->suite.aead.enc.vecs) {
1497 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1498 desc->suite.aead.enc.count);
1499 if (err)
1500 goto out;
1501 }
1502
1503 if (!err && desc->suite.aead.dec.vecs)
1504 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1505 desc->suite.aead.dec.count);
1506
1507out:
1508 crypto_free_aead(tfm);
1509 return err;
1510}
1511
1512static int alg_test_cipher(const struct alg_test_desc *desc,
1513 const char *driver, u32 type, u32 mask)
1514{
1515 struct crypto_cipher *tfm;
1516 int err = 0;
1517
1518 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1519 if (IS_ERR(tfm)) {
1520 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1521 "%s: %ld\n", driver, PTR_ERR(tfm));
1522 return PTR_ERR(tfm);
1523 }
1524
1525 if (desc->suite.cipher.enc.vecs) {
1526 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1527 desc->suite.cipher.enc.count);
1528 if (err)
1529 goto out;
1530 }
1531
1532 if (desc->suite.cipher.dec.vecs)
1533 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1534 desc->suite.cipher.dec.count);
1535
1536out:
1537 crypto_free_cipher(tfm);
1538 return err;
1539}
1540
1541static int alg_test_skcipher(const struct alg_test_desc *desc,
1542 const char *driver, u32 type, u32 mask)
1543{
1544 struct crypto_skcipher *tfm;
1545 int err = 0;
1546
1547 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1548 if (IS_ERR(tfm)) {
1549 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1550 "%s: %ld\n", driver, PTR_ERR(tfm));
1551 return PTR_ERR(tfm);
1552 }
1553
1554 if (desc->suite.cipher.enc.vecs) {
1555 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1556 desc->suite.cipher.enc.count);
1557 if (err)
1558 goto out;
1559 }
1560
1561 if (desc->suite.cipher.dec.vecs)
1562 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1563 desc->suite.cipher.dec.count);
1564
1565out:
1566 crypto_free_skcipher(tfm);
1567 return err;
1568}
1569
1570static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1571 u32 type, u32 mask)
1572{
1573 struct crypto_comp *tfm;
1574 int err;
1575
1576 tfm = crypto_alloc_comp(driver, type, mask);
1577 if (IS_ERR(tfm)) {
1578 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1579 "%ld\n", driver, PTR_ERR(tfm));
1580 return PTR_ERR(tfm);
1581 }
1582
1583 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1584 desc->suite.comp.decomp.vecs,
1585 desc->suite.comp.comp.count,
1586 desc->suite.comp.decomp.count);
1587
1588 crypto_free_comp(tfm);
1589 return err;
1590}
1591
1592static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1593 u32 type, u32 mask)
1594{
1595 struct crypto_ahash *tfm;
1596 int err;
1597
1598 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1599 if (IS_ERR(tfm)) {
1600 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1601 "%ld\n", driver, PTR_ERR(tfm));
1602 return PTR_ERR(tfm);
1603 }
1604
1605 err = test_hash(tfm, desc->suite.hash.vecs,
1606 desc->suite.hash.count, true);
1607 if (!err)
1608 err = test_hash(tfm, desc->suite.hash.vecs,
1609 desc->suite.hash.count, false);
1610
1611 crypto_free_ahash(tfm);
1612 return err;
1613}
1614
/*
 * crc32c gets the standard hash-vector treatment plus one extra check:
 * the driver's shash context must hold the running CRC directly, so that
 * final() on a context seeded with a known value yields the bitwise
 * complement of that value (crc32c's output inversion).  This pins the
 * internal state convention that other kernel users of crc32c rely on.
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	/* First the ordinary known-answer vectors. */
	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		SHASH_DESC_ON_STACK(shash, tfm);
		/* Poke the seed value straight into the shash state. */
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/*
		 * NOTE(review): le32_to_cpu() on a host-order constant looks
		 * endian-suspect; verify the expected behavior on big-endian.
		 */
		*ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		/* final() must return the inverted seed. */
		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
1661
1662static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1663 u32 type, u32 mask)
1664{
1665 struct crypto_rng *rng;
1666 int err;
1667
1668 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1669 if (IS_ERR(rng)) {
1670 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1671 "%ld\n", driver, PTR_ERR(rng));
1672 return PTR_ERR(rng);
1673 }
1674
1675 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1676
1677 crypto_free_rng(rng);
1678
1679 return err;
1680}
1681
1682
/*
 * Run one NIST CAVS-style known-answer test against a DRBG.
 *
 * @test:   vector holding entropy, personalization string, additional
 *          inputs and the expected final output block
 * @pr:     non-zero selects the prediction-resistance call paths, which
 *          feed fresh test entropy before each generate call
 * @driver: algorithm name handed to crypto_alloc_rng()
 *
 * Returns 0 on match, a negative errno on setup/generate failure, or the
 * raw (non-zero) memcmp() result if the output differs from the vector.
 */
static int drbg_cavs_test(struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	/* Instantiate with the vector's entropy and personalization string. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call; with pr, re-seed from entpra beforehand. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/*
	 * Second generate call overwrites buf; only this second block is
	 * compared against the expected output below.
	 */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}
1749
1750
1751static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1752 u32 type, u32 mask)
1753{
1754 int err = 0;
1755 int pr = 0;
1756 int i = 0;
1757 struct drbg_testvec *template = desc->suite.drbg.vecs;
1758 unsigned int tcount = desc->suite.drbg.count;
1759
1760 if (0 == memcmp(driver, "drbg_pr_", 8))
1761 pr = 1;
1762
1763 for (i = 0; i < tcount; i++) {
1764 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1765 if (err) {
1766 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1767 i, driver);
1768 err = -EINVAL;
1769 break;
1770 }
1771 }
1772 return err;
1773
1774}
1775
1776static int do_test_rsa(struct crypto_akcipher *tfm,
1777 struct akcipher_testvec *vecs)
1778{
1779 char *xbuf[XBUFSIZE];
1780 struct akcipher_request *req;
1781 void *outbuf_enc = NULL;
1782 void *outbuf_dec = NULL;
1783 struct tcrypt_result result;
1784 unsigned int out_len_max, out_len = 0;
1785 int err = -ENOMEM;
1786 struct scatterlist src, dst, src_tab[2];
1787
1788 if (testmgr_alloc_buf(xbuf))
1789 return err;
1790
1791 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1792 if (!req)
1793 goto free_xbuf;
1794
1795 init_completion(&result.completion);
1796
1797 if (vecs->public_key_vec)
1798 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1799 vecs->key_len);
1800 else
1801 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1802 vecs->key_len);
1803 if (err)
1804 goto free_req;
1805
1806 out_len_max = crypto_akcipher_maxsize(tfm);
1807 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1808 if (!outbuf_enc)
1809 goto free_req;
1810
1811 if (WARN_ON(vecs->m_size > PAGE_SIZE))
1812 goto free_all;
1813
1814 memcpy(xbuf[0], vecs->m, vecs->m_size);
1815
1816 sg_init_table(src_tab, 2);
1817 sg_set_buf(&src_tab[0], xbuf[0], 8);
1818 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1819 sg_init_one(&dst, outbuf_enc, out_len_max);
1820 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1821 out_len_max);
1822 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1823 tcrypt_complete, &result);
1824
1825 /* Run RSA encrypt - c = m^e mod n;*/
1826 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1827 if (err) {
1828 pr_err("alg: rsa: encrypt test failed. err %d\n", err);
1829 goto free_all;
1830 }
1831 if (req->dst_len != vecs->c_size) {
1832 pr_err("alg: rsa: encrypt test failed. Invalid output len\n");
1833 err = -EINVAL;
1834 goto free_all;
1835 }
1836 /* verify that encrypted message is equal to expected */
1837 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1838 pr_err("alg: rsa: encrypt test failed. Invalid output\n");
1839 err = -EINVAL;
1840 goto free_all;
1841 }
1842 /* Don't invoke decrypt for vectors with public key */
1843 if (vecs->public_key_vec) {
1844 err = 0;
1845 goto free_all;
1846 }
1847 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1848 if (!outbuf_dec) {
1849 err = -ENOMEM;
1850 goto free_all;
1851 }
1852
1853 if (WARN_ON(vecs->c_size > PAGE_SIZE))
1854 goto free_all;
1855
1856 memcpy(xbuf[0], vecs->c, vecs->c_size);
1857
1858 sg_init_one(&src, xbuf[0], vecs->c_size);
1859 sg_init_one(&dst, outbuf_dec, out_len_max);
1860 init_completion(&result.completion);
1861 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
1862
1863 /* Run RSA decrypt - m = c^d mod n;*/
1864 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
1865 if (err) {
1866 pr_err("alg: rsa: decrypt test failed. err %d\n", err);
1867 goto free_all;
1868 }
1869 out_len = req->dst_len;
1870 if (out_len != vecs->m_size) {
1871 pr_err("alg: rsa: decrypt test failed. Invalid output len\n");
1872 err = -EINVAL;
1873 goto free_all;
1874 }
1875 /* verify that decrypted message is equal to the original msg */
1876 if (memcmp(vecs->m, outbuf_dec, vecs->m_size)) {
1877 pr_err("alg: rsa: decrypt test failed. Invalid output\n");
1878 err = -EINVAL;
1879 }
1880free_all:
1881 kfree(outbuf_dec);
1882 kfree(outbuf_enc);
1883free_req:
1884 akcipher_request_free(req);
1885free_xbuf:
1886 testmgr_free_buf(xbuf);
1887 return err;
1888}
1889
1890static int test_rsa(struct crypto_akcipher *tfm, struct akcipher_testvec *vecs,
1891 unsigned int tcount)
1892{
1893 int ret, i;
1894
1895 for (i = 0; i < tcount; i++) {
1896 ret = do_test_rsa(tfm, vecs++);
1897 if (ret) {
1898 pr_err("alg: rsa: test failed on vector %d, err=%d\n",
1899 i + 1, ret);
1900 return ret;
1901 }
1902 }
1903 return 0;
1904}
1905
/*
 * Dispatch akcipher vectors by algorithm family.  Only names with an
 * "rsa" prefix are handled at present; everything else trivially passes.
 */
static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
			 struct akcipher_testvec *vecs, unsigned int tcount)
{
	if (strncmp(alg, "rsa", 3) != 0)
		return 0;

	return test_rsa(tfm, vecs, tcount);
}
1914
1915static int alg_test_akcipher(const struct alg_test_desc *desc,
1916 const char *driver, u32 type, u32 mask)
1917{
1918 struct crypto_akcipher *tfm;
1919 int err = 0;
1920
1921 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1922 if (IS_ERR(tfm)) {
1923 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
1924 driver, PTR_ERR(tfm));
1925 return PTR_ERR(tfm);
1926 }
1927 if (desc->suite.akcipher.vecs)
1928 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
1929 desc->suite.akcipher.count);
1930
1931 crypto_free_akcipher(tfm);
1932 return err;
1933}
1934
/*
 * No-op self-test.  Used in alg_test_descs[] for algorithm names that
 * need no vectors of their own — e.g. the "__driver-*" and "cryptd(...)"
 * internal helper entries visible below — so lookup still succeeds.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
1940
1941/* Please keep this list sorted by algorithm name. */
1942static const struct alg_test_desc alg_test_descs[] = {
1943 {
1944 .alg = "__cbc-cast5-avx",
1945 .test = alg_test_null,
1946 }, {
1947 .alg = "__cbc-cast6-avx",
1948 .test = alg_test_null,
1949 }, {
1950 .alg = "__cbc-serpent-avx",
1951 .test = alg_test_null,
1952 }, {
1953 .alg = "__cbc-serpent-avx2",
1954 .test = alg_test_null,
1955 }, {
1956 .alg = "__cbc-serpent-sse2",
1957 .test = alg_test_null,
1958 }, {
1959 .alg = "__cbc-twofish-avx",
1960 .test = alg_test_null,
1961 }, {
1962 .alg = "__driver-cbc-aes-aesni",
1963 .test = alg_test_null,
1964 .fips_allowed = 1,
1965 }, {
1966 .alg = "__driver-cbc-camellia-aesni",
1967 .test = alg_test_null,
1968 }, {
1969 .alg = "__driver-cbc-camellia-aesni-avx2",
1970 .test = alg_test_null,
1971 }, {
1972 .alg = "__driver-cbc-cast5-avx",
1973 .test = alg_test_null,
1974 }, {
1975 .alg = "__driver-cbc-cast6-avx",
1976 .test = alg_test_null,
1977 }, {
1978 .alg = "__driver-cbc-serpent-avx",
1979 .test = alg_test_null,
1980 }, {
1981 .alg = "__driver-cbc-serpent-avx2",
1982 .test = alg_test_null,
1983 }, {
1984 .alg = "__driver-cbc-serpent-sse2",
1985 .test = alg_test_null,
1986 }, {
1987 .alg = "__driver-cbc-twofish-avx",
1988 .test = alg_test_null,
1989 }, {
1990 .alg = "__driver-ecb-aes-aesni",
1991 .test = alg_test_null,
1992 .fips_allowed = 1,
1993 }, {
1994 .alg = "__driver-ecb-camellia-aesni",
1995 .test = alg_test_null,
1996 }, {
1997 .alg = "__driver-ecb-camellia-aesni-avx2",
1998 .test = alg_test_null,
1999 }, {
2000 .alg = "__driver-ecb-cast5-avx",
2001 .test = alg_test_null,
2002 }, {
2003 .alg = "__driver-ecb-cast6-avx",
2004 .test = alg_test_null,
2005 }, {
2006 .alg = "__driver-ecb-serpent-avx",
2007 .test = alg_test_null,
2008 }, {
2009 .alg = "__driver-ecb-serpent-avx2",
2010 .test = alg_test_null,
2011 }, {
2012 .alg = "__driver-ecb-serpent-sse2",
2013 .test = alg_test_null,
2014 }, {
2015 .alg = "__driver-ecb-twofish-avx",
2016 .test = alg_test_null,
2017 }, {
2018 .alg = "__driver-gcm-aes-aesni",
2019 .test = alg_test_null,
2020 .fips_allowed = 1,
2021 }, {
2022 .alg = "__ghash-pclmulqdqni",
2023 .test = alg_test_null,
2024 .fips_allowed = 1,
2025 }, {
2026 .alg = "ansi_cprng",
2027 .test = alg_test_cprng,
2028 .suite = {
2029 .cprng = {
2030 .vecs = ansi_cprng_aes_tv_template,
2031 .count = ANSI_CPRNG_AES_TEST_VECTORS
2032 }
2033 }
2034 }, {
2035 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2036 .test = alg_test_aead,
2037 .suite = {
2038 .aead = {
2039 .enc = {
2040 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2041 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2042 },
2043 .dec = {
2044 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2045 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2046 }
2047 }
2048 }
2049 }, {
2050 .alg = "authenc(hmac(sha1),cbc(aes))",
2051 .test = alg_test_aead,
2052 .suite = {
2053 .aead = {
2054 .enc = {
2055 .vecs =
2056 hmac_sha1_aes_cbc_enc_tv_temp,
2057 .count =
2058 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2059 }
2060 }
2061 }
2062 }, {
2063 .alg = "authenc(hmac(sha1),cbc(des))",
2064 .test = alg_test_aead,
2065 .suite = {
2066 .aead = {
2067 .enc = {
2068 .vecs =
2069 hmac_sha1_des_cbc_enc_tv_temp,
2070 .count =
2071 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2072 }
2073 }
2074 }
2075 }, {
2076 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2077 .test = alg_test_aead,
2078 .fips_allowed = 1,
2079 .suite = {
2080 .aead = {
2081 .enc = {
2082 .vecs =
2083 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2084 .count =
2085 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2086 }
2087 }
2088 }
2089 }, {
2090 .alg = "authenc(hmac(sha1),ctr(aes))",
2091 .test = alg_test_null,
2092 .fips_allowed = 1,
2093 }, {
2094 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2095 .test = alg_test_aead,
2096 .suite = {
2097 .aead = {
2098 .enc = {
2099 .vecs =
2100 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2101 .count =
2102 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2103 },
2104 .dec = {
2105 .vecs =
2106 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2107 .count =
2108 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2109 }
2110 }
2111 }
2112 }, {
2113 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2114 .test = alg_test_null,
2115 .fips_allowed = 1,
2116 }, {
2117 .alg = "authenc(hmac(sha224),cbc(des))",
2118 .test = alg_test_aead,
2119 .suite = {
2120 .aead = {
2121 .enc = {
2122 .vecs =
2123 hmac_sha224_des_cbc_enc_tv_temp,
2124 .count =
2125 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2126 }
2127 }
2128 }
2129 }, {
2130 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2131 .test = alg_test_aead,
2132 .fips_allowed = 1,
2133 .suite = {
2134 .aead = {
2135 .enc = {
2136 .vecs =
2137 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2138 .count =
2139 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2140 }
2141 }
2142 }
2143 }, {
2144 .alg = "authenc(hmac(sha256),cbc(aes))",
2145 .test = alg_test_aead,
2146 .fips_allowed = 1,
2147 .suite = {
2148 .aead = {
2149 .enc = {
2150 .vecs =
2151 hmac_sha256_aes_cbc_enc_tv_temp,
2152 .count =
2153 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2154 }
2155 }
2156 }
2157 }, {
2158 .alg = "authenc(hmac(sha256),cbc(des))",
2159 .test = alg_test_aead,
2160 .suite = {
2161 .aead = {
2162 .enc = {
2163 .vecs =
2164 hmac_sha256_des_cbc_enc_tv_temp,
2165 .count =
2166 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2167 }
2168 }
2169 }
2170 }, {
2171 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2172 .test = alg_test_aead,
2173 .fips_allowed = 1,
2174 .suite = {
2175 .aead = {
2176 .enc = {
2177 .vecs =
2178 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2179 .count =
2180 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2181 }
2182 }
2183 }
2184 }, {
2185 .alg = "authenc(hmac(sha256),ctr(aes))",
2186 .test = alg_test_null,
2187 .fips_allowed = 1,
2188 }, {
2189 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2190 .test = alg_test_null,
2191 .fips_allowed = 1,
2192 }, {
2193 .alg = "authenc(hmac(sha384),cbc(des))",
2194 .test = alg_test_aead,
2195 .suite = {
2196 .aead = {
2197 .enc = {
2198 .vecs =
2199 hmac_sha384_des_cbc_enc_tv_temp,
2200 .count =
2201 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2202 }
2203 }
2204 }
2205 }, {
2206 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2207 .test = alg_test_aead,
2208 .fips_allowed = 1,
2209 .suite = {
2210 .aead = {
2211 .enc = {
2212 .vecs =
2213 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2214 .count =
2215 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2216 }
2217 }
2218 }
2219 }, {
2220 .alg = "authenc(hmac(sha384),ctr(aes))",
2221 .test = alg_test_null,
2222 .fips_allowed = 1,
2223 }, {
2224 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2225 .test = alg_test_null,
2226 .fips_allowed = 1,
2227 }, {
2228 .alg = "authenc(hmac(sha512),cbc(aes))",
2229 .fips_allowed = 1,
2230 .test = alg_test_aead,
2231 .suite = {
2232 .aead = {
2233 .enc = {
2234 .vecs =
2235 hmac_sha512_aes_cbc_enc_tv_temp,
2236 .count =
2237 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2238 }
2239 }
2240 }
2241 }, {
2242 .alg = "authenc(hmac(sha512),cbc(des))",
2243 .test = alg_test_aead,
2244 .suite = {
2245 .aead = {
2246 .enc = {
2247 .vecs =
2248 hmac_sha512_des_cbc_enc_tv_temp,
2249 .count =
2250 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2251 }
2252 }
2253 }
2254 }, {
2255 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2256 .test = alg_test_aead,
2257 .fips_allowed = 1,
2258 .suite = {
2259 .aead = {
2260 .enc = {
2261 .vecs =
2262 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2263 .count =
2264 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2265 }
2266 }
2267 }
2268 }, {
2269 .alg = "authenc(hmac(sha512),ctr(aes))",
2270 .test = alg_test_null,
2271 .fips_allowed = 1,
2272 }, {
2273 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2274 .test = alg_test_null,
2275 .fips_allowed = 1,
2276 }, {
2277 .alg = "cbc(aes)",
2278 .test = alg_test_skcipher,
2279 .fips_allowed = 1,
2280 .suite = {
2281 .cipher = {
2282 .enc = {
2283 .vecs = aes_cbc_enc_tv_template,
2284 .count = AES_CBC_ENC_TEST_VECTORS
2285 },
2286 .dec = {
2287 .vecs = aes_cbc_dec_tv_template,
2288 .count = AES_CBC_DEC_TEST_VECTORS
2289 }
2290 }
2291 }
2292 }, {
2293 .alg = "cbc(anubis)",
2294 .test = alg_test_skcipher,
2295 .suite = {
2296 .cipher = {
2297 .enc = {
2298 .vecs = anubis_cbc_enc_tv_template,
2299 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2300 },
2301 .dec = {
2302 .vecs = anubis_cbc_dec_tv_template,
2303 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2304 }
2305 }
2306 }
2307 }, {
2308 .alg = "cbc(blowfish)",
2309 .test = alg_test_skcipher,
2310 .suite = {
2311 .cipher = {
2312 .enc = {
2313 .vecs = bf_cbc_enc_tv_template,
2314 .count = BF_CBC_ENC_TEST_VECTORS
2315 },
2316 .dec = {
2317 .vecs = bf_cbc_dec_tv_template,
2318 .count = BF_CBC_DEC_TEST_VECTORS
2319 }
2320 }
2321 }
2322 }, {
2323 .alg = "cbc(camellia)",
2324 .test = alg_test_skcipher,
2325 .suite = {
2326 .cipher = {
2327 .enc = {
2328 .vecs = camellia_cbc_enc_tv_template,
2329 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2330 },
2331 .dec = {
2332 .vecs = camellia_cbc_dec_tv_template,
2333 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2334 }
2335 }
2336 }
2337 }, {
2338 .alg = "cbc(cast5)",
2339 .test = alg_test_skcipher,
2340 .suite = {
2341 .cipher = {
2342 .enc = {
2343 .vecs = cast5_cbc_enc_tv_template,
2344 .count = CAST5_CBC_ENC_TEST_VECTORS
2345 },
2346 .dec = {
2347 .vecs = cast5_cbc_dec_tv_template,
2348 .count = CAST5_CBC_DEC_TEST_VECTORS
2349 }
2350 }
2351 }
2352 }, {
2353 .alg = "cbc(cast6)",
2354 .test = alg_test_skcipher,
2355 .suite = {
2356 .cipher = {
2357 .enc = {
2358 .vecs = cast6_cbc_enc_tv_template,
2359 .count = CAST6_CBC_ENC_TEST_VECTORS
2360 },
2361 .dec = {
2362 .vecs = cast6_cbc_dec_tv_template,
2363 .count = CAST6_CBC_DEC_TEST_VECTORS
2364 }
2365 }
2366 }
2367 }, {
2368 .alg = "cbc(des)",
2369 .test = alg_test_skcipher,
2370 .suite = {
2371 .cipher = {
2372 .enc = {
2373 .vecs = des_cbc_enc_tv_template,
2374 .count = DES_CBC_ENC_TEST_VECTORS
2375 },
2376 .dec = {
2377 .vecs = des_cbc_dec_tv_template,
2378 .count = DES_CBC_DEC_TEST_VECTORS
2379 }
2380 }
2381 }
2382 }, {
2383 .alg = "cbc(des3_ede)",
2384 .test = alg_test_skcipher,
2385 .fips_allowed = 1,
2386 .suite = {
2387 .cipher = {
2388 .enc = {
2389 .vecs = des3_ede_cbc_enc_tv_template,
2390 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2391 },
2392 .dec = {
2393 .vecs = des3_ede_cbc_dec_tv_template,
2394 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2395 }
2396 }
2397 }
2398 }, {
2399 .alg = "cbc(serpent)",
2400 .test = alg_test_skcipher,
2401 .suite = {
2402 .cipher = {
2403 .enc = {
2404 .vecs = serpent_cbc_enc_tv_template,
2405 .count = SERPENT_CBC_ENC_TEST_VECTORS
2406 },
2407 .dec = {
2408 .vecs = serpent_cbc_dec_tv_template,
2409 .count = SERPENT_CBC_DEC_TEST_VECTORS
2410 }
2411 }
2412 }
2413 }, {
2414 .alg = "cbc(twofish)",
2415 .test = alg_test_skcipher,
2416 .suite = {
2417 .cipher = {
2418 .enc = {
2419 .vecs = tf_cbc_enc_tv_template,
2420 .count = TF_CBC_ENC_TEST_VECTORS
2421 },
2422 .dec = {
2423 .vecs = tf_cbc_dec_tv_template,
2424 .count = TF_CBC_DEC_TEST_VECTORS
2425 }
2426 }
2427 }
2428 }, {
2429 .alg = "ccm(aes)",
2430 .test = alg_test_aead,
2431 .fips_allowed = 1,
2432 .suite = {
2433 .aead = {
2434 .enc = {
2435 .vecs = aes_ccm_enc_tv_template,
2436 .count = AES_CCM_ENC_TEST_VECTORS
2437 },
2438 .dec = {
2439 .vecs = aes_ccm_dec_tv_template,
2440 .count = AES_CCM_DEC_TEST_VECTORS
2441 }
2442 }
2443 }
2444 }, {
2445 .alg = "chacha20",
2446 .test = alg_test_skcipher,
2447 .suite = {
2448 .cipher = {
2449 .enc = {
2450 .vecs = chacha20_enc_tv_template,
2451 .count = CHACHA20_ENC_TEST_VECTORS
2452 },
2453 .dec = {
2454 .vecs = chacha20_enc_tv_template,
2455 .count = CHACHA20_ENC_TEST_VECTORS
2456 },
2457 }
2458 }
2459 }, {
2460 .alg = "cmac(aes)",
2461 .fips_allowed = 1,
2462 .test = alg_test_hash,
2463 .suite = {
2464 .hash = {
2465 .vecs = aes_cmac128_tv_template,
2466 .count = CMAC_AES_TEST_VECTORS
2467 }
2468 }
2469 }, {
2470 .alg = "cmac(des3_ede)",
2471 .fips_allowed = 1,
2472 .test = alg_test_hash,
2473 .suite = {
2474 .hash = {
2475 .vecs = des3_ede_cmac64_tv_template,
2476 .count = CMAC_DES3_EDE_TEST_VECTORS
2477 }
2478 }
2479 }, {
2480 .alg = "compress_null",
2481 .test = alg_test_null,
2482 }, {
2483 .alg = "crc32",
2484 .test = alg_test_hash,
2485 .suite = {
2486 .hash = {
2487 .vecs = crc32_tv_template,
2488 .count = CRC32_TEST_VECTORS
2489 }
2490 }
2491 }, {
2492 .alg = "crc32c",
2493 .test = alg_test_crc32c,
2494 .fips_allowed = 1,
2495 .suite = {
2496 .hash = {
2497 .vecs = crc32c_tv_template,
2498 .count = CRC32C_TEST_VECTORS
2499 }
2500 }
2501 }, {
2502 .alg = "crct10dif",
2503 .test = alg_test_hash,
2504 .fips_allowed = 1,
2505 .suite = {
2506 .hash = {
2507 .vecs = crct10dif_tv_template,
2508 .count = CRCT10DIF_TEST_VECTORS
2509 }
2510 }
2511 }, {
2512 .alg = "cryptd(__driver-cbc-aes-aesni)",
2513 .test = alg_test_null,
2514 .fips_allowed = 1,
2515 }, {
2516 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2517 .test = alg_test_null,
2518 }, {
2519 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2520 .test = alg_test_null,
2521 }, {
2522 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2523 .test = alg_test_null,
2524 }, {
2525 .alg = "cryptd(__driver-ecb-aes-aesni)",
2526 .test = alg_test_null,
2527 .fips_allowed = 1,
2528 }, {
2529 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2530 .test = alg_test_null,
2531 }, {
2532 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2533 .test = alg_test_null,
2534 }, {
2535 .alg = "cryptd(__driver-ecb-cast5-avx)",
2536 .test = alg_test_null,
2537 }, {
2538 .alg = "cryptd(__driver-ecb-cast6-avx)",
2539 .test = alg_test_null,
2540 }, {
2541 .alg = "cryptd(__driver-ecb-serpent-avx)",
2542 .test = alg_test_null,
2543 }, {
2544 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2545 .test = alg_test_null,
2546 }, {
2547 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2548 .test = alg_test_null,
2549 }, {
2550 .alg = "cryptd(__driver-ecb-twofish-avx)",
2551 .test = alg_test_null,
2552 }, {
2553 .alg = "cryptd(__driver-gcm-aes-aesni)",
2554 .test = alg_test_null,
2555 .fips_allowed = 1,
2556 }, {
2557 .alg = "cryptd(__ghash-pclmulqdqni)",
2558 .test = alg_test_null,
2559 .fips_allowed = 1,
2560 }, {
2561 .alg = "ctr(aes)",
2562 .test = alg_test_skcipher,
2563 .fips_allowed = 1,
2564 .suite = {
2565 .cipher = {
2566 .enc = {
2567 .vecs = aes_ctr_enc_tv_template,
2568 .count = AES_CTR_ENC_TEST_VECTORS
2569 },
2570 .dec = {
2571 .vecs = aes_ctr_dec_tv_template,
2572 .count = AES_CTR_DEC_TEST_VECTORS
2573 }
2574 }
2575 }
2576 }, {
2577 .alg = "ctr(blowfish)",
2578 .test = alg_test_skcipher,
2579 .suite = {
2580 .cipher = {
2581 .enc = {
2582 .vecs = bf_ctr_enc_tv_template,
2583 .count = BF_CTR_ENC_TEST_VECTORS
2584 },
2585 .dec = {
2586 .vecs = bf_ctr_dec_tv_template,
2587 .count = BF_CTR_DEC_TEST_VECTORS
2588 }
2589 }
2590 }
2591 }, {
2592 .alg = "ctr(camellia)",
2593 .test = alg_test_skcipher,
2594 .suite = {
2595 .cipher = {
2596 .enc = {
2597 .vecs = camellia_ctr_enc_tv_template,
2598 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2599 },
2600 .dec = {
2601 .vecs = camellia_ctr_dec_tv_template,
2602 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2603 }
2604 }
2605 }
2606 }, {
2607 .alg = "ctr(cast5)",
2608 .test = alg_test_skcipher,
2609 .suite = {
2610 .cipher = {
2611 .enc = {
2612 .vecs = cast5_ctr_enc_tv_template,
2613 .count = CAST5_CTR_ENC_TEST_VECTORS
2614 },
2615 .dec = {
2616 .vecs = cast5_ctr_dec_tv_template,
2617 .count = CAST5_CTR_DEC_TEST_VECTORS
2618 }
2619 }
2620 }
2621 }, {
2622 .alg = "ctr(cast6)",
2623 .test = alg_test_skcipher,
2624 .suite = {
2625 .cipher = {
2626 .enc = {
2627 .vecs = cast6_ctr_enc_tv_template,
2628 .count = CAST6_CTR_ENC_TEST_VECTORS
2629 },
2630 .dec = {
2631 .vecs = cast6_ctr_dec_tv_template,
2632 .count = CAST6_CTR_DEC_TEST_VECTORS
2633 }
2634 }
2635 }
2636 }, {
2637 .alg = "ctr(des)",
2638 .test = alg_test_skcipher,
2639 .suite = {
2640 .cipher = {
2641 .enc = {
2642 .vecs = des_ctr_enc_tv_template,
2643 .count = DES_CTR_ENC_TEST_VECTORS
2644 },
2645 .dec = {
2646 .vecs = des_ctr_dec_tv_template,
2647 .count = DES_CTR_DEC_TEST_VECTORS
2648 }
2649 }
2650 }
2651 }, {
2652 .alg = "ctr(des3_ede)",
2653 .test = alg_test_skcipher,
2654 .suite = {
2655 .cipher = {
2656 .enc = {
2657 .vecs = des3_ede_ctr_enc_tv_template,
2658 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2659 },
2660 .dec = {
2661 .vecs = des3_ede_ctr_dec_tv_template,
2662 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2663 }
2664 }
2665 }
2666 }, {
2667 .alg = "ctr(serpent)",
2668 .test = alg_test_skcipher,
2669 .suite = {
2670 .cipher = {
2671 .enc = {
2672 .vecs = serpent_ctr_enc_tv_template,
2673 .count = SERPENT_CTR_ENC_TEST_VECTORS
2674 },
2675 .dec = {
2676 .vecs = serpent_ctr_dec_tv_template,
2677 .count = SERPENT_CTR_DEC_TEST_VECTORS
2678 }
2679 }
2680 }
2681 }, {
2682 .alg = "ctr(twofish)",
2683 .test = alg_test_skcipher,
2684 .suite = {
2685 .cipher = {
2686 .enc = {
2687 .vecs = tf_ctr_enc_tv_template,
2688 .count = TF_CTR_ENC_TEST_VECTORS
2689 },
2690 .dec = {
2691 .vecs = tf_ctr_dec_tv_template,
2692 .count = TF_CTR_DEC_TEST_VECTORS
2693 }
2694 }
2695 }
2696 }, {
2697 .alg = "cts(cbc(aes))",
2698 .test = alg_test_skcipher,
2699 .suite = {
2700 .cipher = {
2701 .enc = {
2702 .vecs = cts_mode_enc_tv_template,
2703 .count = CTS_MODE_ENC_TEST_VECTORS
2704 },
2705 .dec = {
2706 .vecs = cts_mode_dec_tv_template,
2707 .count = CTS_MODE_DEC_TEST_VECTORS
2708 }
2709 }
2710 }
2711 }, {
2712 .alg = "deflate",
2713 .test = alg_test_comp,
2714 .fips_allowed = 1,
2715 .suite = {
2716 .comp = {
2717 .comp = {
2718 .vecs = deflate_comp_tv_template,
2719 .count = DEFLATE_COMP_TEST_VECTORS
2720 },
2721 .decomp = {
2722 .vecs = deflate_decomp_tv_template,
2723 .count = DEFLATE_DECOMP_TEST_VECTORS
2724 }
2725 }
2726 }
2727 }, {
2728 .alg = "digest_null",
2729 .test = alg_test_null,
2730 }, {
2731 .alg = "drbg_nopr_ctr_aes128",
2732 .test = alg_test_drbg,
2733 .fips_allowed = 1,
2734 .suite = {
2735 .drbg = {
2736 .vecs = drbg_nopr_ctr_aes128_tv_template,
2737 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2738 }
2739 }
2740 }, {
2741 .alg = "drbg_nopr_ctr_aes192",
2742 .test = alg_test_drbg,
2743 .fips_allowed = 1,
2744 .suite = {
2745 .drbg = {
2746 .vecs = drbg_nopr_ctr_aes192_tv_template,
2747 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2748 }
2749 }
2750 }, {
2751 .alg = "drbg_nopr_ctr_aes256",
2752 .test = alg_test_drbg,
2753 .fips_allowed = 1,
2754 .suite = {
2755 .drbg = {
2756 .vecs = drbg_nopr_ctr_aes256_tv_template,
2757 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2758 }
2759 }
2760 }, {
2761 /*
2762 * There is no need to specifically test the DRBG with every
2763 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2764 */
2765 .alg = "drbg_nopr_hmac_sha1",
2766 .fips_allowed = 1,
2767 .test = alg_test_null,
2768 }, {
2769 .alg = "drbg_nopr_hmac_sha256",
2770 .test = alg_test_drbg,
2771 .fips_allowed = 1,
2772 .suite = {
2773 .drbg = {
2774 .vecs = drbg_nopr_hmac_sha256_tv_template,
2775 .count =
2776 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2777 }
2778 }
2779 }, {
2780 /* covered by drbg_nopr_hmac_sha256 test */
2781 .alg = "drbg_nopr_hmac_sha384",
2782 .fips_allowed = 1,
2783 .test = alg_test_null,
2784 }, {
2785 .alg = "drbg_nopr_hmac_sha512",
2786 .test = alg_test_null,
2787 .fips_allowed = 1,
2788 }, {
2789 .alg = "drbg_nopr_sha1",
2790 .fips_allowed = 1,
2791 .test = alg_test_null,
2792 }, {
2793 .alg = "drbg_nopr_sha256",
2794 .test = alg_test_drbg,
2795 .fips_allowed = 1,
2796 .suite = {
2797 .drbg = {
2798 .vecs = drbg_nopr_sha256_tv_template,
2799 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2800 }
2801 }
2802 }, {
2803 /* covered by drbg_nopr_sha256 test */
2804 .alg = "drbg_nopr_sha384",
2805 .fips_allowed = 1,
2806 .test = alg_test_null,
2807 }, {
2808 .alg = "drbg_nopr_sha512",
2809 .fips_allowed = 1,
2810 .test = alg_test_null,
2811 }, {
2812 .alg = "drbg_pr_ctr_aes128",
2813 .test = alg_test_drbg,
2814 .fips_allowed = 1,
2815 .suite = {
2816 .drbg = {
2817 .vecs = drbg_pr_ctr_aes128_tv_template,
2818 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2819 }
2820 }
2821 }, {
2822 /* covered by drbg_pr_ctr_aes128 test */
2823 .alg = "drbg_pr_ctr_aes192",
2824 .fips_allowed = 1,
2825 .test = alg_test_null,
2826 }, {
2827 .alg = "drbg_pr_ctr_aes256",
2828 .fips_allowed = 1,
2829 .test = alg_test_null,
2830 }, {
2831 .alg = "drbg_pr_hmac_sha1",
2832 .fips_allowed = 1,
2833 .test = alg_test_null,
2834 }, {
2835 .alg = "drbg_pr_hmac_sha256",
2836 .test = alg_test_drbg,
2837 .fips_allowed = 1,
2838 .suite = {
2839 .drbg = {
2840 .vecs = drbg_pr_hmac_sha256_tv_template,
2841 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2842 }
2843 }
2844 }, {
2845 /* covered by drbg_pr_hmac_sha256 test */
2846 .alg = "drbg_pr_hmac_sha384",
2847 .fips_allowed = 1,
2848 .test = alg_test_null,
2849 }, {
2850 .alg = "drbg_pr_hmac_sha512",
2851 .test = alg_test_null,
2852 .fips_allowed = 1,
2853 }, {
2854 .alg = "drbg_pr_sha1",
2855 .fips_allowed = 1,
2856 .test = alg_test_null,
2857 }, {
2858 .alg = "drbg_pr_sha256",
2859 .test = alg_test_drbg,
2860 .fips_allowed = 1,
2861 .suite = {
2862 .drbg = {
2863 .vecs = drbg_pr_sha256_tv_template,
2864 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
2865 }
2866 }
2867 }, {
2868 /* covered by drbg_pr_sha256 test */
2869 .alg = "drbg_pr_sha384",
2870 .fips_allowed = 1,
2871 .test = alg_test_null,
2872 }, {
2873 .alg = "drbg_pr_sha512",
2874 .fips_allowed = 1,
2875 .test = alg_test_null,
2876 }, {
2877 .alg = "ecb(__aes-aesni)",
2878 .test = alg_test_null,
2879 .fips_allowed = 1,
2880 }, {
2881 .alg = "ecb(aes)",
2882 .test = alg_test_skcipher,
2883 .fips_allowed = 1,
2884 .suite = {
2885 .cipher = {
2886 .enc = {
2887 .vecs = aes_enc_tv_template,
2888 .count = AES_ENC_TEST_VECTORS
2889 },
2890 .dec = {
2891 .vecs = aes_dec_tv_template,
2892 .count = AES_DEC_TEST_VECTORS
2893 }
2894 }
2895 }
2896 }, {
2897 .alg = "ecb(anubis)",
2898 .test = alg_test_skcipher,
2899 .suite = {
2900 .cipher = {
2901 .enc = {
2902 .vecs = anubis_enc_tv_template,
2903 .count = ANUBIS_ENC_TEST_VECTORS
2904 },
2905 .dec = {
2906 .vecs = anubis_dec_tv_template,
2907 .count = ANUBIS_DEC_TEST_VECTORS
2908 }
2909 }
2910 }
2911 }, {
2912 .alg = "ecb(arc4)",
2913 .test = alg_test_skcipher,
2914 .suite = {
2915 .cipher = {
2916 .enc = {
2917 .vecs = arc4_enc_tv_template,
2918 .count = ARC4_ENC_TEST_VECTORS
2919 },
2920 .dec = {
2921 .vecs = arc4_dec_tv_template,
2922 .count = ARC4_DEC_TEST_VECTORS
2923 }
2924 }
2925 }
2926 }, {
2927 .alg = "ecb(blowfish)",
2928 .test = alg_test_skcipher,
2929 .suite = {
2930 .cipher = {
2931 .enc = {
2932 .vecs = bf_enc_tv_template,
2933 .count = BF_ENC_TEST_VECTORS
2934 },
2935 .dec = {
2936 .vecs = bf_dec_tv_template,
2937 .count = BF_DEC_TEST_VECTORS
2938 }
2939 }
2940 }
2941 }, {
2942 .alg = "ecb(camellia)",
2943 .test = alg_test_skcipher,
2944 .suite = {
2945 .cipher = {
2946 .enc = {
2947 .vecs = camellia_enc_tv_template,
2948 .count = CAMELLIA_ENC_TEST_VECTORS
2949 },
2950 .dec = {
2951 .vecs = camellia_dec_tv_template,
2952 .count = CAMELLIA_DEC_TEST_VECTORS
2953 }
2954 }
2955 }
2956 }, {
2957 .alg = "ecb(cast5)",
2958 .test = alg_test_skcipher,
2959 .suite = {
2960 .cipher = {
2961 .enc = {
2962 .vecs = cast5_enc_tv_template,
2963 .count = CAST5_ENC_TEST_VECTORS
2964 },
2965 .dec = {
2966 .vecs = cast5_dec_tv_template,
2967 .count = CAST5_DEC_TEST_VECTORS
2968 }
2969 }
2970 }
2971 }, {
2972 .alg = "ecb(cast6)",
2973 .test = alg_test_skcipher,
2974 .suite = {
2975 .cipher = {
2976 .enc = {
2977 .vecs = cast6_enc_tv_template,
2978 .count = CAST6_ENC_TEST_VECTORS
2979 },
2980 .dec = {
2981 .vecs = cast6_dec_tv_template,
2982 .count = CAST6_DEC_TEST_VECTORS
2983 }
2984 }
2985 }
2986 }, {
2987 .alg = "ecb(cipher_null)",
2988 .test = alg_test_null,
2989 }, {
2990 .alg = "ecb(des)",
2991 .test = alg_test_skcipher,
2992 .suite = {
2993 .cipher = {
2994 .enc = {
2995 .vecs = des_enc_tv_template,
2996 .count = DES_ENC_TEST_VECTORS
2997 },
2998 .dec = {
2999 .vecs = des_dec_tv_template,
3000 .count = DES_DEC_TEST_VECTORS
3001 }
3002 }
3003 }
3004 }, {
3005 .alg = "ecb(des3_ede)",
3006 .test = alg_test_skcipher,
3007 .fips_allowed = 1,
3008 .suite = {
3009 .cipher = {
3010 .enc = {
3011 .vecs = des3_ede_enc_tv_template,
3012 .count = DES3_EDE_ENC_TEST_VECTORS
3013 },
3014 .dec = {
3015 .vecs = des3_ede_dec_tv_template,
3016 .count = DES3_EDE_DEC_TEST_VECTORS
3017 }
3018 }
3019 }
3020 }, {
3021 .alg = "ecb(fcrypt)",
3022 .test = alg_test_skcipher,
3023 .suite = {
3024 .cipher = {
3025 .enc = {
3026 .vecs = fcrypt_pcbc_enc_tv_template,
3027 .count = 1
3028 },
3029 .dec = {
3030 .vecs = fcrypt_pcbc_dec_tv_template,
3031 .count = 1
3032 }
3033 }
3034 }
3035 }, {
3036 .alg = "ecb(khazad)",
3037 .test = alg_test_skcipher,
3038 .suite = {
3039 .cipher = {
3040 .enc = {
3041 .vecs = khazad_enc_tv_template,
3042 .count = KHAZAD_ENC_TEST_VECTORS
3043 },
3044 .dec = {
3045 .vecs = khazad_dec_tv_template,
3046 .count = KHAZAD_DEC_TEST_VECTORS
3047 }
3048 }
3049 }
3050 }, {
3051 .alg = "ecb(seed)",
3052 .test = alg_test_skcipher,
3053 .suite = {
3054 .cipher = {
3055 .enc = {
3056 .vecs = seed_enc_tv_template,
3057 .count = SEED_ENC_TEST_VECTORS
3058 },
3059 .dec = {
3060 .vecs = seed_dec_tv_template,
3061 .count = SEED_DEC_TEST_VECTORS
3062 }
3063 }
3064 }
3065 }, {
3066 .alg = "ecb(serpent)",
3067 .test = alg_test_skcipher,
3068 .suite = {
3069 .cipher = {
3070 .enc = {
3071 .vecs = serpent_enc_tv_template,
3072 .count = SERPENT_ENC_TEST_VECTORS
3073 },
3074 .dec = {
3075 .vecs = serpent_dec_tv_template,
3076 .count = SERPENT_DEC_TEST_VECTORS
3077 }
3078 }
3079 }
3080 }, {
3081 .alg = "ecb(tea)",
3082 .test = alg_test_skcipher,
3083 .suite = {
3084 .cipher = {
3085 .enc = {
3086 .vecs = tea_enc_tv_template,
3087 .count = TEA_ENC_TEST_VECTORS
3088 },
3089 .dec = {
3090 .vecs = tea_dec_tv_template,
3091 .count = TEA_DEC_TEST_VECTORS
3092 }
3093 }
3094 }
3095 }, {
3096 .alg = "ecb(tnepres)",
3097 .test = alg_test_skcipher,
3098 .suite = {
3099 .cipher = {
3100 .enc = {
3101 .vecs = tnepres_enc_tv_template,
3102 .count = TNEPRES_ENC_TEST_VECTORS
3103 },
3104 .dec = {
3105 .vecs = tnepres_dec_tv_template,
3106 .count = TNEPRES_DEC_TEST_VECTORS
3107 }
3108 }
3109 }
3110 }, {
3111 .alg = "ecb(twofish)",
3112 .test = alg_test_skcipher,
3113 .suite = {
3114 .cipher = {
3115 .enc = {
3116 .vecs = tf_enc_tv_template,
3117 .count = TF_ENC_TEST_VECTORS
3118 },
3119 .dec = {
3120 .vecs = tf_dec_tv_template,
3121 .count = TF_DEC_TEST_VECTORS
3122 }
3123 }
3124 }
3125 }, {
3126 .alg = "ecb(xeta)",
3127 .test = alg_test_skcipher,
3128 .suite = {
3129 .cipher = {
3130 .enc = {
3131 .vecs = xeta_enc_tv_template,
3132 .count = XETA_ENC_TEST_VECTORS
3133 },
3134 .dec = {
3135 .vecs = xeta_dec_tv_template,
3136 .count = XETA_DEC_TEST_VECTORS
3137 }
3138 }
3139 }
3140 }, {
3141 .alg = "ecb(xtea)",
3142 .test = alg_test_skcipher,
3143 .suite = {
3144 .cipher = {
3145 .enc = {
3146 .vecs = xtea_enc_tv_template,
3147 .count = XTEA_ENC_TEST_VECTORS
3148 },
3149 .dec = {
3150 .vecs = xtea_dec_tv_template,
3151 .count = XTEA_DEC_TEST_VECTORS
3152 }
3153 }
3154 }
3155 }, {
3156 .alg = "gcm(aes)",
3157 .test = alg_test_aead,
3158 .fips_allowed = 1,
3159 .suite = {
3160 .aead = {
3161 .enc = {
3162 .vecs = aes_gcm_enc_tv_template,
3163 .count = AES_GCM_ENC_TEST_VECTORS
3164 },
3165 .dec = {
3166 .vecs = aes_gcm_dec_tv_template,
3167 .count = AES_GCM_DEC_TEST_VECTORS
3168 }
3169 }
3170 }
3171 }, {
3172 .alg = "ghash",
3173 .test = alg_test_hash,
3174 .fips_allowed = 1,
3175 .suite = {
3176 .hash = {
3177 .vecs = ghash_tv_template,
3178 .count = GHASH_TEST_VECTORS
3179 }
3180 }
3181 }, {
3182 .alg = "hmac(crc32)",
3183 .test = alg_test_hash,
3184 .suite = {
3185 .hash = {
3186 .vecs = bfin_crc_tv_template,
3187 .count = BFIN_CRC_TEST_VECTORS
3188 }
3189 }
3190 }, {
3191 .alg = "hmac(md5)",
3192 .test = alg_test_hash,
3193 .suite = {
3194 .hash = {
3195 .vecs = hmac_md5_tv_template,
3196 .count = HMAC_MD5_TEST_VECTORS
3197 }
3198 }
3199 }, {
3200 .alg = "hmac(rmd128)",
3201 .test = alg_test_hash,
3202 .suite = {
3203 .hash = {
3204 .vecs = hmac_rmd128_tv_template,
3205 .count = HMAC_RMD128_TEST_VECTORS
3206 }
3207 }
3208 }, {
3209 .alg = "hmac(rmd160)",
3210 .test = alg_test_hash,
3211 .suite = {
3212 .hash = {
3213 .vecs = hmac_rmd160_tv_template,
3214 .count = HMAC_RMD160_TEST_VECTORS
3215 }
3216 }
3217 }, {
3218 .alg = "hmac(sha1)",
3219 .test = alg_test_hash,
3220 .fips_allowed = 1,
3221 .suite = {
3222 .hash = {
3223 .vecs = hmac_sha1_tv_template,
3224 .count = HMAC_SHA1_TEST_VECTORS
3225 }
3226 }
3227 }, {
3228 .alg = "hmac(sha224)",
3229 .test = alg_test_hash,
3230 .fips_allowed = 1,
3231 .suite = {
3232 .hash = {
3233 .vecs = hmac_sha224_tv_template,
3234 .count = HMAC_SHA224_TEST_VECTORS
3235 }
3236 }
3237 }, {
3238 .alg = "hmac(sha256)",
3239 .test = alg_test_hash,
3240 .fips_allowed = 1,
3241 .suite = {
3242 .hash = {
3243 .vecs = hmac_sha256_tv_template,
3244 .count = HMAC_SHA256_TEST_VECTORS
3245 }
3246 }
3247 }, {
3248 .alg = "hmac(sha384)",
3249 .test = alg_test_hash,
3250 .fips_allowed = 1,
3251 .suite = {
3252 .hash = {
3253 .vecs = hmac_sha384_tv_template,
3254 .count = HMAC_SHA384_TEST_VECTORS
3255 }
3256 }
3257 }, {
3258 .alg = "hmac(sha512)",
3259 .test = alg_test_hash,
3260 .fips_allowed = 1,
3261 .suite = {
3262 .hash = {
3263 .vecs = hmac_sha512_tv_template,
3264 .count = HMAC_SHA512_TEST_VECTORS
3265 }
3266 }
3267 }, {
3268 .alg = "jitterentropy_rng",
3269 .fips_allowed = 1,
3270 .test = alg_test_null,
3271 }, {
3272 .alg = "kw(aes)",
3273 .test = alg_test_skcipher,
3274 .fips_allowed = 1,
3275 .suite = {
3276 .cipher = {
3277 .enc = {
3278 .vecs = aes_kw_enc_tv_template,
3279 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3280 },
3281 .dec = {
3282 .vecs = aes_kw_dec_tv_template,
3283 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3284 }
3285 }
3286 }
3287 }, {
3288 .alg = "lrw(aes)",
3289 .test = alg_test_skcipher,
3290 .suite = {
3291 .cipher = {
3292 .enc = {
3293 .vecs = aes_lrw_enc_tv_template,
3294 .count = AES_LRW_ENC_TEST_VECTORS
3295 },
3296 .dec = {
3297 .vecs = aes_lrw_dec_tv_template,
3298 .count = AES_LRW_DEC_TEST_VECTORS
3299 }
3300 }
3301 }
3302 }, {
3303 .alg = "lrw(camellia)",
3304 .test = alg_test_skcipher,
3305 .suite = {
3306 .cipher = {
3307 .enc = {
3308 .vecs = camellia_lrw_enc_tv_template,
3309 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3310 },
3311 .dec = {
3312 .vecs = camellia_lrw_dec_tv_template,
3313 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3314 }
3315 }
3316 }
3317 }, {
3318 .alg = "lrw(cast6)",
3319 .test = alg_test_skcipher,
3320 .suite = {
3321 .cipher = {
3322 .enc = {
3323 .vecs = cast6_lrw_enc_tv_template,
3324 .count = CAST6_LRW_ENC_TEST_VECTORS
3325 },
3326 .dec = {
3327 .vecs = cast6_lrw_dec_tv_template,
3328 .count = CAST6_LRW_DEC_TEST_VECTORS
3329 }
3330 }
3331 }
3332 }, {
3333 .alg = "lrw(serpent)",
3334 .test = alg_test_skcipher,
3335 .suite = {
3336 .cipher = {
3337 .enc = {
3338 .vecs = serpent_lrw_enc_tv_template,
3339 .count = SERPENT_LRW_ENC_TEST_VECTORS
3340 },
3341 .dec = {
3342 .vecs = serpent_lrw_dec_tv_template,
3343 .count = SERPENT_LRW_DEC_TEST_VECTORS
3344 }
3345 }
3346 }
3347 }, {
3348 .alg = "lrw(twofish)",
3349 .test = alg_test_skcipher,
3350 .suite = {
3351 .cipher = {
3352 .enc = {
3353 .vecs = tf_lrw_enc_tv_template,
3354 .count = TF_LRW_ENC_TEST_VECTORS
3355 },
3356 .dec = {
3357 .vecs = tf_lrw_dec_tv_template,
3358 .count = TF_LRW_DEC_TEST_VECTORS
3359 }
3360 }
3361 }
3362 }, {
3363 .alg = "lz4",
3364 .test = alg_test_comp,
3365 .fips_allowed = 1,
3366 .suite = {
3367 .comp = {
3368 .comp = {
3369 .vecs = lz4_comp_tv_template,
3370 .count = LZ4_COMP_TEST_VECTORS
3371 },
3372 .decomp = {
3373 .vecs = lz4_decomp_tv_template,
3374 .count = LZ4_DECOMP_TEST_VECTORS
3375 }
3376 }
3377 }
3378 }, {
3379 .alg = "lz4hc",
3380 .test = alg_test_comp,
3381 .fips_allowed = 1,
3382 .suite = {
3383 .comp = {
3384 .comp = {
3385 .vecs = lz4hc_comp_tv_template,
3386 .count = LZ4HC_COMP_TEST_VECTORS
3387 },
3388 .decomp = {
3389 .vecs = lz4hc_decomp_tv_template,
3390 .count = LZ4HC_DECOMP_TEST_VECTORS
3391 }
3392 }
3393 }
3394 }, {
3395 .alg = "lzo",
3396 .test = alg_test_comp,
3397 .fips_allowed = 1,
3398 .suite = {
3399 .comp = {
3400 .comp = {
3401 .vecs = lzo_comp_tv_template,
3402 .count = LZO_COMP_TEST_VECTORS
3403 },
3404 .decomp = {
3405 .vecs = lzo_decomp_tv_template,
3406 .count = LZO_DECOMP_TEST_VECTORS
3407 }
3408 }
3409 }
3410 }, {
3411 .alg = "md4",
3412 .test = alg_test_hash,
3413 .suite = {
3414 .hash = {
3415 .vecs = md4_tv_template,
3416 .count = MD4_TEST_VECTORS
3417 }
3418 }
3419 }, {
3420 .alg = "md5",
3421 .test = alg_test_hash,
3422 .suite = {
3423 .hash = {
3424 .vecs = md5_tv_template,
3425 .count = MD5_TEST_VECTORS
3426 }
3427 }
3428 }, {
3429 .alg = "michael_mic",
3430 .test = alg_test_hash,
3431 .suite = {
3432 .hash = {
3433 .vecs = michael_mic_tv_template,
3434 .count = MICHAEL_MIC_TEST_VECTORS
3435 }
3436 }
3437 }, {
3438 .alg = "ofb(aes)",
3439 .test = alg_test_skcipher,
3440 .fips_allowed = 1,
3441 .suite = {
3442 .cipher = {
3443 .enc = {
3444 .vecs = aes_ofb_enc_tv_template,
3445 .count = AES_OFB_ENC_TEST_VECTORS
3446 },
3447 .dec = {
3448 .vecs = aes_ofb_dec_tv_template,
3449 .count = AES_OFB_DEC_TEST_VECTORS
3450 }
3451 }
3452 }
3453 }, {
3454 .alg = "pcbc(fcrypt)",
3455 .test = alg_test_skcipher,
3456 .suite = {
3457 .cipher = {
3458 .enc = {
3459 .vecs = fcrypt_pcbc_enc_tv_template,
3460 .count = FCRYPT_ENC_TEST_VECTORS
3461 },
3462 .dec = {
3463 .vecs = fcrypt_pcbc_dec_tv_template,
3464 .count = FCRYPT_DEC_TEST_VECTORS
3465 }
3466 }
3467 }
3468 }, {
3469 .alg = "poly1305",
3470 .test = alg_test_hash,
3471 .suite = {
3472 .hash = {
3473 .vecs = poly1305_tv_template,
3474 .count = POLY1305_TEST_VECTORS
3475 }
3476 }
3477 }, {
3478 .alg = "rfc3686(ctr(aes))",
3479 .test = alg_test_skcipher,
3480 .fips_allowed = 1,
3481 .suite = {
3482 .cipher = {
3483 .enc = {
3484 .vecs = aes_ctr_rfc3686_enc_tv_template,
3485 .count = AES_CTR_3686_ENC_TEST_VECTORS
3486 },
3487 .dec = {
3488 .vecs = aes_ctr_rfc3686_dec_tv_template,
3489 .count = AES_CTR_3686_DEC_TEST_VECTORS
3490 }
3491 }
3492 }
3493 }, {
3494 .alg = "rfc4106(gcm(aes))",
3495 .test = alg_test_aead,
3496 .fips_allowed = 1,
3497 .suite = {
3498 .aead = {
3499 .enc = {
3500 .vecs = aes_gcm_rfc4106_enc_tv_template,
3501 .count = AES_GCM_4106_ENC_TEST_VECTORS
3502 },
3503 .dec = {
3504 .vecs = aes_gcm_rfc4106_dec_tv_template,
3505 .count = AES_GCM_4106_DEC_TEST_VECTORS
3506 }
3507 }
3508 }
3509 }, {
3510 .alg = "rfc4309(ccm(aes))",
3511 .test = alg_test_aead,
3512 .fips_allowed = 1,
3513 .suite = {
3514 .aead = {
3515 .enc = {
3516 .vecs = aes_ccm_rfc4309_enc_tv_template,
3517 .count = AES_CCM_4309_ENC_TEST_VECTORS
3518 },
3519 .dec = {
3520 .vecs = aes_ccm_rfc4309_dec_tv_template,
3521 .count = AES_CCM_4309_DEC_TEST_VECTORS
3522 }
3523 }
3524 }
3525 }, {
3526 .alg = "rfc4543(gcm(aes))",
3527 .test = alg_test_aead,
3528 .suite = {
3529 .aead = {
3530 .enc = {
3531 .vecs = aes_gcm_rfc4543_enc_tv_template,
3532 .count = AES_GCM_4543_ENC_TEST_VECTORS
3533 },
3534 .dec = {
3535 .vecs = aes_gcm_rfc4543_dec_tv_template,
3536 .count = AES_GCM_4543_DEC_TEST_VECTORS
3537 },
3538 }
3539 }
3540 }, {
3541 .alg = "rfc7539(chacha20,poly1305)",
3542 .test = alg_test_aead,
3543 .suite = {
3544 .aead = {
3545 .enc = {
3546 .vecs = rfc7539_enc_tv_template,
3547 .count = RFC7539_ENC_TEST_VECTORS
3548 },
3549 .dec = {
3550 .vecs = rfc7539_dec_tv_template,
3551 .count = RFC7539_DEC_TEST_VECTORS
3552 },
3553 }
3554 }
3555 }, {
3556 .alg = "rfc7539esp(chacha20,poly1305)",
3557 .test = alg_test_aead,
3558 .suite = {
3559 .aead = {
3560 .enc = {
3561 .vecs = rfc7539esp_enc_tv_template,
3562 .count = RFC7539ESP_ENC_TEST_VECTORS
3563 },
3564 .dec = {
3565 .vecs = rfc7539esp_dec_tv_template,
3566 .count = RFC7539ESP_DEC_TEST_VECTORS
3567 },
3568 }
3569 }
3570 }, {
3571 .alg = "rmd128",
3572 .test = alg_test_hash,
3573 .suite = {
3574 .hash = {
3575 .vecs = rmd128_tv_template,
3576 .count = RMD128_TEST_VECTORS
3577 }
3578 }
3579 }, {
3580 .alg = "rmd160",
3581 .test = alg_test_hash,
3582 .suite = {
3583 .hash = {
3584 .vecs = rmd160_tv_template,
3585 .count = RMD160_TEST_VECTORS
3586 }
3587 }
3588 }, {
3589 .alg = "rmd256",
3590 .test = alg_test_hash,
3591 .suite = {
3592 .hash = {
3593 .vecs = rmd256_tv_template,
3594 .count = RMD256_TEST_VECTORS
3595 }
3596 }
3597 }, {
3598 .alg = "rmd320",
3599 .test = alg_test_hash,
3600 .suite = {
3601 .hash = {
3602 .vecs = rmd320_tv_template,
3603 .count = RMD320_TEST_VECTORS
3604 }
3605 }
3606 }, {
3607 .alg = "rsa",
3608 .test = alg_test_akcipher,
3609 .fips_allowed = 1,
3610 .suite = {
3611 .akcipher = {
3612 .vecs = rsa_tv_template,
3613 .count = RSA_TEST_VECTORS
3614 }
3615 }
3616 }, {
3617 .alg = "salsa20",
3618 .test = alg_test_skcipher,
3619 .suite = {
3620 .cipher = {
3621 .enc = {
3622 .vecs = salsa20_stream_enc_tv_template,
3623 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3624 }
3625 }
3626 }
3627 }, {
3628 .alg = "sha1",
3629 .test = alg_test_hash,
3630 .fips_allowed = 1,
3631 .suite = {
3632 .hash = {
3633 .vecs = sha1_tv_template,
3634 .count = SHA1_TEST_VECTORS
3635 }
3636 }
3637 }, {
3638 .alg = "sha224",
3639 .test = alg_test_hash,
3640 .fips_allowed = 1,
3641 .suite = {
3642 .hash = {
3643 .vecs = sha224_tv_template,
3644 .count = SHA224_TEST_VECTORS
3645 }
3646 }
3647 }, {
3648 .alg = "sha256",
3649 .test = alg_test_hash,
3650 .fips_allowed = 1,
3651 .suite = {
3652 .hash = {
3653 .vecs = sha256_tv_template,
3654 .count = SHA256_TEST_VECTORS
3655 }
3656 }
3657 }, {
3658 .alg = "sha384",
3659 .test = alg_test_hash,
3660 .fips_allowed = 1,
3661 .suite = {
3662 .hash = {
3663 .vecs = sha384_tv_template,
3664 .count = SHA384_TEST_VECTORS
3665 }
3666 }
3667 }, {
3668 .alg = "sha512",
3669 .test = alg_test_hash,
3670 .fips_allowed = 1,
3671 .suite = {
3672 .hash = {
3673 .vecs = sha512_tv_template,
3674 .count = SHA512_TEST_VECTORS
3675 }
3676 }
3677 }, {
3678 .alg = "tgr128",
3679 .test = alg_test_hash,
3680 .suite = {
3681 .hash = {
3682 .vecs = tgr128_tv_template,
3683 .count = TGR128_TEST_VECTORS
3684 }
3685 }
3686 }, {
3687 .alg = "tgr160",
3688 .test = alg_test_hash,
3689 .suite = {
3690 .hash = {
3691 .vecs = tgr160_tv_template,
3692 .count = TGR160_TEST_VECTORS
3693 }
3694 }
3695 }, {
3696 .alg = "tgr192",
3697 .test = alg_test_hash,
3698 .suite = {
3699 .hash = {
3700 .vecs = tgr192_tv_template,
3701 .count = TGR192_TEST_VECTORS
3702 }
3703 }
3704 }, {
3705 .alg = "vmac(aes)",
3706 .test = alg_test_hash,
3707 .suite = {
3708 .hash = {
3709 .vecs = aes_vmac128_tv_template,
3710 .count = VMAC_AES_TEST_VECTORS
3711 }
3712 }
3713 }, {
3714 .alg = "wp256",
3715 .test = alg_test_hash,
3716 .suite = {
3717 .hash = {
3718 .vecs = wp256_tv_template,
3719 .count = WP256_TEST_VECTORS
3720 }
3721 }
3722 }, {
3723 .alg = "wp384",
3724 .test = alg_test_hash,
3725 .suite = {
3726 .hash = {
3727 .vecs = wp384_tv_template,
3728 .count = WP384_TEST_VECTORS
3729 }
3730 }
3731 }, {
3732 .alg = "wp512",
3733 .test = alg_test_hash,
3734 .suite = {
3735 .hash = {
3736 .vecs = wp512_tv_template,
3737 .count = WP512_TEST_VECTORS
3738 }
3739 }
3740 }, {
3741 .alg = "xcbc(aes)",
3742 .test = alg_test_hash,
3743 .suite = {
3744 .hash = {
3745 .vecs = aes_xcbc128_tv_template,
3746 .count = XCBC_AES_TEST_VECTORS
3747 }
3748 }
3749 }, {
3750 .alg = "xts(aes)",
3751 .test = alg_test_skcipher,
3752 .fips_allowed = 1,
3753 .suite = {
3754 .cipher = {
3755 .enc = {
3756 .vecs = aes_xts_enc_tv_template,
3757 .count = AES_XTS_ENC_TEST_VECTORS
3758 },
3759 .dec = {
3760 .vecs = aes_xts_dec_tv_template,
3761 .count = AES_XTS_DEC_TEST_VECTORS
3762 }
3763 }
3764 }
3765 }, {
3766 .alg = "xts(camellia)",
3767 .test = alg_test_skcipher,
3768 .suite = {
3769 .cipher = {
3770 .enc = {
3771 .vecs = camellia_xts_enc_tv_template,
3772 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3773 },
3774 .dec = {
3775 .vecs = camellia_xts_dec_tv_template,
3776 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3777 }
3778 }
3779 }
3780 }, {
3781 .alg = "xts(cast6)",
3782 .test = alg_test_skcipher,
3783 .suite = {
3784 .cipher = {
3785 .enc = {
3786 .vecs = cast6_xts_enc_tv_template,
3787 .count = CAST6_XTS_ENC_TEST_VECTORS
3788 },
3789 .dec = {
3790 .vecs = cast6_xts_dec_tv_template,
3791 .count = CAST6_XTS_DEC_TEST_VECTORS
3792 }
3793 }
3794 }
3795 }, {
3796 .alg = "xts(serpent)",
3797 .test = alg_test_skcipher,
3798 .suite = {
3799 .cipher = {
3800 .enc = {
3801 .vecs = serpent_xts_enc_tv_template,
3802 .count = SERPENT_XTS_ENC_TEST_VECTORS
3803 },
3804 .dec = {
3805 .vecs = serpent_xts_dec_tv_template,
3806 .count = SERPENT_XTS_DEC_TEST_VECTORS
3807 }
3808 }
3809 }
3810 }, {
3811 .alg = "xts(twofish)",
3812 .test = alg_test_skcipher,
3813 .suite = {
3814 .cipher = {
3815 .enc = {
3816 .vecs = tf_xts_enc_tv_template,
3817 .count = TF_XTS_ENC_TEST_VECTORS
3818 },
3819 .dec = {
3820 .vecs = tf_xts_dec_tv_template,
3821 .count = TF_XTS_DEC_TEST_VECTORS
3822 }
3823 }
3824 }
3825 }
3826};
3827
3828static bool alg_test_descs_checked;
3829
3830static void alg_test_descs_check_order(void)
3831{
3832 int i;
3833
3834 /* only check once */
3835 if (alg_test_descs_checked)
3836 return;
3837
3838 alg_test_descs_checked = true;
3839
3840 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3841 int diff = strcmp(alg_test_descs[i - 1].alg,
3842 alg_test_descs[i].alg);
3843
3844 if (WARN_ON(diff > 0)) {
3845 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3846 alg_test_descs[i - 1].alg,
3847 alg_test_descs[i].alg);
3848 }
3849
3850 if (WARN_ON(diff == 0)) {
3851 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3852 alg_test_descs[i].alg);
3853 }
3854 }
3855}
3856
3857static int alg_find_test(const char *alg)
3858{
3859 int start = 0;
3860 int end = ARRAY_SIZE(alg_test_descs);
3861
3862 while (start < end) {
3863 int i = (start + end) / 2;
3864 int diff = strcmp(alg_test_descs[i].alg, alg);
3865
3866 if (diff > 0) {
3867 end = i;
3868 continue;
3869 }
3870
3871 if (diff < 0) {
3872 start = i + 1;
3873 continue;
3874 }
3875
3876 return i;
3877 }
3878
3879 return -1;
3880}
3881
/**
 * alg_test() - run the registered self-tests for an algorithm instance
 * @driver: name of the specific implementation (e.g. a driver name)
 * @alg: name of the generic algorithm
 * @type: crypto algorithm type flags
 * @mask: crypto algorithm type mask
 *
 * Looks up both @alg and @driver in the sorted alg_test_descs[] table and
 * runs every matching test routine. Returns 0 on success or when no test
 * exists, a negative errno on failure. In FIPS mode, a failing self-test
 * panics the system and a non-approved algorithm is rejected with -EINVAL.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* One-time check that the descriptor table is sorted and unique. */
	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/*
		 * Bare single-block ciphers are tested via their "ecb(...)"
		 * wrapping; build that name and look it up instead.
		 */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Tests may be registered under the generic or the driver name. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	/* j != i avoids running the same descriptor's test twice. */
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS mode treats any self-test failure as fatal. */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
3940
3941#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3942
3943EXPORT_SYMBOL_GPL(alg_test);
1/*
2 * Algorithm testing framework and tests.
3 *
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
8 *
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
15 *
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
22
23#include <crypto/aead.h>
24#include <crypto/hash.h>
25#include <crypto/skcipher.h>
26#include <linux/err.h>
27#include <linux/fips.h>
28#include <linux/module.h>
29#include <linux/scatterlist.h>
30#include <linux/slab.h>
31#include <linux/string.h>
32#include <crypto/rng.h>
33#include <crypto/drbg.h>
34#include <crypto/akcipher.h>
35#include <crypto/kpp.h>
36#include <crypto/acompress.h>
37
38#include "internal.h"
39
40static bool notests;
41module_param(notests, bool, 0644);
42MODULE_PARM_DESC(notests, "disable crypto self-tests");
43
44#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
45
46/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	/* Self-tests are compiled out; report success for every algorithm. */
	return 0;
}
51
52#else
53
54#include "testmgr.h"
55
56/*
57 * Need slab memory for testing (size in number of pages).
58 */
59#define XBUFSIZE 8
60
61/*
62 * Indexes into the xbuf to simulate cross-page access.
63 */
64#define IDX1 32
65#define IDX2 32400
66#define IDX3 1511
67#define IDX4 8193
68#define IDX5 22222
69#define IDX6 17101
70#define IDX7 27333
71#define IDX8 3000
72
73/*
74* Used by test_cipher()
75*/
76#define ENCRYPT 1
77#define DECRYPT 0
78
/*
 * Completion tracking for asynchronous crypto requests: the request
 * callback stores the final status in @err and signals @completion.
 */
struct tcrypt_result {
	struct completion completion;	/* signalled when the request finishes */
	int err;			/* status reported by the callback */
};
83
/* AEAD vectors, split into encryption and decryption sets. */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Block/stream cipher vectors, split into encryption and decryption sets. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};

/* Compression vectors, split into compress and decompress sets. */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

/* Hash/HMAC vectors. */
struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};

/* Deterministic RNG (ansi_cprng-style) vectors. */
struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};

/* NIST SP800-90A DRBG vectors. */
struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};

/* Asymmetric cipher (e.g. RSA) vectors. */
struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};

/* Key-agreement protocol primitive (e.g. DH/ECDH) vectors. */
struct kpp_test_suite {
	struct kpp_testvec *vecs;
	unsigned int count;
};
129
/*
 * One entry in the self-test table: maps an algorithm name to its test
 * routine and the test-vector suite that routine consumes.
 */
struct alg_test_desc {
	const char *alg;	/* canonical algorithm name being tested */
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	/* exactly one member is valid, selected by the ->test routine */
	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
147
/* Per-chunk buffer offsets used by the scatterlist ("chunked") tests. */
static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
149
150static void hexdump(unsigned char *buf, unsigned int len)
151{
152 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
153 16, 1,
154 buf, len, false);
155}
156
157static void tcrypt_complete(struct crypto_async_request *req, int err)
158{
159 struct tcrypt_result *res = req->data;
160
161 if (err == -EINPROGRESS)
162 return;
163
164 res->err = err;
165 complete(&res->completion);
166}
167
168static int testmgr_alloc_buf(char *buf[XBUFSIZE])
169{
170 int i;
171
172 for (i = 0; i < XBUFSIZE; i++) {
173 buf[i] = (void *)__get_free_page(GFP_KERNEL);
174 if (!buf[i])
175 goto err_free_buf;
176 }
177
178 return 0;
179
180err_free_buf:
181 while (i-- > 0)
182 free_page((unsigned long)buf[i]);
183
184 return -ENOMEM;
185}
186
187static void testmgr_free_buf(char *buf[XBUFSIZE])
188{
189 int i;
190
191 for (i = 0; i < XBUFSIZE; i++)
192 free_page((unsigned long)buf[i]);
193}
194
195static int wait_async_op(struct tcrypt_result *tr, int ret)
196{
197 if (ret == -EINPROGRESS || ret == -EBUSY) {
198 wait_for_completion(&tr->completion);
199 reinit_completion(&tr->completion);
200 ret = tr->err;
201 }
202 return ret;
203}
204
205static int ahash_partial_update(struct ahash_request **preq,
206 struct crypto_ahash *tfm, struct hash_testvec *template,
207 void *hash_buff, int k, int temp, struct scatterlist *sg,
208 const char *algo, char *result, struct tcrypt_result *tresult)
209{
210 char *state;
211 struct ahash_request *req;
212 int statesize, ret = -EINVAL;
213 const char guard[] = { 0x00, 0xba, 0xad, 0x00 };
214
215 req = *preq;
216 statesize = crypto_ahash_statesize(
217 crypto_ahash_reqtfm(req));
218 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
219 if (!state) {
220 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
221 goto out_nostate;
222 }
223 memcpy(state + statesize, guard, sizeof(guard));
224 ret = crypto_ahash_export(req, state);
225 WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
226 if (ret) {
227 pr_err("alt: hash: Failed to export() for %s\n", algo);
228 goto out;
229 }
230 ahash_request_free(req);
231 req = ahash_request_alloc(tfm, GFP_KERNEL);
232 if (!req) {
233 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
234 goto out_noreq;
235 }
236 ahash_request_set_callback(req,
237 CRYPTO_TFM_REQ_MAY_BACKLOG,
238 tcrypt_complete, tresult);
239
240 memcpy(hash_buff, template->plaintext + temp,
241 template->tap[k]);
242 sg_init_one(&sg[0], hash_buff, template->tap[k]);
243 ahash_request_set_crypt(req, sg, result, template->tap[k]);
244 ret = crypto_ahash_import(req, state);
245 if (ret) {
246 pr_err("alg: hash: Failed to import() for %s\n", algo);
247 goto out;
248 }
249 ret = wait_async_op(tresult, crypto_ahash_update(req));
250 if (ret)
251 goto out;
252 *preq = req;
253 ret = 0;
254 goto out_noreq;
255out:
256 ahash_request_free(req);
257out_noreq:
258 kfree(state);
259out_nostate:
260 return ret;
261}
262
263static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
264 unsigned int tcount, bool use_digest,
265 const int align_offset)
266{
267 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
268 unsigned int i, j, k, temp;
269 struct scatterlist sg[8];
270 char *result;
271 char *key;
272 struct ahash_request *req;
273 struct tcrypt_result tresult;
274 void *hash_buff;
275 char *xbuf[XBUFSIZE];
276 int ret = -ENOMEM;
277
278 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
279 if (!result)
280 return ret;
281 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
282 if (!key)
283 goto out_nobuf;
284 if (testmgr_alloc_buf(xbuf))
285 goto out_nobuf;
286
287 init_completion(&tresult.completion);
288
289 req = ahash_request_alloc(tfm, GFP_KERNEL);
290 if (!req) {
291 printk(KERN_ERR "alg: hash: Failed to allocate request for "
292 "%s\n", algo);
293 goto out_noreq;
294 }
295 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
296 tcrypt_complete, &tresult);
297
298 j = 0;
299 for (i = 0; i < tcount; i++) {
300 if (template[i].np)
301 continue;
302
303 ret = -EINVAL;
304 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
305 goto out;
306
307 j++;
308 memset(result, 0, MAX_DIGEST_SIZE);
309
310 hash_buff = xbuf[0];
311 hash_buff += align_offset;
312
313 memcpy(hash_buff, template[i].plaintext, template[i].psize);
314 sg_init_one(&sg[0], hash_buff, template[i].psize);
315
316 if (template[i].ksize) {
317 crypto_ahash_clear_flags(tfm, ~0);
318 if (template[i].ksize > MAX_KEYLEN) {
319 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
320 j, algo, template[i].ksize, MAX_KEYLEN);
321 ret = -EINVAL;
322 goto out;
323 }
324 memcpy(key, template[i].key, template[i].ksize);
325 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
326 if (ret) {
327 printk(KERN_ERR "alg: hash: setkey failed on "
328 "test %d for %s: ret=%d\n", j, algo,
329 -ret);
330 goto out;
331 }
332 }
333
334 ahash_request_set_crypt(req, sg, result, template[i].psize);
335 if (use_digest) {
336 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
337 if (ret) {
338 pr_err("alg: hash: digest failed on test %d "
339 "for %s: ret=%d\n", j, algo, -ret);
340 goto out;
341 }
342 } else {
343 ret = wait_async_op(&tresult, crypto_ahash_init(req));
344 if (ret) {
345 pr_err("alt: hash: init failed on test %d "
346 "for %s: ret=%d\n", j, algo, -ret);
347 goto out;
348 }
349 ret = wait_async_op(&tresult, crypto_ahash_update(req));
350 if (ret) {
351 pr_err("alt: hash: update failed on test %d "
352 "for %s: ret=%d\n", j, algo, -ret);
353 goto out;
354 }
355 ret = wait_async_op(&tresult, crypto_ahash_final(req));
356 if (ret) {
357 pr_err("alt: hash: final failed on test %d "
358 "for %s: ret=%d\n", j, algo, -ret);
359 goto out;
360 }
361 }
362
363 if (memcmp(result, template[i].digest,
364 crypto_ahash_digestsize(tfm))) {
365 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
366 j, algo);
367 hexdump(result, crypto_ahash_digestsize(tfm));
368 ret = -EINVAL;
369 goto out;
370 }
371 }
372
373 j = 0;
374 for (i = 0; i < tcount; i++) {
375 /* alignment tests are only done with continuous buffers */
376 if (align_offset != 0)
377 break;
378
379 if (!template[i].np)
380 continue;
381
382 j++;
383 memset(result, 0, MAX_DIGEST_SIZE);
384
385 temp = 0;
386 sg_init_table(sg, template[i].np);
387 ret = -EINVAL;
388 for (k = 0; k < template[i].np; k++) {
389 if (WARN_ON(offset_in_page(IDX[k]) +
390 template[i].tap[k] > PAGE_SIZE))
391 goto out;
392 sg_set_buf(&sg[k],
393 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
394 offset_in_page(IDX[k]),
395 template[i].plaintext + temp,
396 template[i].tap[k]),
397 template[i].tap[k]);
398 temp += template[i].tap[k];
399 }
400
401 if (template[i].ksize) {
402 if (template[i].ksize > MAX_KEYLEN) {
403 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
404 j, algo, template[i].ksize, MAX_KEYLEN);
405 ret = -EINVAL;
406 goto out;
407 }
408 crypto_ahash_clear_flags(tfm, ~0);
409 memcpy(key, template[i].key, template[i].ksize);
410 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
411
412 if (ret) {
413 printk(KERN_ERR "alg: hash: setkey "
414 "failed on chunking test %d "
415 "for %s: ret=%d\n", j, algo, -ret);
416 goto out;
417 }
418 }
419
420 ahash_request_set_crypt(req, sg, result, template[i].psize);
421 ret = crypto_ahash_digest(req);
422 switch (ret) {
423 case 0:
424 break;
425 case -EINPROGRESS:
426 case -EBUSY:
427 wait_for_completion(&tresult.completion);
428 reinit_completion(&tresult.completion);
429 ret = tresult.err;
430 if (!ret)
431 break;
432 /* fall through */
433 default:
434 printk(KERN_ERR "alg: hash: digest failed "
435 "on chunking test %d for %s: "
436 "ret=%d\n", j, algo, -ret);
437 goto out;
438 }
439
440 if (memcmp(result, template[i].digest,
441 crypto_ahash_digestsize(tfm))) {
442 printk(KERN_ERR "alg: hash: Chunking test %d "
443 "failed for %s\n", j, algo);
444 hexdump(result, crypto_ahash_digestsize(tfm));
445 ret = -EINVAL;
446 goto out;
447 }
448 }
449
450 /* partial update exercise */
451 j = 0;
452 for (i = 0; i < tcount; i++) {
453 /* alignment tests are only done with continuous buffers */
454 if (align_offset != 0)
455 break;
456
457 if (template[i].np < 2)
458 continue;
459
460 j++;
461 memset(result, 0, MAX_DIGEST_SIZE);
462
463 ret = -EINVAL;
464 hash_buff = xbuf[0];
465 memcpy(hash_buff, template[i].plaintext,
466 template[i].tap[0]);
467 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
468
469 if (template[i].ksize) {
470 crypto_ahash_clear_flags(tfm, ~0);
471 if (template[i].ksize > MAX_KEYLEN) {
472 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
473 j, algo, template[i].ksize, MAX_KEYLEN);
474 ret = -EINVAL;
475 goto out;
476 }
477 memcpy(key, template[i].key, template[i].ksize);
478 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
479 if (ret) {
480 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
481 j, algo, -ret);
482 goto out;
483 }
484 }
485
486 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
487 ret = wait_async_op(&tresult, crypto_ahash_init(req));
488 if (ret) {
489 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
490 j, algo, -ret);
491 goto out;
492 }
493 ret = wait_async_op(&tresult, crypto_ahash_update(req));
494 if (ret) {
495 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
496 j, algo, -ret);
497 goto out;
498 }
499
500 temp = template[i].tap[0];
501 for (k = 1; k < template[i].np; k++) {
502 ret = ahash_partial_update(&req, tfm, &template[i],
503 hash_buff, k, temp, &sg[0], algo, result,
504 &tresult);
505 if (ret) {
506 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
507 j, algo, -ret);
508 goto out_noreq;
509 }
510 temp += template[i].tap[k];
511 }
512 ret = wait_async_op(&tresult, crypto_ahash_final(req));
513 if (ret) {
514 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
515 j, algo, -ret);
516 goto out;
517 }
518 if (memcmp(result, template[i].digest,
519 crypto_ahash_digestsize(tfm))) {
520 pr_err("alg: hash: Partial Test %d failed for %s\n",
521 j, algo);
522 hexdump(result, crypto_ahash_digestsize(tfm));
523 ret = -EINVAL;
524 goto out;
525 }
526 }
527
528 ret = 0;
529
530out:
531 ahash_request_free(req);
532out_noreq:
533 testmgr_free_buf(xbuf);
534out_nobuf:
535 kfree(key);
536 kfree(result);
537 return ret;
538}
539
540static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
541 unsigned int tcount, bool use_digest)
542{
543 unsigned int alignmask;
544 int ret;
545
546 ret = __test_hash(tfm, template, tcount, use_digest, 0);
547 if (ret)
548 return ret;
549
550 /* test unaligned buffers, check with one byte offset */
551 ret = __test_hash(tfm, template, tcount, use_digest, 1);
552 if (ret)
553 return ret;
554
555 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
556 if (alignmask) {
557 /* Check if alignment mask for tfm is correctly set. */
558 ret = __test_hash(tfm, template, tcount, use_digest,
559 alignmask + 1);
560 if (ret)
561 return ret;
562 }
563
564 return 0;
565}
566
/*
 * Run the AEAD test vectors in @template against @tfm.
 *
 * @enc:          ENCRYPT or DECRYPT — selects direction and how the
 *                auth tag is accounted for in the buffer lengths
 * @diff_dst:     when true, use separate source and destination
 *                scatterlists
 * @align_offset: extra byte offset applied to the data buffers to
 *                exercise unaligned I/O; chunked tests are skipped
 *                when non-zero
 *
 * Two passes: contiguous-buffer vectors first, then chunked
 * (scatterlist) vectors with ->np set. Returns 0 on success or a
 * negative errno on the first failure.
 */
static int __test_aead(struct crypto_aead *tfm, int enc,
		       struct aead_testvec *template, unsigned int tcount,
		       const bool diff_dst, const int align_offset)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
	unsigned int i, j, k, n, temp;
	int ret = -ENOMEM;
	char *q;
	char *key;
	struct aead_request *req;
	struct scatterlist *sg;
	struct scatterlist *sgout;
	const char *e, *d;
	struct tcrypt_result result;
	unsigned int authsize, iv_len;
	void *input;
	void *output;
	void *assoc;
	char *iv;
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	char *axbuf[XBUFSIZE];

	iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
	if (!iv)
		return ret;
	key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
	if (!key)
		goto out_noxbuf;
	if (testmgr_alloc_buf(xbuf))
		goto out_noxbuf;
	if (testmgr_alloc_buf(axbuf))
		goto out_noaxbuf;
	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	/* avoid "the frame size is larger than 1024 bytes" compiler warning */
	sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
	if (!sg)
		goto out_nosg;
	sgout = &sg[16];

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				  tcrypt_complete, &result);

	iv_len = crypto_aead_ivsize(tfm);

	/* pass 1: contiguous-buffer vectors */
	for (i = 0, j = 0; i < tcount; i++) {
		if (template[i].np)
			continue;

		j++;

		/* some templates have no input data but they will
		 * touch input
		 */
		input = xbuf[0];
		input += align_offset;
		assoc = axbuf[0];

		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen >
			    PAGE_SIZE || template[i].alen > PAGE_SIZE))
			goto out;

		memcpy(input, template[i].input, template[i].ilen);
		memcpy(assoc, template[i].assoc, template[i].alen);
		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, iv_len);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);

		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen,
			       MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		/* tag length is the difference between result and input */
		authsize = abs(template[i].rlen - template[i].ilen);
		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		/* k == 1 iff there is associated data (its own sg entry) */
		k = !!template[i].alen;
		sg_init_table(sg, k + 1);
		sg_set_buf(&sg[0], assoc, template[i].alen);
		sg_set_buf(&sg[k], input,
			   template[i].ilen + (enc ? authsize : 0));
		output = input;

		if (diff_dst) {
			sg_init_table(sgout, k + 1);
			sg_set_buf(&sgout[0], assoc, template[i].alen);

			output = xoutbuf[0];
			output += align_offset;
			sg_set_buf(&sgout[k], output,
				   template[i].rlen + (enc ? 0 : authsize));
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen, iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = output;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: aead%s: Test %d failed on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}
	}

	/* pass 2: chunked (scatterlist) vectors */
	for (i = 0, j = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		j++;

		if (template[i].iv)
			memcpy(iv, template[i].iv, iv_len);
		else
			memset(iv, 0, MAX_IVLEN);

		crypto_aead_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
		if (template[i].klen > MAX_KEYLEN) {
			pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
			       d, j, algo, template[i].klen, MAX_KEYLEN);
			ret = -EINVAL;
			goto out;
		}
		memcpy(key, template[i].key, template[i].klen);

		ret = crypto_aead_setkey(tfm, key, template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_aead_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		authsize = abs(template[i].rlen - template[i].ilen);

		ret = -EINVAL;
		/* one sg entry per assoc chunk (anp) plus per data chunk (np) */
		sg_init_table(sg, template[i].anp + template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].anp + template[i].np);

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].anp; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].atap[k] > PAGE_SIZE))
				goto out;
			sg_set_buf(&sg[k],
				   memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
					  offset_in_page(IDX[k]),
					  template[i].assoc + temp,
					  template[i].atap[k]),
				   template[i].atap[k]);
			if (diff_dst)
				sg_set_buf(&sgout[k],
					   axbuf[IDX[k] >> PAGE_SHIFT] +
					   offset_in_page(IDX[k]),
					   template[i].atap[k]);
			temp += template[i].atap[k];
		}

		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
			memcpy(q, template[i].input + temp, template[i].tap[k]);
			sg_set_buf(&sg[template[i].anp + k],
				   q, template[i].tap[k]);

			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				memset(q, 0, template[i].tap[k]);

				sg_set_buf(&sgout[template[i].anp + k],
					   q, template[i].tap[k]);
			}

			/* zero the byte after the chunk so corruption of the
			 * trailing guard byte can be detected later
			 */
			n = template[i].tap[k];
			if (k == template[i].np - 1 && enc)
				n += authsize;
			if (offset_in_page(q) + n < PAGE_SIZE)
				q[n] = 0;

			temp += template[i].tap[k];
		}

		ret = crypto_aead_setauthsize(tfm, authsize);
		if (ret) {
			pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
			       d, authsize, j, algo);
			goto out;
		}

		if (enc) {
			/* the last data chunk must also hold the auth tag */
			if (WARN_ON(sg[template[i].anp + k - 1].offset +
				    sg[template[i].anp + k - 1].length +
				    authsize > PAGE_SIZE)) {
				ret = -EINVAL;
				goto out;
			}

			if (diff_dst)
				sgout[template[i].anp + k - 1].length +=
					authsize;
			sg[template[i].anp + k - 1].length += authsize;
		}

		aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
				       template[i].ilen,
				       iv);

		aead_request_set_ad(req, template[i].alen);

		ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);

		switch (ret) {
		case 0:
			if (template[i].novrfy) {
				/* verification was supposed to fail */
				pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
				       d, e, j, algo);
				/* so really, we got a bad message */
				ret = -EBADMSG;
				goto out;
			}
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		case -EBADMSG:
			if (template[i].novrfy)
				/* verification failure was expected */
				continue;
			/* fall through */
		default:
			pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		ret = -EINVAL;
		for (k = 0, temp = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			n = template[i].tap[k];
			if (k == template[i].np - 1)
				n += enc ? authsize : -authsize;

			if (memcmp(q, template[i].result + temp, n)) {
				pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, n);
				goto out;
			}

			/* check the bytes after the output for corruption */
			q += n;
			if (k == template[i].np - 1 && !enc) {
				if (!diff_dst &&
					memcmp(q, template[i].input +
					      temp + n, authsize))
					n = authsize;
				else
					n = 0;
			} else {
				for (n = 0; offset_in_page(q + n) && q[n]; n++)
					;
			}
			if (n) {
				pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}

			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	aead_request_free(req);
	kfree(sg);
out_nosg:
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(axbuf);
out_noaxbuf:
	testmgr_free_buf(xbuf);
out_noxbuf:
	kfree(key);
	kfree(iv);
	return ret;
}
957
958static int test_aead(struct crypto_aead *tfm, int enc,
959 struct aead_testvec *template, unsigned int tcount)
960{
961 unsigned int alignmask;
962 int ret;
963
964 /* test 'dst == src' case */
965 ret = __test_aead(tfm, enc, template, tcount, false, 0);
966 if (ret)
967 return ret;
968
969 /* test 'dst != src' case */
970 ret = __test_aead(tfm, enc, template, tcount, true, 0);
971 if (ret)
972 return ret;
973
974 /* test unaligned buffers, check with one byte offset */
975 ret = __test_aead(tfm, enc, template, tcount, true, 1);
976 if (ret)
977 return ret;
978
979 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
980 if (alignmask) {
981 /* Check if alignment mask for tfm is correctly set. */
982 ret = __test_aead(tfm, enc, template, tcount, true,
983 alignmask + 1);
984 if (ret)
985 return ret;
986 }
987
988 return 0;
989}
990
991static int test_cipher(struct crypto_cipher *tfm, int enc,
992 struct cipher_testvec *template, unsigned int tcount)
993{
994 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
995 unsigned int i, j, k;
996 char *q;
997 const char *e;
998 void *data;
999 char *xbuf[XBUFSIZE];
1000 int ret = -ENOMEM;
1001
1002 if (testmgr_alloc_buf(xbuf))
1003 goto out_nobuf;
1004
1005 if (enc == ENCRYPT)
1006 e = "encryption";
1007 else
1008 e = "decryption";
1009
1010 j = 0;
1011 for (i = 0; i < tcount; i++) {
1012 if (template[i].np)
1013 continue;
1014
1015 if (fips_enabled && template[i].fips_skip)
1016 continue;
1017
1018 j++;
1019
1020 ret = -EINVAL;
1021 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1022 goto out;
1023
1024 data = xbuf[0];
1025 memcpy(data, template[i].input, template[i].ilen);
1026
1027 crypto_cipher_clear_flags(tfm, ~0);
1028 if (template[i].wk)
1029 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1030
1031 ret = crypto_cipher_setkey(tfm, template[i].key,
1032 template[i].klen);
1033 if (template[i].fail == !ret) {
1034 printk(KERN_ERR "alg: cipher: setkey failed "
1035 "on test %d for %s: flags=%x\n", j,
1036 algo, crypto_cipher_get_flags(tfm));
1037 goto out;
1038 } else if (ret)
1039 continue;
1040
1041 for (k = 0; k < template[i].ilen;
1042 k += crypto_cipher_blocksize(tfm)) {
1043 if (enc)
1044 crypto_cipher_encrypt_one(tfm, data + k,
1045 data + k);
1046 else
1047 crypto_cipher_decrypt_one(tfm, data + k,
1048 data + k);
1049 }
1050
1051 q = data;
1052 if (memcmp(q, template[i].result, template[i].rlen)) {
1053 printk(KERN_ERR "alg: cipher: Test %d failed "
1054 "on %s for %s\n", j, e, algo);
1055 hexdump(q, template[i].rlen);
1056 ret = -EINVAL;
1057 goto out;
1058 }
1059 }
1060
1061 ret = 0;
1062
1063out:
1064 testmgr_free_buf(xbuf);
1065out_nobuf:
1066 return ret;
1067}
1068
/*
 * Run the skcipher test vectors in @template against @tfm.
 *
 * @enc:          ENCRYPT or DECRYPT
 * @diff_dst:     when true, use a separate destination scatterlist
 * @align_offset: extra byte offset applied to the buffers to exercise
 *                unaligned I/O; chunked tests are skipped when non-zero
 *
 * Two passes: contiguous-buffer vectors first (including chunked
 * vectors flagged also_non_np), then chunked (scatterlist) vectors
 * with ->np set. Returns 0 on success or a negative errno on the
 * first failure.
 */
static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
			   struct cipher_testvec *template, unsigned int tcount,
			   const bool diff_dst, const int align_offset)
{
	const char *algo =
		crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
	unsigned int i, j, k, n, temp;
	char *q;
	struct skcipher_request *req;
	struct scatterlist sg[8];
	struct scatterlist sgout[8];
	const char *e, *d;
	struct tcrypt_result result;
	void *data;
	char iv[MAX_IVLEN];
	char *xbuf[XBUFSIZE];
	char *xoutbuf[XBUFSIZE];
	int ret = -ENOMEM;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (diff_dst && testmgr_alloc_buf(xoutbuf))
		goto out_nooutbuf;

	if (diff_dst)
		d = "-ddst";
	else
		d = "";

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	init_completion(&result.completion);

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
		       d, algo);
		goto out;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				      tcrypt_complete, &result);

	/* pass 1: contiguous-buffer vectors */
	j = 0;
	for (i = 0; i < tcount; i++) {
		if (template[i].np && !template[i].also_non_np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		ret = -EINVAL;
		if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		data += align_offset;
		memcpy(data, template[i].input, template[i].ilen);

		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		sg_init_one(&sg[0], data, template[i].ilen);
		if (diff_dst) {
			data = xoutbuf[0];
			data += align_offset;
			sg_init_one(&sgout[0], data, template[i].ilen);
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);
		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		q = data;
		if (memcmp(q, template[i].result, template[i].rlen)) {
			pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
			       d, j, e, algo);
			hexdump(q, template[i].rlen);
			ret = -EINVAL;
			goto out;
		}

		/* some modes (e.g. CBC) also define the chained output IV */
		if (template[i].iv_out &&
		    memcmp(iv, template[i].iv_out,
			   crypto_skcipher_ivsize(tfm))) {
			pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
			       d, j, e, algo);
			hexdump(iv, crypto_skcipher_ivsize(tfm));
			ret = -EINVAL;
			goto out;
		}
	}

	/* pass 2: chunked (scatterlist) vectors */
	j = 0;
	for (i = 0; i < tcount; i++) {
		/* alignment tests are only done with continuous buffers */
		if (align_offset != 0)
			break;

		if (!template[i].np)
			continue;

		if (fips_enabled && template[i].fips_skip)
			continue;

		if (template[i].iv)
			memcpy(iv, template[i].iv, ivsize);
		else
			memset(iv, 0, MAX_IVLEN);

		j++;
		crypto_skcipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_skcipher_set_flags(tfm,
						  CRYPTO_TFM_REQ_WEAK_KEY);

		ret = crypto_skcipher_setkey(tfm, template[i].key,
					     template[i].klen);
		if (template[i].fail == !ret) {
			pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
			       d, j, algo, crypto_skcipher_get_flags(tfm));
			goto out;
		} else if (ret)
			continue;

		temp = 0;
		ret = -EINVAL;
		sg_init_table(sg, template[i].np);
		if (diff_dst)
			sg_init_table(sgout, template[i].np);
		for (k = 0; k < template[i].np; k++) {
			if (WARN_ON(offset_in_page(IDX[k]) +
				    template[i].tap[k] > PAGE_SIZE))
				goto out;

			q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);

			memcpy(q, template[i].input + temp, template[i].tap[k]);

			/* zero the trailing guard byte to detect overruns */
			if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
				q[template[i].tap[k]] = 0;

			sg_set_buf(&sg[k], q, template[i].tap[k]);
			if (diff_dst) {
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

				sg_set_buf(&sgout[k], q, template[i].tap[k]);

				memset(q, 0, template[i].tap[k]);
				if (offset_in_page(q) +
				    template[i].tap[k] < PAGE_SIZE)
					q[template[i].tap[k]] = 0;
			}

			temp += template[i].tap[k];
		}

		skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
					   template[i].ilen, iv);

		ret = enc ? crypto_skcipher_encrypt(req) :
			    crypto_skcipher_decrypt(req);

		switch (ret) {
		case 0:
			break;
		case -EINPROGRESS:
		case -EBUSY:
			wait_for_completion(&result.completion);
			reinit_completion(&result.completion);
			ret = result.err;
			if (!ret)
				break;
			/* fall through */
		default:
			pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
			       d, e, j, algo, -ret);
			goto out;
		}

		temp = 0;
		ret = -EINVAL;
		for (k = 0; k < template[i].np; k++) {
			if (diff_dst)
				q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);
			else
				q = xbuf[IDX[k] >> PAGE_SHIFT] +
				    offset_in_page(IDX[k]);

			if (memcmp(q, template[i].result + temp,
				   template[i].tap[k])) {
				pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
				       d, j, e, k, algo);
				hexdump(q, template[i].tap[k]);
				goto out;
			}

			/* check the guard bytes after the chunk */
			q += template[i].tap[k];
			for (n = 0; offset_in_page(q + n) && q[n]; n++)
				;
			if (n) {
				pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
				       d, j, e, k, algo, n);
				hexdump(q, n);
				goto out;
			}
			temp += template[i].tap[k];
		}
	}

	ret = 0;

out:
	skcipher_request_free(req);
	if (diff_dst)
		testmgr_free_buf(xoutbuf);
out_nooutbuf:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
1332
1333static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1334 struct cipher_testvec *template, unsigned int tcount)
1335{
1336 unsigned int alignmask;
1337 int ret;
1338
1339 /* test 'dst == src' case */
1340 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1341 if (ret)
1342 return ret;
1343
1344 /* test 'dst != src' case */
1345 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1346 if (ret)
1347 return ret;
1348
1349 /* test unaligned buffers, check with one byte offset */
1350 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1351 if (ret)
1352 return ret;
1353
1354 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1355 if (alignmask) {
1356 /* Check if alignment mask for tfm is correctly set. */
1357 ret = __test_skcipher(tfm, enc, template, tcount, true,
1358 alignmask + 1);
1359 if (ret)
1360 return ret;
1361 }
1362
1363 return 0;
1364}
1365
1366static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1367 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1368{
1369 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1370 unsigned int i;
1371 char result[COMP_BUF_SIZE];
1372 int ret;
1373
1374 for (i = 0; i < ctcount; i++) {
1375 int ilen;
1376 unsigned int dlen = COMP_BUF_SIZE;
1377
1378 memset(result, 0, sizeof (result));
1379
1380 ilen = ctemplate[i].inlen;
1381 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1382 ilen, result, &dlen);
1383 if (ret) {
1384 printk(KERN_ERR "alg: comp: compression failed "
1385 "on test %d for %s: ret=%d\n", i + 1, algo,
1386 -ret);
1387 goto out;
1388 }
1389
1390 if (dlen != ctemplate[i].outlen) {
1391 printk(KERN_ERR "alg: comp: Compression test %d "
1392 "failed for %s: output len = %d\n", i + 1, algo,
1393 dlen);
1394 ret = -EINVAL;
1395 goto out;
1396 }
1397
1398 if (memcmp(result, ctemplate[i].output, dlen)) {
1399 printk(KERN_ERR "alg: comp: Compression test %d "
1400 "failed for %s\n", i + 1, algo);
1401 hexdump(result, dlen);
1402 ret = -EINVAL;
1403 goto out;
1404 }
1405 }
1406
1407 for (i = 0; i < dtcount; i++) {
1408 int ilen;
1409 unsigned int dlen = COMP_BUF_SIZE;
1410
1411 memset(result, 0, sizeof (result));
1412
1413 ilen = dtemplate[i].inlen;
1414 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1415 ilen, result, &dlen);
1416 if (ret) {
1417 printk(KERN_ERR "alg: comp: decompression failed "
1418 "on test %d for %s: ret=%d\n", i + 1, algo,
1419 -ret);
1420 goto out;
1421 }
1422
1423 if (dlen != dtemplate[i].outlen) {
1424 printk(KERN_ERR "alg: comp: Decompression test %d "
1425 "failed for %s: output len = %d\n", i + 1, algo,
1426 dlen);
1427 ret = -EINVAL;
1428 goto out;
1429 }
1430
1431 if (memcmp(result, dtemplate[i].output, dlen)) {
1432 printk(KERN_ERR "alg: comp: Decompression test %d "
1433 "failed for %s\n", i + 1, algo);
1434 hexdump(result, dlen);
1435 ret = -EINVAL;
1436 goto out;
1437 }
1438 }
1439
1440 ret = 0;
1441
1442out:
1443 return ret;
1444}
1445
1446static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate,
1447 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1448{
1449 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1450 unsigned int i;
1451 char *output;
1452 int ret;
1453 struct scatterlist src, dst;
1454 struct acomp_req *req;
1455 struct tcrypt_result result;
1456
1457 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1458 if (!output)
1459 return -ENOMEM;
1460
1461 for (i = 0; i < ctcount; i++) {
1462 unsigned int dlen = COMP_BUF_SIZE;
1463 int ilen = ctemplate[i].inlen;
1464 void *input_vec;
1465
1466 input_vec = kmalloc(ilen, GFP_KERNEL);
1467 if (!input_vec) {
1468 ret = -ENOMEM;
1469 goto out;
1470 }
1471
1472 memcpy(input_vec, ctemplate[i].input, ilen);
1473 memset(output, 0, dlen);
1474 init_completion(&result.completion);
1475 sg_init_one(&src, input_vec, ilen);
1476 sg_init_one(&dst, output, dlen);
1477
1478 req = acomp_request_alloc(tfm);
1479 if (!req) {
1480 pr_err("alg: acomp: request alloc failed for %s\n",
1481 algo);
1482 kfree(input_vec);
1483 ret = -ENOMEM;
1484 goto out;
1485 }
1486
1487 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1488 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1489 tcrypt_complete, &result);
1490
1491 ret = wait_async_op(&result, crypto_acomp_compress(req));
1492 if (ret) {
1493 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1494 i + 1, algo, -ret);
1495 kfree(input_vec);
1496 acomp_request_free(req);
1497 goto out;
1498 }
1499
1500 if (req->dlen != ctemplate[i].outlen) {
1501 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1502 i + 1, algo, req->dlen);
1503 ret = -EINVAL;
1504 kfree(input_vec);
1505 acomp_request_free(req);
1506 goto out;
1507 }
1508
1509 if (memcmp(output, ctemplate[i].output, req->dlen)) {
1510 pr_err("alg: acomp: Compression test %d failed for %s\n",
1511 i + 1, algo);
1512 hexdump(output, req->dlen);
1513 ret = -EINVAL;
1514 kfree(input_vec);
1515 acomp_request_free(req);
1516 goto out;
1517 }
1518
1519 kfree(input_vec);
1520 acomp_request_free(req);
1521 }
1522
1523 for (i = 0; i < dtcount; i++) {
1524 unsigned int dlen = COMP_BUF_SIZE;
1525 int ilen = dtemplate[i].inlen;
1526 void *input_vec;
1527
1528 input_vec = kmalloc(ilen, GFP_KERNEL);
1529 if (!input_vec) {
1530 ret = -ENOMEM;
1531 goto out;
1532 }
1533
1534 memcpy(input_vec, dtemplate[i].input, ilen);
1535 memset(output, 0, dlen);
1536 init_completion(&result.completion);
1537 sg_init_one(&src, input_vec, ilen);
1538 sg_init_one(&dst, output, dlen);
1539
1540 req = acomp_request_alloc(tfm);
1541 if (!req) {
1542 pr_err("alg: acomp: request alloc failed for %s\n",
1543 algo);
1544 kfree(input_vec);
1545 ret = -ENOMEM;
1546 goto out;
1547 }
1548
1549 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1550 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1551 tcrypt_complete, &result);
1552
1553 ret = wait_async_op(&result, crypto_acomp_decompress(req));
1554 if (ret) {
1555 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1556 i + 1, algo, -ret);
1557 kfree(input_vec);
1558 acomp_request_free(req);
1559 goto out;
1560 }
1561
1562 if (req->dlen != dtemplate[i].outlen) {
1563 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1564 i + 1, algo, req->dlen);
1565 ret = -EINVAL;
1566 kfree(input_vec);
1567 acomp_request_free(req);
1568 goto out;
1569 }
1570
1571 if (memcmp(output, dtemplate[i].output, req->dlen)) {
1572 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1573 i + 1, algo);
1574 hexdump(output, req->dlen);
1575 ret = -EINVAL;
1576 kfree(input_vec);
1577 acomp_request_free(req);
1578 goto out;
1579 }
1580
1581 kfree(input_vec);
1582 acomp_request_free(req);
1583 }
1584
1585 ret = 0;
1586
1587out:
1588 kfree(output);
1589 return ret;
1590}
1591
1592static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1593 unsigned int tcount)
1594{
1595 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1596 int err = 0, i, j, seedsize;
1597 u8 *seed;
1598 char result[32];
1599
1600 seedsize = crypto_rng_seedsize(tfm);
1601
1602 seed = kmalloc(seedsize, GFP_KERNEL);
1603 if (!seed) {
1604 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1605 "for %s\n", algo);
1606 return -ENOMEM;
1607 }
1608
1609 for (i = 0; i < tcount; i++) {
1610 memset(result, 0, 32);
1611
1612 memcpy(seed, template[i].v, template[i].vlen);
1613 memcpy(seed + template[i].vlen, template[i].key,
1614 template[i].klen);
1615 memcpy(seed + template[i].vlen + template[i].klen,
1616 template[i].dt, template[i].dtlen);
1617
1618 err = crypto_rng_reset(tfm, seed, seedsize);
1619 if (err) {
1620 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1621 "for %s\n", algo);
1622 goto out;
1623 }
1624
1625 for (j = 0; j < template[i].loops; j++) {
1626 err = crypto_rng_get_bytes(tfm, result,
1627 template[i].rlen);
1628 if (err < 0) {
1629 printk(KERN_ERR "alg: cprng: Failed to obtain "
1630 "the correct amount of random data for "
1631 "%s (requested %d)\n", algo,
1632 template[i].rlen);
1633 goto out;
1634 }
1635 }
1636
1637 err = memcmp(result, template[i].result,
1638 template[i].rlen);
1639 if (err) {
1640 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1641 i, algo);
1642 hexdump(result, template[i].rlen);
1643 err = -EINVAL;
1644 goto out;
1645 }
1646 }
1647
1648out:
1649 kfree(seed);
1650 return err;
1651}
1652
1653static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1654 u32 type, u32 mask)
1655{
1656 struct crypto_aead *tfm;
1657 int err = 0;
1658
1659 tfm = crypto_alloc_aead(driver, type, mask);
1660 if (IS_ERR(tfm)) {
1661 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1662 "%ld\n", driver, PTR_ERR(tfm));
1663 return PTR_ERR(tfm);
1664 }
1665
1666 if (desc->suite.aead.enc.vecs) {
1667 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1668 desc->suite.aead.enc.count);
1669 if (err)
1670 goto out;
1671 }
1672
1673 if (!err && desc->suite.aead.dec.vecs)
1674 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1675 desc->suite.aead.dec.count);
1676
1677out:
1678 crypto_free_aead(tfm);
1679 return err;
1680}
1681
1682static int alg_test_cipher(const struct alg_test_desc *desc,
1683 const char *driver, u32 type, u32 mask)
1684{
1685 struct crypto_cipher *tfm;
1686 int err = 0;
1687
1688 tfm = crypto_alloc_cipher(driver, type, mask);
1689 if (IS_ERR(tfm)) {
1690 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1691 "%s: %ld\n", driver, PTR_ERR(tfm));
1692 return PTR_ERR(tfm);
1693 }
1694
1695 if (desc->suite.cipher.enc.vecs) {
1696 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1697 desc->suite.cipher.enc.count);
1698 if (err)
1699 goto out;
1700 }
1701
1702 if (desc->suite.cipher.dec.vecs)
1703 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1704 desc->suite.cipher.dec.count);
1705
1706out:
1707 crypto_free_cipher(tfm);
1708 return err;
1709}
1710
1711static int alg_test_skcipher(const struct alg_test_desc *desc,
1712 const char *driver, u32 type, u32 mask)
1713{
1714 struct crypto_skcipher *tfm;
1715 int err = 0;
1716
1717 tfm = crypto_alloc_skcipher(driver, type, mask);
1718 if (IS_ERR(tfm)) {
1719 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1720 "%s: %ld\n", driver, PTR_ERR(tfm));
1721 return PTR_ERR(tfm);
1722 }
1723
1724 if (desc->suite.cipher.enc.vecs) {
1725 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1726 desc->suite.cipher.enc.count);
1727 if (err)
1728 goto out;
1729 }
1730
1731 if (desc->suite.cipher.dec.vecs)
1732 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1733 desc->suite.cipher.dec.count);
1734
1735out:
1736 crypto_free_skcipher(tfm);
1737 return err;
1738}
1739
1740static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1741 u32 type, u32 mask)
1742{
1743 struct crypto_comp *comp;
1744 struct crypto_acomp *acomp;
1745 int err;
1746 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1747
1748 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1749 acomp = crypto_alloc_acomp(driver, type, mask);
1750 if (IS_ERR(acomp)) {
1751 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1752 driver, PTR_ERR(acomp));
1753 return PTR_ERR(acomp);
1754 }
1755 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1756 desc->suite.comp.decomp.vecs,
1757 desc->suite.comp.comp.count,
1758 desc->suite.comp.decomp.count);
1759 crypto_free_acomp(acomp);
1760 } else {
1761 comp = crypto_alloc_comp(driver, type, mask);
1762 if (IS_ERR(comp)) {
1763 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1764 driver, PTR_ERR(comp));
1765 return PTR_ERR(comp);
1766 }
1767
1768 err = test_comp(comp, desc->suite.comp.comp.vecs,
1769 desc->suite.comp.decomp.vecs,
1770 desc->suite.comp.comp.count,
1771 desc->suite.comp.decomp.count);
1772
1773 crypto_free_comp(comp);
1774 }
1775 return err;
1776}
1777
1778static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1779 u32 type, u32 mask)
1780{
1781 struct crypto_ahash *tfm;
1782 int err;
1783
1784 tfm = crypto_alloc_ahash(driver, type, mask);
1785 if (IS_ERR(tfm)) {
1786 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1787 "%ld\n", driver, PTR_ERR(tfm));
1788 return PTR_ERR(tfm);
1789 }
1790
1791 err = test_hash(tfm, desc->suite.hash.vecs,
1792 desc->suite.hash.count, true);
1793 if (!err)
1794 err = test_hash(tfm, desc->suite.hash.vecs,
1795 desc->suite.hash.count, false);
1796
1797 crypto_free_ahash(tfm);
1798 return err;
1799}
1800
1801static int alg_test_crc32c(const struct alg_test_desc *desc,
1802 const char *driver, u32 type, u32 mask)
1803{
1804 struct crypto_shash *tfm;
1805 u32 val;
1806 int err;
1807
1808 err = alg_test_hash(desc, driver, type, mask);
1809 if (err)
1810 goto out;
1811
1812 tfm = crypto_alloc_shash(driver, type, mask);
1813 if (IS_ERR(tfm)) {
1814 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1815 "%ld\n", driver, PTR_ERR(tfm));
1816 err = PTR_ERR(tfm);
1817 goto out;
1818 }
1819
1820 do {
1821 SHASH_DESC_ON_STACK(shash, tfm);
1822 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1823
1824 shash->tfm = tfm;
1825 shash->flags = 0;
1826
1827 *ctx = le32_to_cpu(420553207);
1828 err = crypto_shash_final(shash, (u8 *)&val);
1829 if (err) {
1830 printk(KERN_ERR "alg: crc32c: Operation failed for "
1831 "%s: %d\n", driver, err);
1832 break;
1833 }
1834
1835 if (val != ~420553207) {
1836 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1837 "%d\n", driver, val);
1838 err = -EINVAL;
1839 }
1840 } while (0);
1841
1842 crypto_free_shash(tfm);
1843
1844out:
1845 return err;
1846}
1847
1848static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1849 u32 type, u32 mask)
1850{
1851 struct crypto_rng *rng;
1852 int err;
1853
1854 rng = crypto_alloc_rng(driver, type, mask);
1855 if (IS_ERR(rng)) {
1856 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1857 "%ld\n", driver, PTR_ERR(rng));
1858 return PTR_ERR(rng);
1859 }
1860
1861 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1862
1863 crypto_free_rng(rng);
1864
1865 return err;
1866}
1867
1868
/*
 * Run one DRBG CAVS vector against @driver.
 *
 * @test:	vector supplying the seed entropy, personalization string,
 *		additional-input strings and the expected output
 * @pr:		non-zero for a prediction-resistance DRBG; fresh test
 *		entropy is then supplied before each generate call
 *
 * Returns 0 when the generated bytes equal test->expected, a negative
 * errno on allocation/instantiation/generation failure, or the raw
 * (non-zero) memcmp() result on mismatch — callers only test for non-zero.
 */
static int drbg_cavs_test(struct drbg_testvec *test, int pr,
			  const char *driver, u32 type, u32 mask)
{
	int ret = -EAGAIN;
	struct crypto_rng *drng;
	struct drbg_test_data test_data;
	struct drbg_string addtl, pers, testentropy;
	unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);

	if (!buf)
		return -ENOMEM;

	drng = crypto_alloc_rng(driver, type, mask);
	if (IS_ERR(drng)) {
		printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
		       "%s\n", driver);
		kzfree(buf);
		return -ENOMEM;
	}

	/* Instantiate from the vector's entropy rather than kernel entropy. */
	test_data.testentropy = &testentropy;
	drbg_string_fill(&testentropy, test->entropy, test->entropylen);
	drbg_string_fill(&pers, test->pers, test->perslen);
	ret = crypto_drbg_reset_test(drng, &pers, &test_data);
	if (ret) {
		printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
		goto outbuf;
	}

	/* First generate call, using additional input A. */
	drbg_string_fill(&addtl, test->addtla, test->addtllen);
	if (pr) {
		/* Prediction resistance: supply test entropy A first. */
		drbg_string_fill(&testentropy, test->entpra, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	/*
	 * Second generate call, using additional input B.  It overwrites
	 * buf, so only this second output block is compared below.
	 */
	drbg_string_fill(&addtl, test->addtlb, test->addtllen);
	if (pr) {
		drbg_string_fill(&testentropy, test->entprb, test->entprlen);
		ret = crypto_drbg_get_bytes_addtl_test(drng,
			buf, test->expectedlen, &addtl, &test_data);
	} else {
		ret = crypto_drbg_get_bytes_addtl(drng,
			buf, test->expectedlen, &addtl);
	}
	if (ret < 0) {
		printk(KERN_ERR "alg: drbg: could not obtain random data for "
		       "driver %s\n", driver);
		goto outbuf;
	}

	ret = memcmp(test->expected, buf, test->expectedlen);

outbuf:
	crypto_free_rng(drng);
	kzfree(buf);
	return ret;
}
1935
1936
1937static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1938 u32 type, u32 mask)
1939{
1940 int err = 0;
1941 int pr = 0;
1942 int i = 0;
1943 struct drbg_testvec *template = desc->suite.drbg.vecs;
1944 unsigned int tcount = desc->suite.drbg.count;
1945
1946 if (0 == memcmp(driver, "drbg_pr_", 8))
1947 pr = 1;
1948
1949 for (i = 0; i < tcount; i++) {
1950 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1951 if (err) {
1952 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1953 i, driver);
1954 err = -EINVAL;
1955 break;
1956 }
1957 }
1958 return err;
1959
1960}
1961
1962static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec,
1963 const char *alg)
1964{
1965 struct kpp_request *req;
1966 void *input_buf = NULL;
1967 void *output_buf = NULL;
1968 struct tcrypt_result result;
1969 unsigned int out_len_max;
1970 int err = -ENOMEM;
1971 struct scatterlist src, dst;
1972
1973 req = kpp_request_alloc(tfm, GFP_KERNEL);
1974 if (!req)
1975 return err;
1976
1977 init_completion(&result.completion);
1978
1979 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1980 if (err < 0)
1981 goto free_req;
1982
1983 out_len_max = crypto_kpp_maxsize(tfm);
1984 output_buf = kzalloc(out_len_max, GFP_KERNEL);
1985 if (!output_buf) {
1986 err = -ENOMEM;
1987 goto free_req;
1988 }
1989
1990 /* Use appropriate parameter as base */
1991 kpp_request_set_input(req, NULL, 0);
1992 sg_init_one(&dst, output_buf, out_len_max);
1993 kpp_request_set_output(req, &dst, out_len_max);
1994 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1995 tcrypt_complete, &result);
1996
1997 /* Compute public key */
1998 err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
1999 if (err) {
2000 pr_err("alg: %s: generate public key test failed. err %d\n",
2001 alg, err);
2002 goto free_output;
2003 }
2004 /* Verify calculated public key */
2005 if (memcmp(vec->expected_a_public, sg_virt(req->dst),
2006 vec->expected_a_public_size)) {
2007 pr_err("alg: %s: generate public key test failed. Invalid output\n",
2008 alg);
2009 err = -EINVAL;
2010 goto free_output;
2011 }
2012
2013 /* Calculate shared secret key by using counter part (b) public key. */
2014 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
2015 if (!input_buf) {
2016 err = -ENOMEM;
2017 goto free_output;
2018 }
2019
2020 memcpy(input_buf, vec->b_public, vec->b_public_size);
2021 sg_init_one(&src, input_buf, vec->b_public_size);
2022 sg_init_one(&dst, output_buf, out_len_max);
2023 kpp_request_set_input(req, &src, vec->b_public_size);
2024 kpp_request_set_output(req, &dst, out_len_max);
2025 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2026 tcrypt_complete, &result);
2027 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
2028 if (err) {
2029 pr_err("alg: %s: compute shard secret test failed. err %d\n",
2030 alg, err);
2031 goto free_all;
2032 }
2033 /*
2034 * verify shared secret from which the user will derive
2035 * secret key by executing whatever hash it has chosen
2036 */
2037 if (memcmp(vec->expected_ss, sg_virt(req->dst),
2038 vec->expected_ss_size)) {
2039 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2040 alg);
2041 err = -EINVAL;
2042 }
2043
2044free_all:
2045 kfree(input_buf);
2046free_output:
2047 kfree(output_buf);
2048free_req:
2049 kpp_request_free(req);
2050 return err;
2051}
2052
2053static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2054 struct kpp_testvec *vecs, unsigned int tcount)
2055{
2056 int ret, i;
2057
2058 for (i = 0; i < tcount; i++) {
2059 ret = do_test_kpp(tfm, vecs++, alg);
2060 if (ret) {
2061 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2062 alg, i + 1, ret);
2063 return ret;
2064 }
2065 }
2066 return 0;
2067}
2068
2069static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2070 u32 type, u32 mask)
2071{
2072 struct crypto_kpp *tfm;
2073 int err = 0;
2074
2075 tfm = crypto_alloc_kpp(driver, type, mask);
2076 if (IS_ERR(tfm)) {
2077 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2078 driver, PTR_ERR(tfm));
2079 return PTR_ERR(tfm);
2080 }
2081 if (desc->suite.kpp.vecs)
2082 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2083 desc->suite.kpp.count);
2084
2085 crypto_free_kpp(tfm);
2086 return err;
2087}
2088
2089static int test_akcipher_one(struct crypto_akcipher *tfm,
2090 struct akcipher_testvec *vecs)
2091{
2092 char *xbuf[XBUFSIZE];
2093 struct akcipher_request *req;
2094 void *outbuf_enc = NULL;
2095 void *outbuf_dec = NULL;
2096 struct tcrypt_result result;
2097 unsigned int out_len_max, out_len = 0;
2098 int err = -ENOMEM;
2099 struct scatterlist src, dst, src_tab[2];
2100
2101 if (testmgr_alloc_buf(xbuf))
2102 return err;
2103
2104 req = akcipher_request_alloc(tfm, GFP_KERNEL);
2105 if (!req)
2106 goto free_xbuf;
2107
2108 init_completion(&result.completion);
2109
2110 if (vecs->public_key_vec)
2111 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2112 vecs->key_len);
2113 else
2114 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2115 vecs->key_len);
2116 if (err)
2117 goto free_req;
2118
2119 err = -ENOMEM;
2120 out_len_max = crypto_akcipher_maxsize(tfm);
2121 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2122 if (!outbuf_enc)
2123 goto free_req;
2124
2125 if (WARN_ON(vecs->m_size > PAGE_SIZE))
2126 goto free_all;
2127
2128 memcpy(xbuf[0], vecs->m, vecs->m_size);
2129
2130 sg_init_table(src_tab, 2);
2131 sg_set_buf(&src_tab[0], xbuf[0], 8);
2132 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
2133 sg_init_one(&dst, outbuf_enc, out_len_max);
2134 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2135 out_len_max);
2136 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2137 tcrypt_complete, &result);
2138
2139 /* Run RSA encrypt - c = m^e mod n;*/
2140 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
2141 if (err) {
2142 pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
2143 goto free_all;
2144 }
2145 if (req->dst_len != vecs->c_size) {
2146 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
2147 err = -EINVAL;
2148 goto free_all;
2149 }
2150 /* verify that encrypted message is equal to expected */
2151 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
2152 pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2153 hexdump(outbuf_enc, vecs->c_size);
2154 err = -EINVAL;
2155 goto free_all;
2156 }
2157 /* Don't invoke decrypt for vectors with public key */
2158 if (vecs->public_key_vec) {
2159 err = 0;
2160 goto free_all;
2161 }
2162 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2163 if (!outbuf_dec) {
2164 err = -ENOMEM;
2165 goto free_all;
2166 }
2167
2168 if (WARN_ON(vecs->c_size > PAGE_SIZE))
2169 goto free_all;
2170
2171 memcpy(xbuf[0], vecs->c, vecs->c_size);
2172
2173 sg_init_one(&src, xbuf[0], vecs->c_size);
2174 sg_init_one(&dst, outbuf_dec, out_len_max);
2175 init_completion(&result.completion);
2176 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2177
2178 /* Run RSA decrypt - m = c^d mod n;*/
2179 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
2180 if (err) {
2181 pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2182 goto free_all;
2183 }
2184 out_len = req->dst_len;
2185 if (out_len < vecs->m_size) {
2186 pr_err("alg: akcipher: decrypt test failed. "
2187 "Invalid output len %u\n", out_len);
2188 err = -EINVAL;
2189 goto free_all;
2190 }
2191 /* verify that decrypted message is equal to the original msg */
2192 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2193 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2194 vecs->m_size)) {
2195 pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2196 hexdump(outbuf_dec, out_len);
2197 err = -EINVAL;
2198 }
2199free_all:
2200 kfree(outbuf_dec);
2201 kfree(outbuf_enc);
2202free_req:
2203 akcipher_request_free(req);
2204free_xbuf:
2205 testmgr_free_buf(xbuf);
2206 return err;
2207}
2208
2209static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2210 struct akcipher_testvec *vecs, unsigned int tcount)
2211{
2212 const char *algo =
2213 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2214 int ret, i;
2215
2216 for (i = 0; i < tcount; i++) {
2217 ret = test_akcipher_one(tfm, vecs++);
2218 if (!ret)
2219 continue;
2220
2221 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2222 i + 1, algo, ret);
2223 return ret;
2224 }
2225 return 0;
2226}
2227
2228static int alg_test_akcipher(const struct alg_test_desc *desc,
2229 const char *driver, u32 type, u32 mask)
2230{
2231 struct crypto_akcipher *tfm;
2232 int err = 0;
2233
2234 tfm = crypto_alloc_akcipher(driver, type, mask);
2235 if (IS_ERR(tfm)) {
2236 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2237 driver, PTR_ERR(tfm));
2238 return PTR_ERR(tfm);
2239 }
2240 if (desc->suite.akcipher.vecs)
2241 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2242 desc->suite.akcipher.count);
2243
2244 crypto_free_akcipher(tfm);
2245 return err;
2246}
2247
/*
 * No-op test entry: used for algorithm names that need no known-answer
 * test of their own (e.g. covered by another template entry); always
 * reports success.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
2253
2254/* Please keep this list sorted by algorithm name. */
2255static const struct alg_test_desc alg_test_descs[] = {
2256 {
2257 .alg = "ansi_cprng",
2258 .test = alg_test_cprng,
2259 .suite = {
2260 .cprng = {
2261 .vecs = ansi_cprng_aes_tv_template,
2262 .count = ANSI_CPRNG_AES_TEST_VECTORS
2263 }
2264 }
2265 }, {
2266 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2267 .test = alg_test_aead,
2268 .suite = {
2269 .aead = {
2270 .enc = {
2271 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2272 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2273 },
2274 .dec = {
2275 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2276 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2277 }
2278 }
2279 }
2280 }, {
2281 .alg = "authenc(hmac(sha1),cbc(aes))",
2282 .test = alg_test_aead,
2283 .suite = {
2284 .aead = {
2285 .enc = {
2286 .vecs =
2287 hmac_sha1_aes_cbc_enc_tv_temp,
2288 .count =
2289 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2290 }
2291 }
2292 }
2293 }, {
2294 .alg = "authenc(hmac(sha1),cbc(des))",
2295 .test = alg_test_aead,
2296 .suite = {
2297 .aead = {
2298 .enc = {
2299 .vecs =
2300 hmac_sha1_des_cbc_enc_tv_temp,
2301 .count =
2302 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2303 }
2304 }
2305 }
2306 }, {
2307 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2308 .test = alg_test_aead,
2309 .fips_allowed = 1,
2310 .suite = {
2311 .aead = {
2312 .enc = {
2313 .vecs =
2314 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2315 .count =
2316 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2317 }
2318 }
2319 }
2320 }, {
2321 .alg = "authenc(hmac(sha1),ctr(aes))",
2322 .test = alg_test_null,
2323 .fips_allowed = 1,
2324 }, {
2325 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2326 .test = alg_test_aead,
2327 .suite = {
2328 .aead = {
2329 .enc = {
2330 .vecs =
2331 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2332 .count =
2333 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2334 },
2335 .dec = {
2336 .vecs =
2337 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2338 .count =
2339 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2340 }
2341 }
2342 }
2343 }, {
2344 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2345 .test = alg_test_null,
2346 .fips_allowed = 1,
2347 }, {
2348 .alg = "authenc(hmac(sha224),cbc(des))",
2349 .test = alg_test_aead,
2350 .suite = {
2351 .aead = {
2352 .enc = {
2353 .vecs =
2354 hmac_sha224_des_cbc_enc_tv_temp,
2355 .count =
2356 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2357 }
2358 }
2359 }
2360 }, {
2361 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2362 .test = alg_test_aead,
2363 .fips_allowed = 1,
2364 .suite = {
2365 .aead = {
2366 .enc = {
2367 .vecs =
2368 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2369 .count =
2370 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2371 }
2372 }
2373 }
2374 }, {
2375 .alg = "authenc(hmac(sha256),cbc(aes))",
2376 .test = alg_test_aead,
2377 .fips_allowed = 1,
2378 .suite = {
2379 .aead = {
2380 .enc = {
2381 .vecs =
2382 hmac_sha256_aes_cbc_enc_tv_temp,
2383 .count =
2384 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2385 }
2386 }
2387 }
2388 }, {
2389 .alg = "authenc(hmac(sha256),cbc(des))",
2390 .test = alg_test_aead,
2391 .suite = {
2392 .aead = {
2393 .enc = {
2394 .vecs =
2395 hmac_sha256_des_cbc_enc_tv_temp,
2396 .count =
2397 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2398 }
2399 }
2400 }
2401 }, {
2402 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2403 .test = alg_test_aead,
2404 .fips_allowed = 1,
2405 .suite = {
2406 .aead = {
2407 .enc = {
2408 .vecs =
2409 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2410 .count =
2411 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2412 }
2413 }
2414 }
2415 }, {
2416 .alg = "authenc(hmac(sha256),ctr(aes))",
2417 .test = alg_test_null,
2418 .fips_allowed = 1,
2419 }, {
2420 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2421 .test = alg_test_null,
2422 .fips_allowed = 1,
2423 }, {
2424 .alg = "authenc(hmac(sha384),cbc(des))",
2425 .test = alg_test_aead,
2426 .suite = {
2427 .aead = {
2428 .enc = {
2429 .vecs =
2430 hmac_sha384_des_cbc_enc_tv_temp,
2431 .count =
2432 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2433 }
2434 }
2435 }
2436 }, {
2437 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2438 .test = alg_test_aead,
2439 .fips_allowed = 1,
2440 .suite = {
2441 .aead = {
2442 .enc = {
2443 .vecs =
2444 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2445 .count =
2446 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2447 }
2448 }
2449 }
2450 }, {
2451 .alg = "authenc(hmac(sha384),ctr(aes))",
2452 .test = alg_test_null,
2453 .fips_allowed = 1,
2454 }, {
2455 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2456 .test = alg_test_null,
2457 .fips_allowed = 1,
2458 }, {
2459 .alg = "authenc(hmac(sha512),cbc(aes))",
2460 .fips_allowed = 1,
2461 .test = alg_test_aead,
2462 .suite = {
2463 .aead = {
2464 .enc = {
2465 .vecs =
2466 hmac_sha512_aes_cbc_enc_tv_temp,
2467 .count =
2468 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2469 }
2470 }
2471 }
2472 }, {
2473 .alg = "authenc(hmac(sha512),cbc(des))",
2474 .test = alg_test_aead,
2475 .suite = {
2476 .aead = {
2477 .enc = {
2478 .vecs =
2479 hmac_sha512_des_cbc_enc_tv_temp,
2480 .count =
2481 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2482 }
2483 }
2484 }
2485 }, {
2486 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2487 .test = alg_test_aead,
2488 .fips_allowed = 1,
2489 .suite = {
2490 .aead = {
2491 .enc = {
2492 .vecs =
2493 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2494 .count =
2495 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2496 }
2497 }
2498 }
2499 }, {
2500 .alg = "authenc(hmac(sha512),ctr(aes))",
2501 .test = alg_test_null,
2502 .fips_allowed = 1,
2503 }, {
2504 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2505 .test = alg_test_null,
2506 .fips_allowed = 1,
2507 }, {
2508 .alg = "cbc(aes)",
2509 .test = alg_test_skcipher,
2510 .fips_allowed = 1,
2511 .suite = {
2512 .cipher = {
2513 .enc = {
2514 .vecs = aes_cbc_enc_tv_template,
2515 .count = AES_CBC_ENC_TEST_VECTORS
2516 },
2517 .dec = {
2518 .vecs = aes_cbc_dec_tv_template,
2519 .count = AES_CBC_DEC_TEST_VECTORS
2520 }
2521 }
2522 }
2523 }, {
2524 .alg = "cbc(anubis)",
2525 .test = alg_test_skcipher,
2526 .suite = {
2527 .cipher = {
2528 .enc = {
2529 .vecs = anubis_cbc_enc_tv_template,
2530 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2531 },
2532 .dec = {
2533 .vecs = anubis_cbc_dec_tv_template,
2534 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2535 }
2536 }
2537 }
2538 }, {
2539 .alg = "cbc(blowfish)",
2540 .test = alg_test_skcipher,
2541 .suite = {
2542 .cipher = {
2543 .enc = {
2544 .vecs = bf_cbc_enc_tv_template,
2545 .count = BF_CBC_ENC_TEST_VECTORS
2546 },
2547 .dec = {
2548 .vecs = bf_cbc_dec_tv_template,
2549 .count = BF_CBC_DEC_TEST_VECTORS
2550 }
2551 }
2552 }
2553 }, {
2554 .alg = "cbc(camellia)",
2555 .test = alg_test_skcipher,
2556 .suite = {
2557 .cipher = {
2558 .enc = {
2559 .vecs = camellia_cbc_enc_tv_template,
2560 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2561 },
2562 .dec = {
2563 .vecs = camellia_cbc_dec_tv_template,
2564 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2565 }
2566 }
2567 }
2568 }, {
2569 .alg = "cbc(cast5)",
2570 .test = alg_test_skcipher,
2571 .suite = {
2572 .cipher = {
2573 .enc = {
2574 .vecs = cast5_cbc_enc_tv_template,
2575 .count = CAST5_CBC_ENC_TEST_VECTORS
2576 },
2577 .dec = {
2578 .vecs = cast5_cbc_dec_tv_template,
2579 .count = CAST5_CBC_DEC_TEST_VECTORS
2580 }
2581 }
2582 }
2583 }, {
2584 .alg = "cbc(cast6)",
2585 .test = alg_test_skcipher,
2586 .suite = {
2587 .cipher = {
2588 .enc = {
2589 .vecs = cast6_cbc_enc_tv_template,
2590 .count = CAST6_CBC_ENC_TEST_VECTORS
2591 },
2592 .dec = {
2593 .vecs = cast6_cbc_dec_tv_template,
2594 .count = CAST6_CBC_DEC_TEST_VECTORS
2595 }
2596 }
2597 }
2598 }, {
2599 .alg = "cbc(des)",
2600 .test = alg_test_skcipher,
2601 .suite = {
2602 .cipher = {
2603 .enc = {
2604 .vecs = des_cbc_enc_tv_template,
2605 .count = DES_CBC_ENC_TEST_VECTORS
2606 },
2607 .dec = {
2608 .vecs = des_cbc_dec_tv_template,
2609 .count = DES_CBC_DEC_TEST_VECTORS
2610 }
2611 }
2612 }
2613 }, {
2614 .alg = "cbc(des3_ede)",
2615 .test = alg_test_skcipher,
2616 .fips_allowed = 1,
2617 .suite = {
2618 .cipher = {
2619 .enc = {
2620 .vecs = des3_ede_cbc_enc_tv_template,
2621 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2622 },
2623 .dec = {
2624 .vecs = des3_ede_cbc_dec_tv_template,
2625 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2626 }
2627 }
2628 }
2629 }, {
2630 .alg = "cbc(serpent)",
2631 .test = alg_test_skcipher,
2632 .suite = {
2633 .cipher = {
2634 .enc = {
2635 .vecs = serpent_cbc_enc_tv_template,
2636 .count = SERPENT_CBC_ENC_TEST_VECTORS
2637 },
2638 .dec = {
2639 .vecs = serpent_cbc_dec_tv_template,
2640 .count = SERPENT_CBC_DEC_TEST_VECTORS
2641 }
2642 }
2643 }
2644 }, {
2645 .alg = "cbc(twofish)",
2646 .test = alg_test_skcipher,
2647 .suite = {
2648 .cipher = {
2649 .enc = {
2650 .vecs = tf_cbc_enc_tv_template,
2651 .count = TF_CBC_ENC_TEST_VECTORS
2652 },
2653 .dec = {
2654 .vecs = tf_cbc_dec_tv_template,
2655 .count = TF_CBC_DEC_TEST_VECTORS
2656 }
2657 }
2658 }
2659 }, {
2660 .alg = "ccm(aes)",
2661 .test = alg_test_aead,
2662 .fips_allowed = 1,
2663 .suite = {
2664 .aead = {
2665 .enc = {
2666 .vecs = aes_ccm_enc_tv_template,
2667 .count = AES_CCM_ENC_TEST_VECTORS
2668 },
2669 .dec = {
2670 .vecs = aes_ccm_dec_tv_template,
2671 .count = AES_CCM_DEC_TEST_VECTORS
2672 }
2673 }
2674 }
2675 }, {
2676 .alg = "chacha20",
2677 .test = alg_test_skcipher,
2678 .suite = {
2679 .cipher = {
2680 .enc = {
2681 .vecs = chacha20_enc_tv_template,
2682 .count = CHACHA20_ENC_TEST_VECTORS
2683 },
2684 .dec = {
2685 .vecs = chacha20_enc_tv_template,
2686 .count = CHACHA20_ENC_TEST_VECTORS
2687 },
2688 }
2689 }
2690 }, {
2691 .alg = "cmac(aes)",
2692 .fips_allowed = 1,
2693 .test = alg_test_hash,
2694 .suite = {
2695 .hash = {
2696 .vecs = aes_cmac128_tv_template,
2697 .count = CMAC_AES_TEST_VECTORS
2698 }
2699 }
2700 }, {
2701 .alg = "cmac(des3_ede)",
2702 .fips_allowed = 1,
2703 .test = alg_test_hash,
2704 .suite = {
2705 .hash = {
2706 .vecs = des3_ede_cmac64_tv_template,
2707 .count = CMAC_DES3_EDE_TEST_VECTORS
2708 }
2709 }
2710 }, {
2711 .alg = "compress_null",
2712 .test = alg_test_null,
2713 }, {
2714 .alg = "crc32",
2715 .test = alg_test_hash,
2716 .suite = {
2717 .hash = {
2718 .vecs = crc32_tv_template,
2719 .count = CRC32_TEST_VECTORS
2720 }
2721 }
2722 }, {
2723 .alg = "crc32c",
2724 .test = alg_test_crc32c,
2725 .fips_allowed = 1,
2726 .suite = {
2727 .hash = {
2728 .vecs = crc32c_tv_template,
2729 .count = CRC32C_TEST_VECTORS
2730 }
2731 }
2732 }, {
2733 .alg = "crct10dif",
2734 .test = alg_test_hash,
2735 .fips_allowed = 1,
2736 .suite = {
2737 .hash = {
2738 .vecs = crct10dif_tv_template,
2739 .count = CRCT10DIF_TEST_VECTORS
2740 }
2741 }
2742 }, {
2743 .alg = "ctr(aes)",
2744 .test = alg_test_skcipher,
2745 .fips_allowed = 1,
2746 .suite = {
2747 .cipher = {
2748 .enc = {
2749 .vecs = aes_ctr_enc_tv_template,
2750 .count = AES_CTR_ENC_TEST_VECTORS
2751 },
2752 .dec = {
2753 .vecs = aes_ctr_dec_tv_template,
2754 .count = AES_CTR_DEC_TEST_VECTORS
2755 }
2756 }
2757 }
2758 }, {
2759 .alg = "ctr(blowfish)",
2760 .test = alg_test_skcipher,
2761 .suite = {
2762 .cipher = {
2763 .enc = {
2764 .vecs = bf_ctr_enc_tv_template,
2765 .count = BF_CTR_ENC_TEST_VECTORS
2766 },
2767 .dec = {
2768 .vecs = bf_ctr_dec_tv_template,
2769 .count = BF_CTR_DEC_TEST_VECTORS
2770 }
2771 }
2772 }
2773 }, {
2774 .alg = "ctr(camellia)",
2775 .test = alg_test_skcipher,
2776 .suite = {
2777 .cipher = {
2778 .enc = {
2779 .vecs = camellia_ctr_enc_tv_template,
2780 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2781 },
2782 .dec = {
2783 .vecs = camellia_ctr_dec_tv_template,
2784 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2785 }
2786 }
2787 }
2788 }, {
2789 .alg = "ctr(cast5)",
2790 .test = alg_test_skcipher,
2791 .suite = {
2792 .cipher = {
2793 .enc = {
2794 .vecs = cast5_ctr_enc_tv_template,
2795 .count = CAST5_CTR_ENC_TEST_VECTORS
2796 },
2797 .dec = {
2798 .vecs = cast5_ctr_dec_tv_template,
2799 .count = CAST5_CTR_DEC_TEST_VECTORS
2800 }
2801 }
2802 }
2803 }, {
2804 .alg = "ctr(cast6)",
2805 .test = alg_test_skcipher,
2806 .suite = {
2807 .cipher = {
2808 .enc = {
2809 .vecs = cast6_ctr_enc_tv_template,
2810 .count = CAST6_CTR_ENC_TEST_VECTORS
2811 },
2812 .dec = {
2813 .vecs = cast6_ctr_dec_tv_template,
2814 .count = CAST6_CTR_DEC_TEST_VECTORS
2815 }
2816 }
2817 }
2818 }, {
2819 .alg = "ctr(des)",
2820 .test = alg_test_skcipher,
2821 .suite = {
2822 .cipher = {
2823 .enc = {
2824 .vecs = des_ctr_enc_tv_template,
2825 .count = DES_CTR_ENC_TEST_VECTORS
2826 },
2827 .dec = {
2828 .vecs = des_ctr_dec_tv_template,
2829 .count = DES_CTR_DEC_TEST_VECTORS
2830 }
2831 }
2832 }
2833 }, {
2834 .alg = "ctr(des3_ede)",
2835 .test = alg_test_skcipher,
2836 .suite = {
2837 .cipher = {
2838 .enc = {
2839 .vecs = des3_ede_ctr_enc_tv_template,
2840 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2841 },
2842 .dec = {
2843 .vecs = des3_ede_ctr_dec_tv_template,
2844 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2845 }
2846 }
2847 }
2848 }, {
2849 .alg = "ctr(serpent)",
2850 .test = alg_test_skcipher,
2851 .suite = {
2852 .cipher = {
2853 .enc = {
2854 .vecs = serpent_ctr_enc_tv_template,
2855 .count = SERPENT_CTR_ENC_TEST_VECTORS
2856 },
2857 .dec = {
2858 .vecs = serpent_ctr_dec_tv_template,
2859 .count = SERPENT_CTR_DEC_TEST_VECTORS
2860 }
2861 }
2862 }
2863 }, {
2864 .alg = "ctr(twofish)",
2865 .test = alg_test_skcipher,
2866 .suite = {
2867 .cipher = {
2868 .enc = {
2869 .vecs = tf_ctr_enc_tv_template,
2870 .count = TF_CTR_ENC_TEST_VECTORS
2871 },
2872 .dec = {
2873 .vecs = tf_ctr_dec_tv_template,
2874 .count = TF_CTR_DEC_TEST_VECTORS
2875 }
2876 }
2877 }
2878 }, {
2879 .alg = "cts(cbc(aes))",
2880 .test = alg_test_skcipher,
2881 .suite = {
2882 .cipher = {
2883 .enc = {
2884 .vecs = cts_mode_enc_tv_template,
2885 .count = CTS_MODE_ENC_TEST_VECTORS
2886 },
2887 .dec = {
2888 .vecs = cts_mode_dec_tv_template,
2889 .count = CTS_MODE_DEC_TEST_VECTORS
2890 }
2891 }
2892 }
2893 }, {
2894 .alg = "deflate",
2895 .test = alg_test_comp,
2896 .fips_allowed = 1,
2897 .suite = {
2898 .comp = {
2899 .comp = {
2900 .vecs = deflate_comp_tv_template,
2901 .count = DEFLATE_COMP_TEST_VECTORS
2902 },
2903 .decomp = {
2904 .vecs = deflate_decomp_tv_template,
2905 .count = DEFLATE_DECOMP_TEST_VECTORS
2906 }
2907 }
2908 }
2909 }, {
2910 .alg = "dh",
2911 .test = alg_test_kpp,
2912 .fips_allowed = 1,
2913 .suite = {
2914 .kpp = {
2915 .vecs = dh_tv_template,
2916 .count = DH_TEST_VECTORS
2917 }
2918 }
2919 }, {
2920 .alg = "digest_null",
2921 .test = alg_test_null,
2922 }, {
2923 .alg = "drbg_nopr_ctr_aes128",
2924 .test = alg_test_drbg,
2925 .fips_allowed = 1,
2926 .suite = {
2927 .drbg = {
2928 .vecs = drbg_nopr_ctr_aes128_tv_template,
2929 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2930 }
2931 }
2932 }, {
2933 .alg = "drbg_nopr_ctr_aes192",
2934 .test = alg_test_drbg,
2935 .fips_allowed = 1,
2936 .suite = {
2937 .drbg = {
2938 .vecs = drbg_nopr_ctr_aes192_tv_template,
2939 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2940 }
2941 }
2942 }, {
2943 .alg = "drbg_nopr_ctr_aes256",
2944 .test = alg_test_drbg,
2945 .fips_allowed = 1,
2946 .suite = {
2947 .drbg = {
2948 .vecs = drbg_nopr_ctr_aes256_tv_template,
2949 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2950 }
2951 }
2952 }, {
2953 /*
2954 * There is no need to specifically test the DRBG with every
2955 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2956 */
2957 .alg = "drbg_nopr_hmac_sha1",
2958 .fips_allowed = 1,
2959 .test = alg_test_null,
2960 }, {
2961 .alg = "drbg_nopr_hmac_sha256",
2962 .test = alg_test_drbg,
2963 .fips_allowed = 1,
2964 .suite = {
2965 .drbg = {
2966 .vecs = drbg_nopr_hmac_sha256_tv_template,
2967 .count =
2968 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2969 }
2970 }
2971 }, {
2972 /* covered by drbg_nopr_hmac_sha256 test */
2973 .alg = "drbg_nopr_hmac_sha384",
2974 .fips_allowed = 1,
2975 .test = alg_test_null,
2976 }, {
2977 .alg = "drbg_nopr_hmac_sha512",
2978 .test = alg_test_null,
2979 .fips_allowed = 1,
2980 }, {
2981 .alg = "drbg_nopr_sha1",
2982 .fips_allowed = 1,
2983 .test = alg_test_null,
2984 }, {
2985 .alg = "drbg_nopr_sha256",
2986 .test = alg_test_drbg,
2987 .fips_allowed = 1,
2988 .suite = {
2989 .drbg = {
2990 .vecs = drbg_nopr_sha256_tv_template,
2991 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2992 }
2993 }
2994 }, {
2995 /* covered by drbg_nopr_sha256 test */
2996 .alg = "drbg_nopr_sha384",
2997 .fips_allowed = 1,
2998 .test = alg_test_null,
2999 }, {
3000 .alg = "drbg_nopr_sha512",
3001 .fips_allowed = 1,
3002 .test = alg_test_null,
3003 }, {
3004 .alg = "drbg_pr_ctr_aes128",
3005 .test = alg_test_drbg,
3006 .fips_allowed = 1,
3007 .suite = {
3008 .drbg = {
3009 .vecs = drbg_pr_ctr_aes128_tv_template,
3010 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
3011 }
3012 }
3013 }, {
3014 /* covered by drbg_pr_ctr_aes128 test */
3015 .alg = "drbg_pr_ctr_aes192",
3016 .fips_allowed = 1,
3017 .test = alg_test_null,
3018 }, {
3019 .alg = "drbg_pr_ctr_aes256",
3020 .fips_allowed = 1,
3021 .test = alg_test_null,
3022 }, {
3023 .alg = "drbg_pr_hmac_sha1",
3024 .fips_allowed = 1,
3025 .test = alg_test_null,
3026 }, {
3027 .alg = "drbg_pr_hmac_sha256",
3028 .test = alg_test_drbg,
3029 .fips_allowed = 1,
3030 .suite = {
3031 .drbg = {
3032 .vecs = drbg_pr_hmac_sha256_tv_template,
3033 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
3034 }
3035 }
3036 }, {
3037 /* covered by drbg_pr_hmac_sha256 test */
3038 .alg = "drbg_pr_hmac_sha384",
3039 .fips_allowed = 1,
3040 .test = alg_test_null,
3041 }, {
3042 .alg = "drbg_pr_hmac_sha512",
3043 .test = alg_test_null,
3044 .fips_allowed = 1,
3045 }, {
3046 .alg = "drbg_pr_sha1",
3047 .fips_allowed = 1,
3048 .test = alg_test_null,
3049 }, {
3050 .alg = "drbg_pr_sha256",
3051 .test = alg_test_drbg,
3052 .fips_allowed = 1,
3053 .suite = {
3054 .drbg = {
3055 .vecs = drbg_pr_sha256_tv_template,
3056 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
3057 }
3058 }
3059 }, {
3060 /* covered by drbg_pr_sha256 test */
3061 .alg = "drbg_pr_sha384",
3062 .fips_allowed = 1,
3063 .test = alg_test_null,
3064 }, {
3065 .alg = "drbg_pr_sha512",
3066 .fips_allowed = 1,
3067 .test = alg_test_null,
3068 }, {
3069 .alg = "ecb(aes)",
3070 .test = alg_test_skcipher,
3071 .fips_allowed = 1,
3072 .suite = {
3073 .cipher = {
3074 .enc = {
3075 .vecs = aes_enc_tv_template,
3076 .count = AES_ENC_TEST_VECTORS
3077 },
3078 .dec = {
3079 .vecs = aes_dec_tv_template,
3080 .count = AES_DEC_TEST_VECTORS
3081 }
3082 }
3083 }
3084 }, {
3085 .alg = "ecb(anubis)",
3086 .test = alg_test_skcipher,
3087 .suite = {
3088 .cipher = {
3089 .enc = {
3090 .vecs = anubis_enc_tv_template,
3091 .count = ANUBIS_ENC_TEST_VECTORS
3092 },
3093 .dec = {
3094 .vecs = anubis_dec_tv_template,
3095 .count = ANUBIS_DEC_TEST_VECTORS
3096 }
3097 }
3098 }
3099 }, {
3100 .alg = "ecb(arc4)",
3101 .test = alg_test_skcipher,
3102 .suite = {
3103 .cipher = {
3104 .enc = {
3105 .vecs = arc4_enc_tv_template,
3106 .count = ARC4_ENC_TEST_VECTORS
3107 },
3108 .dec = {
3109 .vecs = arc4_dec_tv_template,
3110 .count = ARC4_DEC_TEST_VECTORS
3111 }
3112 }
3113 }
3114 }, {
3115 .alg = "ecb(blowfish)",
3116 .test = alg_test_skcipher,
3117 .suite = {
3118 .cipher = {
3119 .enc = {
3120 .vecs = bf_enc_tv_template,
3121 .count = BF_ENC_TEST_VECTORS
3122 },
3123 .dec = {
3124 .vecs = bf_dec_tv_template,
3125 .count = BF_DEC_TEST_VECTORS
3126 }
3127 }
3128 }
3129 }, {
3130 .alg = "ecb(camellia)",
3131 .test = alg_test_skcipher,
3132 .suite = {
3133 .cipher = {
3134 .enc = {
3135 .vecs = camellia_enc_tv_template,
3136 .count = CAMELLIA_ENC_TEST_VECTORS
3137 },
3138 .dec = {
3139 .vecs = camellia_dec_tv_template,
3140 .count = CAMELLIA_DEC_TEST_VECTORS
3141 }
3142 }
3143 }
3144 }, {
3145 .alg = "ecb(cast5)",
3146 .test = alg_test_skcipher,
3147 .suite = {
3148 .cipher = {
3149 .enc = {
3150 .vecs = cast5_enc_tv_template,
3151 .count = CAST5_ENC_TEST_VECTORS
3152 },
3153 .dec = {
3154 .vecs = cast5_dec_tv_template,
3155 .count = CAST5_DEC_TEST_VECTORS
3156 }
3157 }
3158 }
3159 }, {
3160 .alg = "ecb(cast6)",
3161 .test = alg_test_skcipher,
3162 .suite = {
3163 .cipher = {
3164 .enc = {
3165 .vecs = cast6_enc_tv_template,
3166 .count = CAST6_ENC_TEST_VECTORS
3167 },
3168 .dec = {
3169 .vecs = cast6_dec_tv_template,
3170 .count = CAST6_DEC_TEST_VECTORS
3171 }
3172 }
3173 }
3174 }, {
3175 .alg = "ecb(cipher_null)",
3176 .test = alg_test_null,
3177 }, {
3178 .alg = "ecb(des)",
3179 .test = alg_test_skcipher,
3180 .suite = {
3181 .cipher = {
3182 .enc = {
3183 .vecs = des_enc_tv_template,
3184 .count = DES_ENC_TEST_VECTORS
3185 },
3186 .dec = {
3187 .vecs = des_dec_tv_template,
3188 .count = DES_DEC_TEST_VECTORS
3189 }
3190 }
3191 }
3192 }, {
3193 .alg = "ecb(des3_ede)",
3194 .test = alg_test_skcipher,
3195 .fips_allowed = 1,
3196 .suite = {
3197 .cipher = {
3198 .enc = {
3199 .vecs = des3_ede_enc_tv_template,
3200 .count = DES3_EDE_ENC_TEST_VECTORS
3201 },
3202 .dec = {
3203 .vecs = des3_ede_dec_tv_template,
3204 .count = DES3_EDE_DEC_TEST_VECTORS
3205 }
3206 }
3207 }
3208 }, {
3209 .alg = "ecb(fcrypt)",
3210 .test = alg_test_skcipher,
3211 .suite = {
3212 .cipher = {
3213 .enc = {
3214 .vecs = fcrypt_pcbc_enc_tv_template,
3215 .count = 1
3216 },
3217 .dec = {
3218 .vecs = fcrypt_pcbc_dec_tv_template,
3219 .count = 1
3220 }
3221 }
3222 }
3223 }, {
3224 .alg = "ecb(khazad)",
3225 .test = alg_test_skcipher,
3226 .suite = {
3227 .cipher = {
3228 .enc = {
3229 .vecs = khazad_enc_tv_template,
3230 .count = KHAZAD_ENC_TEST_VECTORS
3231 },
3232 .dec = {
3233 .vecs = khazad_dec_tv_template,
3234 .count = KHAZAD_DEC_TEST_VECTORS
3235 }
3236 }
3237 }
3238 }, {
3239 .alg = "ecb(seed)",
3240 .test = alg_test_skcipher,
3241 .suite = {
3242 .cipher = {
3243 .enc = {
3244 .vecs = seed_enc_tv_template,
3245 .count = SEED_ENC_TEST_VECTORS
3246 },
3247 .dec = {
3248 .vecs = seed_dec_tv_template,
3249 .count = SEED_DEC_TEST_VECTORS
3250 }
3251 }
3252 }
3253 }, {
3254 .alg = "ecb(serpent)",
3255 .test = alg_test_skcipher,
3256 .suite = {
3257 .cipher = {
3258 .enc = {
3259 .vecs = serpent_enc_tv_template,
3260 .count = SERPENT_ENC_TEST_VECTORS
3261 },
3262 .dec = {
3263 .vecs = serpent_dec_tv_template,
3264 .count = SERPENT_DEC_TEST_VECTORS
3265 }
3266 }
3267 }
3268 }, {
3269 .alg = "ecb(tea)",
3270 .test = alg_test_skcipher,
3271 .suite = {
3272 .cipher = {
3273 .enc = {
3274 .vecs = tea_enc_tv_template,
3275 .count = TEA_ENC_TEST_VECTORS
3276 },
3277 .dec = {
3278 .vecs = tea_dec_tv_template,
3279 .count = TEA_DEC_TEST_VECTORS
3280 }
3281 }
3282 }
3283 }, {
3284 .alg = "ecb(tnepres)",
3285 .test = alg_test_skcipher,
3286 .suite = {
3287 .cipher = {
3288 .enc = {
3289 .vecs = tnepres_enc_tv_template,
3290 .count = TNEPRES_ENC_TEST_VECTORS
3291 },
3292 .dec = {
3293 .vecs = tnepres_dec_tv_template,
3294 .count = TNEPRES_DEC_TEST_VECTORS
3295 }
3296 }
3297 }
3298 }, {
3299 .alg = "ecb(twofish)",
3300 .test = alg_test_skcipher,
3301 .suite = {
3302 .cipher = {
3303 .enc = {
3304 .vecs = tf_enc_tv_template,
3305 .count = TF_ENC_TEST_VECTORS
3306 },
3307 .dec = {
3308 .vecs = tf_dec_tv_template,
3309 .count = TF_DEC_TEST_VECTORS
3310 }
3311 }
3312 }
3313 }, {
3314 .alg = "ecb(xeta)",
3315 .test = alg_test_skcipher,
3316 .suite = {
3317 .cipher = {
3318 .enc = {
3319 .vecs = xeta_enc_tv_template,
3320 .count = XETA_ENC_TEST_VECTORS
3321 },
3322 .dec = {
3323 .vecs = xeta_dec_tv_template,
3324 .count = XETA_DEC_TEST_VECTORS
3325 }
3326 }
3327 }
3328 }, {
3329 .alg = "ecb(xtea)",
3330 .test = alg_test_skcipher,
3331 .suite = {
3332 .cipher = {
3333 .enc = {
3334 .vecs = xtea_enc_tv_template,
3335 .count = XTEA_ENC_TEST_VECTORS
3336 },
3337 .dec = {
3338 .vecs = xtea_dec_tv_template,
3339 .count = XTEA_DEC_TEST_VECTORS
3340 }
3341 }
3342 }
3343 }, {
3344 .alg = "ecdh",
3345 .test = alg_test_kpp,
3346 .fips_allowed = 1,
3347 .suite = {
3348 .kpp = {
3349 .vecs = ecdh_tv_template,
3350 .count = ECDH_TEST_VECTORS
3351 }
3352 }
3353 }, {
3354 .alg = "gcm(aes)",
3355 .test = alg_test_aead,
3356 .fips_allowed = 1,
3357 .suite = {
3358 .aead = {
3359 .enc = {
3360 .vecs = aes_gcm_enc_tv_template,
3361 .count = AES_GCM_ENC_TEST_VECTORS
3362 },
3363 .dec = {
3364 .vecs = aes_gcm_dec_tv_template,
3365 .count = AES_GCM_DEC_TEST_VECTORS
3366 }
3367 }
3368 }
3369 }, {
3370 .alg = "ghash",
3371 .test = alg_test_hash,
3372 .fips_allowed = 1,
3373 .suite = {
3374 .hash = {
3375 .vecs = ghash_tv_template,
3376 .count = GHASH_TEST_VECTORS
3377 }
3378 }
3379 }, {
3380 .alg = "hmac(crc32)",
3381 .test = alg_test_hash,
3382 .suite = {
3383 .hash = {
3384 .vecs = bfin_crc_tv_template,
3385 .count = BFIN_CRC_TEST_VECTORS
3386 }
3387 }
3388 }, {
3389 .alg = "hmac(md5)",
3390 .test = alg_test_hash,
3391 .suite = {
3392 .hash = {
3393 .vecs = hmac_md5_tv_template,
3394 .count = HMAC_MD5_TEST_VECTORS
3395 }
3396 }
3397 }, {
3398 .alg = "hmac(rmd128)",
3399 .test = alg_test_hash,
3400 .suite = {
3401 .hash = {
3402 .vecs = hmac_rmd128_tv_template,
3403 .count = HMAC_RMD128_TEST_VECTORS
3404 }
3405 }
3406 }, {
3407 .alg = "hmac(rmd160)",
3408 .test = alg_test_hash,
3409 .suite = {
3410 .hash = {
3411 .vecs = hmac_rmd160_tv_template,
3412 .count = HMAC_RMD160_TEST_VECTORS
3413 }
3414 }
3415 }, {
3416 .alg = "hmac(sha1)",
3417 .test = alg_test_hash,
3418 .fips_allowed = 1,
3419 .suite = {
3420 .hash = {
3421 .vecs = hmac_sha1_tv_template,
3422 .count = HMAC_SHA1_TEST_VECTORS
3423 }
3424 }
3425 }, {
3426 .alg = "hmac(sha224)",
3427 .test = alg_test_hash,
3428 .fips_allowed = 1,
3429 .suite = {
3430 .hash = {
3431 .vecs = hmac_sha224_tv_template,
3432 .count = HMAC_SHA224_TEST_VECTORS
3433 }
3434 }
3435 }, {
3436 .alg = "hmac(sha256)",
3437 .test = alg_test_hash,
3438 .fips_allowed = 1,
3439 .suite = {
3440 .hash = {
3441 .vecs = hmac_sha256_tv_template,
3442 .count = HMAC_SHA256_TEST_VECTORS
3443 }
3444 }
3445 }, {
3446 .alg = "hmac(sha3-224)",
3447 .test = alg_test_hash,
3448 .fips_allowed = 1,
3449 .suite = {
3450 .hash = {
3451 .vecs = hmac_sha3_224_tv_template,
3452 .count = HMAC_SHA3_224_TEST_VECTORS
3453 }
3454 }
3455 }, {
3456 .alg = "hmac(sha3-256)",
3457 .test = alg_test_hash,
3458 .fips_allowed = 1,
3459 .suite = {
3460 .hash = {
3461 .vecs = hmac_sha3_256_tv_template,
3462 .count = HMAC_SHA3_256_TEST_VECTORS
3463 }
3464 }
3465 }, {
3466 .alg = "hmac(sha3-384)",
3467 .test = alg_test_hash,
3468 .fips_allowed = 1,
3469 .suite = {
3470 .hash = {
3471 .vecs = hmac_sha3_384_tv_template,
3472 .count = HMAC_SHA3_384_TEST_VECTORS
3473 }
3474 }
3475 }, {
3476 .alg = "hmac(sha3-512)",
3477 .test = alg_test_hash,
3478 .fips_allowed = 1,
3479 .suite = {
3480 .hash = {
3481 .vecs = hmac_sha3_512_tv_template,
3482 .count = HMAC_SHA3_512_TEST_VECTORS
3483 }
3484 }
3485 }, {
3486 .alg = "hmac(sha384)",
3487 .test = alg_test_hash,
3488 .fips_allowed = 1,
3489 .suite = {
3490 .hash = {
3491 .vecs = hmac_sha384_tv_template,
3492 .count = HMAC_SHA384_TEST_VECTORS
3493 }
3494 }
3495 }, {
3496 .alg = "hmac(sha512)",
3497 .test = alg_test_hash,
3498 .fips_allowed = 1,
3499 .suite = {
3500 .hash = {
3501 .vecs = hmac_sha512_tv_template,
3502 .count = HMAC_SHA512_TEST_VECTORS
3503 }
3504 }
3505 }, {
3506 .alg = "jitterentropy_rng",
3507 .fips_allowed = 1,
3508 .test = alg_test_null,
3509 }, {
3510 .alg = "kw(aes)",
3511 .test = alg_test_skcipher,
3512 .fips_allowed = 1,
3513 .suite = {
3514 .cipher = {
3515 .enc = {
3516 .vecs = aes_kw_enc_tv_template,
3517 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3518 },
3519 .dec = {
3520 .vecs = aes_kw_dec_tv_template,
3521 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3522 }
3523 }
3524 }
3525 }, {
3526 .alg = "lrw(aes)",
3527 .test = alg_test_skcipher,
3528 .suite = {
3529 .cipher = {
3530 .enc = {
3531 .vecs = aes_lrw_enc_tv_template,
3532 .count = AES_LRW_ENC_TEST_VECTORS
3533 },
3534 .dec = {
3535 .vecs = aes_lrw_dec_tv_template,
3536 .count = AES_LRW_DEC_TEST_VECTORS
3537 }
3538 }
3539 }
3540 }, {
3541 .alg = "lrw(camellia)",
3542 .test = alg_test_skcipher,
3543 .suite = {
3544 .cipher = {
3545 .enc = {
3546 .vecs = camellia_lrw_enc_tv_template,
3547 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3548 },
3549 .dec = {
3550 .vecs = camellia_lrw_dec_tv_template,
3551 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3552 }
3553 }
3554 }
3555 }, {
3556 .alg = "lrw(cast6)",
3557 .test = alg_test_skcipher,
3558 .suite = {
3559 .cipher = {
3560 .enc = {
3561 .vecs = cast6_lrw_enc_tv_template,
3562 .count = CAST6_LRW_ENC_TEST_VECTORS
3563 },
3564 .dec = {
3565 .vecs = cast6_lrw_dec_tv_template,
3566 .count = CAST6_LRW_DEC_TEST_VECTORS
3567 }
3568 }
3569 }
3570 }, {
3571 .alg = "lrw(serpent)",
3572 .test = alg_test_skcipher,
3573 .suite = {
3574 .cipher = {
3575 .enc = {
3576 .vecs = serpent_lrw_enc_tv_template,
3577 .count = SERPENT_LRW_ENC_TEST_VECTORS
3578 },
3579 .dec = {
3580 .vecs = serpent_lrw_dec_tv_template,
3581 .count = SERPENT_LRW_DEC_TEST_VECTORS
3582 }
3583 }
3584 }
3585 }, {
3586 .alg = "lrw(twofish)",
3587 .test = alg_test_skcipher,
3588 .suite = {
3589 .cipher = {
3590 .enc = {
3591 .vecs = tf_lrw_enc_tv_template,
3592 .count = TF_LRW_ENC_TEST_VECTORS
3593 },
3594 .dec = {
3595 .vecs = tf_lrw_dec_tv_template,
3596 .count = TF_LRW_DEC_TEST_VECTORS
3597 }
3598 }
3599 }
3600 }, {
3601 .alg = "lz4",
3602 .test = alg_test_comp,
3603 .fips_allowed = 1,
3604 .suite = {
3605 .comp = {
3606 .comp = {
3607 .vecs = lz4_comp_tv_template,
3608 .count = LZ4_COMP_TEST_VECTORS
3609 },
3610 .decomp = {
3611 .vecs = lz4_decomp_tv_template,
3612 .count = LZ4_DECOMP_TEST_VECTORS
3613 }
3614 }
3615 }
3616 }, {
3617 .alg = "lz4hc",
3618 .test = alg_test_comp,
3619 .fips_allowed = 1,
3620 .suite = {
3621 .comp = {
3622 .comp = {
3623 .vecs = lz4hc_comp_tv_template,
3624 .count = LZ4HC_COMP_TEST_VECTORS
3625 },
3626 .decomp = {
3627 .vecs = lz4hc_decomp_tv_template,
3628 .count = LZ4HC_DECOMP_TEST_VECTORS
3629 }
3630 }
3631 }
3632 }, {
3633 .alg = "lzo",
3634 .test = alg_test_comp,
3635 .fips_allowed = 1,
3636 .suite = {
3637 .comp = {
3638 .comp = {
3639 .vecs = lzo_comp_tv_template,
3640 .count = LZO_COMP_TEST_VECTORS
3641 },
3642 .decomp = {
3643 .vecs = lzo_decomp_tv_template,
3644 .count = LZO_DECOMP_TEST_VECTORS
3645 }
3646 }
3647 }
3648 }, {
3649 .alg = "md4",
3650 .test = alg_test_hash,
3651 .suite = {
3652 .hash = {
3653 .vecs = md4_tv_template,
3654 .count = MD4_TEST_VECTORS
3655 }
3656 }
3657 }, {
3658 .alg = "md5",
3659 .test = alg_test_hash,
3660 .suite = {
3661 .hash = {
3662 .vecs = md5_tv_template,
3663 .count = MD5_TEST_VECTORS
3664 }
3665 }
3666 }, {
3667 .alg = "michael_mic",
3668 .test = alg_test_hash,
3669 .suite = {
3670 .hash = {
3671 .vecs = michael_mic_tv_template,
3672 .count = MICHAEL_MIC_TEST_VECTORS
3673 }
3674 }
3675 }, {
3676 .alg = "ofb(aes)",
3677 .test = alg_test_skcipher,
3678 .fips_allowed = 1,
3679 .suite = {
3680 .cipher = {
3681 .enc = {
3682 .vecs = aes_ofb_enc_tv_template,
3683 .count = AES_OFB_ENC_TEST_VECTORS
3684 },
3685 .dec = {
3686 .vecs = aes_ofb_dec_tv_template,
3687 .count = AES_OFB_DEC_TEST_VECTORS
3688 }
3689 }
3690 }
3691 }, {
3692 .alg = "pcbc(fcrypt)",
3693 .test = alg_test_skcipher,
3694 .suite = {
3695 .cipher = {
3696 .enc = {
3697 .vecs = fcrypt_pcbc_enc_tv_template,
3698 .count = FCRYPT_ENC_TEST_VECTORS
3699 },
3700 .dec = {
3701 .vecs = fcrypt_pcbc_dec_tv_template,
3702 .count = FCRYPT_DEC_TEST_VECTORS
3703 }
3704 }
3705 }
3706 }, {
3707 .alg = "poly1305",
3708 .test = alg_test_hash,
3709 .suite = {
3710 .hash = {
3711 .vecs = poly1305_tv_template,
3712 .count = POLY1305_TEST_VECTORS
3713 }
3714 }
3715 }, {
3716 .alg = "rfc3686(ctr(aes))",
3717 .test = alg_test_skcipher,
3718 .fips_allowed = 1,
3719 .suite = {
3720 .cipher = {
3721 .enc = {
3722 .vecs = aes_ctr_rfc3686_enc_tv_template,
3723 .count = AES_CTR_3686_ENC_TEST_VECTORS
3724 },
3725 .dec = {
3726 .vecs = aes_ctr_rfc3686_dec_tv_template,
3727 .count = AES_CTR_3686_DEC_TEST_VECTORS
3728 }
3729 }
3730 }
3731 }, {
3732 .alg = "rfc4106(gcm(aes))",
3733 .test = alg_test_aead,
3734 .fips_allowed = 1,
3735 .suite = {
3736 .aead = {
3737 .enc = {
3738 .vecs = aes_gcm_rfc4106_enc_tv_template,
3739 .count = AES_GCM_4106_ENC_TEST_VECTORS
3740 },
3741 .dec = {
3742 .vecs = aes_gcm_rfc4106_dec_tv_template,
3743 .count = AES_GCM_4106_DEC_TEST_VECTORS
3744 }
3745 }
3746 }
3747 }, {
3748 .alg = "rfc4309(ccm(aes))",
3749 .test = alg_test_aead,
3750 .fips_allowed = 1,
3751 .suite = {
3752 .aead = {
3753 .enc = {
3754 .vecs = aes_ccm_rfc4309_enc_tv_template,
3755 .count = AES_CCM_4309_ENC_TEST_VECTORS
3756 },
3757 .dec = {
3758 .vecs = aes_ccm_rfc4309_dec_tv_template,
3759 .count = AES_CCM_4309_DEC_TEST_VECTORS
3760 }
3761 }
3762 }
3763 }, {
3764 .alg = "rfc4543(gcm(aes))",
3765 .test = alg_test_aead,
3766 .suite = {
3767 .aead = {
3768 .enc = {
3769 .vecs = aes_gcm_rfc4543_enc_tv_template,
3770 .count = AES_GCM_4543_ENC_TEST_VECTORS
3771 },
3772 .dec = {
3773 .vecs = aes_gcm_rfc4543_dec_tv_template,
3774 .count = AES_GCM_4543_DEC_TEST_VECTORS
3775 },
3776 }
3777 }
3778 }, {
3779 .alg = "rfc7539(chacha20,poly1305)",
3780 .test = alg_test_aead,
3781 .suite = {
3782 .aead = {
3783 .enc = {
3784 .vecs = rfc7539_enc_tv_template,
3785 .count = RFC7539_ENC_TEST_VECTORS
3786 },
3787 .dec = {
3788 .vecs = rfc7539_dec_tv_template,
3789 .count = RFC7539_DEC_TEST_VECTORS
3790 },
3791 }
3792 }
3793 }, {
3794 .alg = "rfc7539esp(chacha20,poly1305)",
3795 .test = alg_test_aead,
3796 .suite = {
3797 .aead = {
3798 .enc = {
3799 .vecs = rfc7539esp_enc_tv_template,
3800 .count = RFC7539ESP_ENC_TEST_VECTORS
3801 },
3802 .dec = {
3803 .vecs = rfc7539esp_dec_tv_template,
3804 .count = RFC7539ESP_DEC_TEST_VECTORS
3805 },
3806 }
3807 }
3808 }, {
3809 .alg = "rmd128",
3810 .test = alg_test_hash,
3811 .suite = {
3812 .hash = {
3813 .vecs = rmd128_tv_template,
3814 .count = RMD128_TEST_VECTORS
3815 }
3816 }
3817 }, {
3818 .alg = "rmd160",
3819 .test = alg_test_hash,
3820 .suite = {
3821 .hash = {
3822 .vecs = rmd160_tv_template,
3823 .count = RMD160_TEST_VECTORS
3824 }
3825 }
3826 }, {
3827 .alg = "rmd256",
3828 .test = alg_test_hash,
3829 .suite = {
3830 .hash = {
3831 .vecs = rmd256_tv_template,
3832 .count = RMD256_TEST_VECTORS
3833 }
3834 }
3835 }, {
3836 .alg = "rmd320",
3837 .test = alg_test_hash,
3838 .suite = {
3839 .hash = {
3840 .vecs = rmd320_tv_template,
3841 .count = RMD320_TEST_VECTORS
3842 }
3843 }
3844 }, {
3845 .alg = "rsa",
3846 .test = alg_test_akcipher,
3847 .fips_allowed = 1,
3848 .suite = {
3849 .akcipher = {
3850 .vecs = rsa_tv_template,
3851 .count = RSA_TEST_VECTORS
3852 }
3853 }
3854 }, {
3855 .alg = "salsa20",
3856 .test = alg_test_skcipher,
3857 .suite = {
3858 .cipher = {
3859 .enc = {
3860 .vecs = salsa20_stream_enc_tv_template,
3861 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3862 }
3863 }
3864 }
3865 }, {
3866 .alg = "sha1",
3867 .test = alg_test_hash,
3868 .fips_allowed = 1,
3869 .suite = {
3870 .hash = {
3871 .vecs = sha1_tv_template,
3872 .count = SHA1_TEST_VECTORS
3873 }
3874 }
3875 }, {
3876 .alg = "sha224",
3877 .test = alg_test_hash,
3878 .fips_allowed = 1,
3879 .suite = {
3880 .hash = {
3881 .vecs = sha224_tv_template,
3882 .count = SHA224_TEST_VECTORS
3883 }
3884 }
3885 }, {
3886 .alg = "sha256",
3887 .test = alg_test_hash,
3888 .fips_allowed = 1,
3889 .suite = {
3890 .hash = {
3891 .vecs = sha256_tv_template,
3892 .count = SHA256_TEST_VECTORS
3893 }
3894 }
3895 }, {
3896 .alg = "sha3-224",
3897 .test = alg_test_hash,
3898 .fips_allowed = 1,
3899 .suite = {
3900 .hash = {
3901 .vecs = sha3_224_tv_template,
3902 .count = SHA3_224_TEST_VECTORS
3903 }
3904 }
3905 }, {
3906 .alg = "sha3-256",
3907 .test = alg_test_hash,
3908 .fips_allowed = 1,
3909 .suite = {
3910 .hash = {
3911 .vecs = sha3_256_tv_template,
3912 .count = SHA3_256_TEST_VECTORS
3913 }
3914 }
3915 }, {
3916 .alg = "sha3-384",
3917 .test = alg_test_hash,
3918 .fips_allowed = 1,
3919 .suite = {
3920 .hash = {
3921 .vecs = sha3_384_tv_template,
3922 .count = SHA3_384_TEST_VECTORS
3923 }
3924 }
3925 }, {
3926 .alg = "sha3-512",
3927 .test = alg_test_hash,
3928 .fips_allowed = 1,
3929 .suite = {
3930 .hash = {
3931 .vecs = sha3_512_tv_template,
3932 .count = SHA3_512_TEST_VECTORS
3933 }
3934 }
3935 }, {
3936 .alg = "sha384",
3937 .test = alg_test_hash,
3938 .fips_allowed = 1,
3939 .suite = {
3940 .hash = {
3941 .vecs = sha384_tv_template,
3942 .count = SHA384_TEST_VECTORS
3943 }
3944 }
3945 }, {
3946 .alg = "sha512",
3947 .test = alg_test_hash,
3948 .fips_allowed = 1,
3949 .suite = {
3950 .hash = {
3951 .vecs = sha512_tv_template,
3952 .count = SHA512_TEST_VECTORS
3953 }
3954 }
3955 }, {
3956 .alg = "tgr128",
3957 .test = alg_test_hash,
3958 .suite = {
3959 .hash = {
3960 .vecs = tgr128_tv_template,
3961 .count = TGR128_TEST_VECTORS
3962 }
3963 }
3964 }, {
3965 .alg = "tgr160",
3966 .test = alg_test_hash,
3967 .suite = {
3968 .hash = {
3969 .vecs = tgr160_tv_template,
3970 .count = TGR160_TEST_VECTORS
3971 }
3972 }
3973 }, {
3974 .alg = "tgr192",
3975 .test = alg_test_hash,
3976 .suite = {
3977 .hash = {
3978 .vecs = tgr192_tv_template,
3979 .count = TGR192_TEST_VECTORS
3980 }
3981 }
3982 }, {
3983 .alg = "vmac(aes)",
3984 .test = alg_test_hash,
3985 .suite = {
3986 .hash = {
3987 .vecs = aes_vmac128_tv_template,
3988 .count = VMAC_AES_TEST_VECTORS
3989 }
3990 }
3991 }, {
3992 .alg = "wp256",
3993 .test = alg_test_hash,
3994 .suite = {
3995 .hash = {
3996 .vecs = wp256_tv_template,
3997 .count = WP256_TEST_VECTORS
3998 }
3999 }
4000 }, {
4001 .alg = "wp384",
4002 .test = alg_test_hash,
4003 .suite = {
4004 .hash = {
4005 .vecs = wp384_tv_template,
4006 .count = WP384_TEST_VECTORS
4007 }
4008 }
4009 }, {
4010 .alg = "wp512",
4011 .test = alg_test_hash,
4012 .suite = {
4013 .hash = {
4014 .vecs = wp512_tv_template,
4015 .count = WP512_TEST_VECTORS
4016 }
4017 }
4018 }, {
4019 .alg = "xcbc(aes)",
4020 .test = alg_test_hash,
4021 .suite = {
4022 .hash = {
4023 .vecs = aes_xcbc128_tv_template,
4024 .count = XCBC_AES_TEST_VECTORS
4025 }
4026 }
4027 }, {
4028 .alg = "xts(aes)",
4029 .test = alg_test_skcipher,
4030 .fips_allowed = 1,
4031 .suite = {
4032 .cipher = {
4033 .enc = {
4034 .vecs = aes_xts_enc_tv_template,
4035 .count = AES_XTS_ENC_TEST_VECTORS
4036 },
4037 .dec = {
4038 .vecs = aes_xts_dec_tv_template,
4039 .count = AES_XTS_DEC_TEST_VECTORS
4040 }
4041 }
4042 }
4043 }, {
4044 .alg = "xts(camellia)",
4045 .test = alg_test_skcipher,
4046 .suite = {
4047 .cipher = {
4048 .enc = {
4049 .vecs = camellia_xts_enc_tv_template,
4050 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
4051 },
4052 .dec = {
4053 .vecs = camellia_xts_dec_tv_template,
4054 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
4055 }
4056 }
4057 }
4058 }, {
4059 .alg = "xts(cast6)",
4060 .test = alg_test_skcipher,
4061 .suite = {
4062 .cipher = {
4063 .enc = {
4064 .vecs = cast6_xts_enc_tv_template,
4065 .count = CAST6_XTS_ENC_TEST_VECTORS
4066 },
4067 .dec = {
4068 .vecs = cast6_xts_dec_tv_template,
4069 .count = CAST6_XTS_DEC_TEST_VECTORS
4070 }
4071 }
4072 }
4073 }, {
4074 .alg = "xts(serpent)",
4075 .test = alg_test_skcipher,
4076 .suite = {
4077 .cipher = {
4078 .enc = {
4079 .vecs = serpent_xts_enc_tv_template,
4080 .count = SERPENT_XTS_ENC_TEST_VECTORS
4081 },
4082 .dec = {
4083 .vecs = serpent_xts_dec_tv_template,
4084 .count = SERPENT_XTS_DEC_TEST_VECTORS
4085 }
4086 }
4087 }
4088 }, {
4089 .alg = "xts(twofish)",
4090 .test = alg_test_skcipher,
4091 .suite = {
4092 .cipher = {
4093 .enc = {
4094 .vecs = tf_xts_enc_tv_template,
4095 .count = TF_XTS_ENC_TEST_VECTORS
4096 },
4097 .dec = {
4098 .vecs = tf_xts_dec_tv_template,
4099 .count = TF_XTS_DEC_TEST_VECTORS
4100 }
4101 }
4102 }
4103 }
4104};
4105
/* Set once alg_test_descs_check_order() has validated the table ordering. */
static bool alg_test_descs_checked;
4107
4108static void alg_test_descs_check_order(void)
4109{
4110 int i;
4111
4112 /* only check once */
4113 if (alg_test_descs_checked)
4114 return;
4115
4116 alg_test_descs_checked = true;
4117
4118 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
4119 int diff = strcmp(alg_test_descs[i - 1].alg,
4120 alg_test_descs[i].alg);
4121
4122 if (WARN_ON(diff > 0)) {
4123 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
4124 alg_test_descs[i - 1].alg,
4125 alg_test_descs[i].alg);
4126 }
4127
4128 if (WARN_ON(diff == 0)) {
4129 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
4130 alg_test_descs[i].alg);
4131 }
4132 }
4133}
4134
4135static int alg_find_test(const char *alg)
4136{
4137 int start = 0;
4138 int end = ARRAY_SIZE(alg_test_descs);
4139
4140 while (start < end) {
4141 int i = (start + end) / 2;
4142 int diff = strcmp(alg_test_descs[i].alg, alg);
4143
4144 if (diff > 0) {
4145 end = i;
4146 continue;
4147 }
4148
4149 if (diff < 0) {
4150 start = i + 1;
4151 continue;
4152 }
4153
4154 return i;
4155 }
4156
4157 return -1;
4158}
4159
/*
 * alg_test() - run the registered self-tests for an algorithm instance.
 * @driver: name of the specific implementation (e.g. "aes-generic")
 * @alg:    name of the algorithm it implements (e.g. "aes")
 * @type:   crypto algorithm type flags
 * @mask:   crypto algorithm type mask
 *
 * Tests matching both @alg and @driver are run (once, if both resolve to
 * the same descriptor).  Bare CIPHER algorithms have no test vectors of
 * their own; they are exercised through the "ecb(<alg>)" descriptor.
 *
 * Return: 0 if all applicable tests passed or none exist; a negative errno
 * on test failure, or -EINVAL in FIPS mode for a non-approved algorithm.
 * In FIPS mode a test failure panics the kernel instead of returning.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* "cryptomgr.notests" disables self-tests, except in FIPS mode. */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		/* Bare ciphers are tested via their ecb() wrapping. */
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Look up tests keyed on the generic name and on the driver name. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	/* j != i avoids running the same descriptor's test twice. */
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS requires a hard stop when a self-test fails. */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
4223
4224#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
4225
4226EXPORT_SYMBOL_GPL(alg_test);