/*
 * NOTE(extraction): this file was recovered from a gitweb page for
 * crypto/testmgr.c (blob 81519ca584735d06beda1b180668bade79f25f40);
 * the page title and navigation text have been removed.
 */
/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
#include <crypto/hash.h>
#include <crypto/rng.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>

#include "internal.h"
33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
35 /* a perfect nop */
36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
38 return 0;
41 #else
43 #include "testmgr.h"
46 * Need slab memory for testing (size in number of pages).
48 #define XBUFSIZE 8
51 * Indexes into the xbuf to simulate cross-page access.
53 #define IDX1 32
54 #define IDX2 32400
55 #define IDX3 1
56 #define IDX4 8193
57 #define IDX5 22222
58 #define IDX6 17101
59 #define IDX7 27333
60 #define IDX8 3000
63 * Used by test_cipher()
65 #define ENCRYPT 1
66 #define DECRYPT 0
68 struct tcrypt_result {
69 struct completion completion;
70 int err;
73 struct aead_test_suite {
74 struct {
75 struct aead_testvec *vecs;
76 unsigned int count;
77 } enc, dec;
80 struct cipher_test_suite {
81 struct {
82 struct cipher_testvec *vecs;
83 unsigned int count;
84 } enc, dec;
87 struct comp_test_suite {
88 struct {
89 struct comp_testvec *vecs;
90 unsigned int count;
91 } comp, decomp;
94 struct pcomp_test_suite {
95 struct {
96 struct pcomp_testvec *vecs;
97 unsigned int count;
98 } comp, decomp;
101 struct hash_test_suite {
102 struct hash_testvec *vecs;
103 unsigned int count;
106 struct cprng_test_suite {
107 struct cprng_testvec *vecs;
108 unsigned int count;
111 struct alg_test_desc {
112 const char *alg;
113 int (*test)(const struct alg_test_desc *desc, const char *driver,
114 u32 type, u32 mask);
115 int fips_allowed; /* set if alg is allowed in fips mode */
117 union {
118 struct aead_test_suite aead;
119 struct cipher_test_suite cipher;
120 struct comp_test_suite comp;
121 struct pcomp_test_suite pcomp;
122 struct hash_test_suite hash;
123 struct cprng_test_suite cprng;
124 } suite;
127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
129 static void hexdump(unsigned char *buf, unsigned int len)
131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
132 16, 1,
133 buf, len, false);
136 static void tcrypt_complete(struct crypto_async_request *req, int err)
138 struct tcrypt_result *res = req->data;
140 if (err == -EINPROGRESS)
141 return;
143 res->err = err;
144 complete(&res->completion);
147 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
149 int i;
151 for (i = 0; i < XBUFSIZE; i++) {
152 buf[i] = (void *)__get_free_page(GFP_KERNEL);
153 if (!buf[i])
154 goto err_free_buf;
157 return 0;
159 err_free_buf:
160 while (i-- > 0)
161 free_page((unsigned long)buf[i]);
163 return -ENOMEM;
166 static void testmgr_free_buf(char *buf[XBUFSIZE])
168 int i;
170 for (i = 0; i < XBUFSIZE; i++)
171 free_page((unsigned long)buf[i]);
174 static int do_one_async_hash_op(struct ahash_request *req,
175 struct tcrypt_result *tr,
176 int ret)
178 if (ret == -EINPROGRESS || ret == -EBUSY) {
179 ret = wait_for_completion_interruptible(&tr->completion);
180 if (!ret)
181 ret = tr->err;
182 reinit_completion(&tr->completion);
184 return ret;
187 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
188 unsigned int tcount, bool use_digest,
189 const int align_offset)
191 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
192 unsigned int i, j, k, temp;
193 struct scatterlist sg[8];
194 char result[64];
195 struct ahash_request *req;
196 struct tcrypt_result tresult;
197 void *hash_buff;
198 char *xbuf[XBUFSIZE];
199 int ret = -ENOMEM;
201 if (testmgr_alloc_buf(xbuf))
202 goto out_nobuf;
204 init_completion(&tresult.completion);
206 req = ahash_request_alloc(tfm, GFP_KERNEL);
207 if (!req) {
208 printk(KERN_ERR "alg: hash: Failed to allocate request for "
209 "%s\n", algo);
210 goto out_noreq;
212 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
213 tcrypt_complete, &tresult);
215 j = 0;
216 for (i = 0; i < tcount; i++) {
217 if (template[i].np)
218 continue;
220 ret = -EINVAL;
221 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
222 goto out;
224 j++;
225 memset(result, 0, 64);
227 hash_buff = xbuf[0];
228 hash_buff += align_offset;
230 memcpy(hash_buff, template[i].plaintext, template[i].psize);
231 sg_init_one(&sg[0], hash_buff, template[i].psize);
233 if (template[i].ksize) {
234 crypto_ahash_clear_flags(tfm, ~0);
235 ret = crypto_ahash_setkey(tfm, template[i].key,
236 template[i].ksize);
237 if (ret) {
238 printk(KERN_ERR "alg: hash: setkey failed on "
239 "test %d for %s: ret=%d\n", j, algo,
240 -ret);
241 goto out;
245 ahash_request_set_crypt(req, sg, result, template[i].psize);
246 if (use_digest) {
247 ret = do_one_async_hash_op(req, &tresult,
248 crypto_ahash_digest(req));
249 if (ret) {
250 pr_err("alg: hash: digest failed on test %d "
251 "for %s: ret=%d\n", j, algo, -ret);
252 goto out;
254 } else {
255 ret = do_one_async_hash_op(req, &tresult,
256 crypto_ahash_init(req));
257 if (ret) {
258 pr_err("alt: hash: init failed on test %d "
259 "for %s: ret=%d\n", j, algo, -ret);
260 goto out;
262 ret = do_one_async_hash_op(req, &tresult,
263 crypto_ahash_update(req));
264 if (ret) {
265 pr_err("alt: hash: update failed on test %d "
266 "for %s: ret=%d\n", j, algo, -ret);
267 goto out;
269 ret = do_one_async_hash_op(req, &tresult,
270 crypto_ahash_final(req));
271 if (ret) {
272 pr_err("alt: hash: final failed on test %d "
273 "for %s: ret=%d\n", j, algo, -ret);
274 goto out;
278 if (memcmp(result, template[i].digest,
279 crypto_ahash_digestsize(tfm))) {
280 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
281 j, algo);
282 hexdump(result, crypto_ahash_digestsize(tfm));
283 ret = -EINVAL;
284 goto out;
288 j = 0;
289 for (i = 0; i < tcount; i++) {
290 /* alignment tests are only done with continuous buffers */
291 if (align_offset != 0)
292 break;
294 if (template[i].np) {
295 j++;
296 memset(result, 0, 64);
298 temp = 0;
299 sg_init_table(sg, template[i].np);
300 ret = -EINVAL;
301 for (k = 0; k < template[i].np; k++) {
302 if (WARN_ON(offset_in_page(IDX[k]) +
303 template[i].tap[k] > PAGE_SIZE))
304 goto out;
305 sg_set_buf(&sg[k],
306 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
307 offset_in_page(IDX[k]),
308 template[i].plaintext + temp,
309 template[i].tap[k]),
310 template[i].tap[k]);
311 temp += template[i].tap[k];
314 if (template[i].ksize) {
315 crypto_ahash_clear_flags(tfm, ~0);
316 ret = crypto_ahash_setkey(tfm, template[i].key,
317 template[i].ksize);
319 if (ret) {
320 printk(KERN_ERR "alg: hash: setkey "
321 "failed on chunking test %d "
322 "for %s: ret=%d\n", j, algo,
323 -ret);
324 goto out;
328 ahash_request_set_crypt(req, sg, result,
329 template[i].psize);
330 ret = crypto_ahash_digest(req);
331 switch (ret) {
332 case 0:
333 break;
334 case -EINPROGRESS:
335 case -EBUSY:
336 ret = wait_for_completion_interruptible(
337 &tresult.completion);
338 if (!ret && !(ret = tresult.err)) {
339 reinit_completion(&tresult.completion);
340 break;
342 /* fall through */
343 default:
344 printk(KERN_ERR "alg: hash: digest failed "
345 "on chunking test %d for %s: "
346 "ret=%d\n", j, algo, -ret);
347 goto out;
350 if (memcmp(result, template[i].digest,
351 crypto_ahash_digestsize(tfm))) {
352 printk(KERN_ERR "alg: hash: Chunking test %d "
353 "failed for %s\n", j, algo);
354 hexdump(result, crypto_ahash_digestsize(tfm));
355 ret = -EINVAL;
356 goto out;
361 ret = 0;
363 out:
364 ahash_request_free(req);
365 out_noreq:
366 testmgr_free_buf(xbuf);
367 out_nobuf:
368 return ret;
371 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
372 unsigned int tcount, bool use_digest)
374 unsigned int alignmask;
375 int ret;
377 ret = __test_hash(tfm, template, tcount, use_digest, 0);
378 if (ret)
379 return ret;
381 /* test unaligned buffers, check with one byte offset */
382 ret = __test_hash(tfm, template, tcount, use_digest, 1);
383 if (ret)
384 return ret;
386 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
387 if (alignmask) {
388 /* Check if alignment mask for tfm is correctly set. */
389 ret = __test_hash(tfm, template, tcount, use_digest,
390 alignmask + 1);
391 if (ret)
392 return ret;
395 return 0;
398 static int __test_aead(struct crypto_aead *tfm, int enc,
399 struct aead_testvec *template, unsigned int tcount,
400 const bool diff_dst, const int align_offset)
402 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
403 unsigned int i, j, k, n, temp;
404 int ret = -ENOMEM;
405 char *q;
406 char *key;
407 struct aead_request *req;
408 struct scatterlist *sg;
409 struct scatterlist *asg;
410 struct scatterlist *sgout;
411 const char *e, *d;
412 struct tcrypt_result result;
413 unsigned int authsize;
414 void *input;
415 void *output;
416 void *assoc;
417 char iv[MAX_IVLEN];
418 char *xbuf[XBUFSIZE];
419 char *xoutbuf[XBUFSIZE];
420 char *axbuf[XBUFSIZE];
422 if (testmgr_alloc_buf(xbuf))
423 goto out_noxbuf;
424 if (testmgr_alloc_buf(axbuf))
425 goto out_noaxbuf;
427 if (diff_dst && testmgr_alloc_buf(xoutbuf))
428 goto out_nooutbuf;
430 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
431 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
432 if (!sg)
433 goto out_nosg;
434 asg = &sg[8];
435 sgout = &asg[8];
437 if (diff_dst)
438 d = "-ddst";
439 else
440 d = "";
442 if (enc == ENCRYPT)
443 e = "encryption";
444 else
445 e = "decryption";
447 init_completion(&result.completion);
449 req = aead_request_alloc(tfm, GFP_KERNEL);
450 if (!req) {
451 pr_err("alg: aead%s: Failed to allocate request for %s\n",
452 d, algo);
453 goto out;
456 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
457 tcrypt_complete, &result);
459 for (i = 0, j = 0; i < tcount; i++) {
460 if (!template[i].np) {
461 j++;
463 /* some templates have no input data but they will
464 * touch input
466 input = xbuf[0];
467 input += align_offset;
468 assoc = axbuf[0];
470 ret = -EINVAL;
471 if (WARN_ON(align_offset + template[i].ilen >
472 PAGE_SIZE || template[i].alen > PAGE_SIZE))
473 goto out;
475 memcpy(input, template[i].input, template[i].ilen);
476 memcpy(assoc, template[i].assoc, template[i].alen);
477 if (template[i].iv)
478 memcpy(iv, template[i].iv, MAX_IVLEN);
479 else
480 memset(iv, 0, MAX_IVLEN);
482 crypto_aead_clear_flags(tfm, ~0);
483 if (template[i].wk)
484 crypto_aead_set_flags(
485 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
487 key = template[i].key;
489 ret = crypto_aead_setkey(tfm, key,
490 template[i].klen);
491 if (!ret == template[i].fail) {
492 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
493 d, j, algo, crypto_aead_get_flags(tfm));
494 goto out;
495 } else if (ret)
496 continue;
498 authsize = abs(template[i].rlen - template[i].ilen);
499 ret = crypto_aead_setauthsize(tfm, authsize);
500 if (ret) {
501 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
502 d, authsize, j, algo);
503 goto out;
506 if (diff_dst) {
507 output = xoutbuf[0];
508 output += align_offset;
509 sg_init_one(&sg[0], input, template[i].ilen);
510 sg_init_one(&sgout[0], output,
511 template[i].rlen);
512 } else {
513 sg_init_one(&sg[0], input,
514 template[i].ilen +
515 (enc ? authsize : 0));
516 output = input;
519 sg_init_one(&asg[0], assoc, template[i].alen);
521 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
522 template[i].ilen, iv);
524 aead_request_set_assoc(req, asg, template[i].alen);
526 ret = enc ?
527 crypto_aead_encrypt(req) :
528 crypto_aead_decrypt(req);
530 switch (ret) {
531 case 0:
532 if (template[i].novrfy) {
533 /* verification was supposed to fail */
534 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
535 d, e, j, algo);
536 /* so really, we got a bad message */
537 ret = -EBADMSG;
538 goto out;
540 break;
541 case -EINPROGRESS:
542 case -EBUSY:
543 ret = wait_for_completion_interruptible(
544 &result.completion);
545 if (!ret && !(ret = result.err)) {
546 reinit_completion(&result.completion);
547 break;
549 case -EBADMSG:
550 if (template[i].novrfy)
551 /* verification failure was expected */
552 continue;
553 /* fall through */
554 default:
555 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
556 d, e, j, algo, -ret);
557 goto out;
560 q = output;
561 if (memcmp(q, template[i].result, template[i].rlen)) {
562 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
563 d, j, e, algo);
564 hexdump(q, template[i].rlen);
565 ret = -EINVAL;
566 goto out;
571 for (i = 0, j = 0; i < tcount; i++) {
572 /* alignment tests are only done with continuous buffers */
573 if (align_offset != 0)
574 break;
576 if (template[i].np) {
577 j++;
579 if (template[i].iv)
580 memcpy(iv, template[i].iv, MAX_IVLEN);
581 else
582 memset(iv, 0, MAX_IVLEN);
584 crypto_aead_clear_flags(tfm, ~0);
585 if (template[i].wk)
586 crypto_aead_set_flags(
587 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
588 key = template[i].key;
590 ret = crypto_aead_setkey(tfm, key, template[i].klen);
591 if (!ret == template[i].fail) {
592 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
593 d, j, algo, crypto_aead_get_flags(tfm));
594 goto out;
595 } else if (ret)
596 continue;
598 authsize = abs(template[i].rlen - template[i].ilen);
600 ret = -EINVAL;
601 sg_init_table(sg, template[i].np);
602 if (diff_dst)
603 sg_init_table(sgout, template[i].np);
604 for (k = 0, temp = 0; k < template[i].np; k++) {
605 if (WARN_ON(offset_in_page(IDX[k]) +
606 template[i].tap[k] > PAGE_SIZE))
607 goto out;
609 q = xbuf[IDX[k] >> PAGE_SHIFT] +
610 offset_in_page(IDX[k]);
612 memcpy(q, template[i].input + temp,
613 template[i].tap[k]);
615 sg_set_buf(&sg[k], q, template[i].tap[k]);
617 if (diff_dst) {
618 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
619 offset_in_page(IDX[k]);
621 memset(q, 0, template[i].tap[k]);
623 sg_set_buf(&sgout[k], q,
624 template[i].tap[k]);
627 n = template[i].tap[k];
628 if (k == template[i].np - 1 && enc)
629 n += authsize;
630 if (offset_in_page(q) + n < PAGE_SIZE)
631 q[n] = 0;
633 temp += template[i].tap[k];
636 ret = crypto_aead_setauthsize(tfm, authsize);
637 if (ret) {
638 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
639 d, authsize, j, algo);
640 goto out;
643 if (enc) {
644 if (WARN_ON(sg[k - 1].offset +
645 sg[k - 1].length + authsize >
646 PAGE_SIZE)) {
647 ret = -EINVAL;
648 goto out;
651 if (diff_dst)
652 sgout[k - 1].length += authsize;
653 else
654 sg[k - 1].length += authsize;
657 sg_init_table(asg, template[i].anp);
658 ret = -EINVAL;
659 for (k = 0, temp = 0; k < template[i].anp; k++) {
660 if (WARN_ON(offset_in_page(IDX[k]) +
661 template[i].atap[k] > PAGE_SIZE))
662 goto out;
663 sg_set_buf(&asg[k],
664 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
665 offset_in_page(IDX[k]),
666 template[i].assoc + temp,
667 template[i].atap[k]),
668 template[i].atap[k]);
669 temp += template[i].atap[k];
672 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
673 template[i].ilen,
674 iv);
676 aead_request_set_assoc(req, asg, template[i].alen);
678 ret = enc ?
679 crypto_aead_encrypt(req) :
680 crypto_aead_decrypt(req);
682 switch (ret) {
683 case 0:
684 if (template[i].novrfy) {
685 /* verification was supposed to fail */
686 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
687 d, e, j, algo);
688 /* so really, we got a bad message */
689 ret = -EBADMSG;
690 goto out;
692 break;
693 case -EINPROGRESS:
694 case -EBUSY:
695 ret = wait_for_completion_interruptible(
696 &result.completion);
697 if (!ret && !(ret = result.err)) {
698 reinit_completion(&result.completion);
699 break;
701 case -EBADMSG:
702 if (template[i].novrfy)
703 /* verification failure was expected */
704 continue;
705 /* fall through */
706 default:
707 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
708 d, e, j, algo, -ret);
709 goto out;
712 ret = -EINVAL;
713 for (k = 0, temp = 0; k < template[i].np; k++) {
714 if (diff_dst)
715 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
716 offset_in_page(IDX[k]);
717 else
718 q = xbuf[IDX[k] >> PAGE_SHIFT] +
719 offset_in_page(IDX[k]);
721 n = template[i].tap[k];
722 if (k == template[i].np - 1)
723 n += enc ? authsize : -authsize;
725 if (memcmp(q, template[i].result + temp, n)) {
726 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
727 d, j, e, k, algo);
728 hexdump(q, n);
729 goto out;
732 q += n;
733 if (k == template[i].np - 1 && !enc) {
734 if (!diff_dst &&
735 memcmp(q, template[i].input +
736 temp + n, authsize))
737 n = authsize;
738 else
739 n = 0;
740 } else {
741 for (n = 0; offset_in_page(q + n) &&
742 q[n]; n++)
745 if (n) {
746 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
747 d, j, e, k, algo, n);
748 hexdump(q, n);
749 goto out;
752 temp += template[i].tap[k];
757 ret = 0;
759 out:
760 aead_request_free(req);
761 kfree(sg);
762 out_nosg:
763 if (diff_dst)
764 testmgr_free_buf(xoutbuf);
765 out_nooutbuf:
766 testmgr_free_buf(axbuf);
767 out_noaxbuf:
768 testmgr_free_buf(xbuf);
769 out_noxbuf:
770 return ret;
773 static int test_aead(struct crypto_aead *tfm, int enc,
774 struct aead_testvec *template, unsigned int tcount)
776 unsigned int alignmask;
777 int ret;
779 /* test 'dst == src' case */
780 ret = __test_aead(tfm, enc, template, tcount, false, 0);
781 if (ret)
782 return ret;
784 /* test 'dst != src' case */
785 ret = __test_aead(tfm, enc, template, tcount, true, 0);
786 if (ret)
787 return ret;
789 /* test unaligned buffers, check with one byte offset */
790 ret = __test_aead(tfm, enc, template, tcount, true, 1);
791 if (ret)
792 return ret;
794 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
795 if (alignmask) {
796 /* Check if alignment mask for tfm is correctly set. */
797 ret = __test_aead(tfm, enc, template, tcount, true,
798 alignmask + 1);
799 if (ret)
800 return ret;
803 return 0;
806 static int test_cipher(struct crypto_cipher *tfm, int enc,
807 struct cipher_testvec *template, unsigned int tcount)
809 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
810 unsigned int i, j, k;
811 char *q;
812 const char *e;
813 void *data;
814 char *xbuf[XBUFSIZE];
815 int ret = -ENOMEM;
817 if (testmgr_alloc_buf(xbuf))
818 goto out_nobuf;
820 if (enc == ENCRYPT)
821 e = "encryption";
822 else
823 e = "decryption";
825 j = 0;
826 for (i = 0; i < tcount; i++) {
827 if (template[i].np)
828 continue;
830 j++;
832 ret = -EINVAL;
833 if (WARN_ON(template[i].ilen > PAGE_SIZE))
834 goto out;
836 data = xbuf[0];
837 memcpy(data, template[i].input, template[i].ilen);
839 crypto_cipher_clear_flags(tfm, ~0);
840 if (template[i].wk)
841 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
843 ret = crypto_cipher_setkey(tfm, template[i].key,
844 template[i].klen);
845 if (!ret == template[i].fail) {
846 printk(KERN_ERR "alg: cipher: setkey failed "
847 "on test %d for %s: flags=%x\n", j,
848 algo, crypto_cipher_get_flags(tfm));
849 goto out;
850 } else if (ret)
851 continue;
853 for (k = 0; k < template[i].ilen;
854 k += crypto_cipher_blocksize(tfm)) {
855 if (enc)
856 crypto_cipher_encrypt_one(tfm, data + k,
857 data + k);
858 else
859 crypto_cipher_decrypt_one(tfm, data + k,
860 data + k);
863 q = data;
864 if (memcmp(q, template[i].result, template[i].rlen)) {
865 printk(KERN_ERR "alg: cipher: Test %d failed "
866 "on %s for %s\n", j, e, algo);
867 hexdump(q, template[i].rlen);
868 ret = -EINVAL;
869 goto out;
873 ret = 0;
875 out:
876 testmgr_free_buf(xbuf);
877 out_nobuf:
878 return ret;
881 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
882 struct cipher_testvec *template, unsigned int tcount,
883 const bool diff_dst, const int align_offset)
885 const char *algo =
886 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
887 unsigned int i, j, k, n, temp;
888 char *q;
889 struct ablkcipher_request *req;
890 struct scatterlist sg[8];
891 struct scatterlist sgout[8];
892 const char *e, *d;
893 struct tcrypt_result result;
894 void *data;
895 char iv[MAX_IVLEN];
896 char *xbuf[XBUFSIZE];
897 char *xoutbuf[XBUFSIZE];
898 int ret = -ENOMEM;
900 if (testmgr_alloc_buf(xbuf))
901 goto out_nobuf;
903 if (diff_dst && testmgr_alloc_buf(xoutbuf))
904 goto out_nooutbuf;
906 if (diff_dst)
907 d = "-ddst";
908 else
909 d = "";
911 if (enc == ENCRYPT)
912 e = "encryption";
913 else
914 e = "decryption";
916 init_completion(&result.completion);
918 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
919 if (!req) {
920 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
921 d, algo);
922 goto out;
925 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
926 tcrypt_complete, &result);
928 j = 0;
929 for (i = 0; i < tcount; i++) {
930 if (template[i].iv)
931 memcpy(iv, template[i].iv, MAX_IVLEN);
932 else
933 memset(iv, 0, MAX_IVLEN);
935 if (!(template[i].np) || (template[i].also_non_np)) {
936 j++;
938 ret = -EINVAL;
939 if (WARN_ON(align_offset + template[i].ilen >
940 PAGE_SIZE))
941 goto out;
943 data = xbuf[0];
944 data += align_offset;
945 memcpy(data, template[i].input, template[i].ilen);
947 crypto_ablkcipher_clear_flags(tfm, ~0);
948 if (template[i].wk)
949 crypto_ablkcipher_set_flags(
950 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
952 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
953 template[i].klen);
954 if (!ret == template[i].fail) {
955 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
956 d, j, algo,
957 crypto_ablkcipher_get_flags(tfm));
958 goto out;
959 } else if (ret)
960 continue;
962 sg_init_one(&sg[0], data, template[i].ilen);
963 if (diff_dst) {
964 data = xoutbuf[0];
965 data += align_offset;
966 sg_init_one(&sgout[0], data, template[i].ilen);
969 ablkcipher_request_set_crypt(req, sg,
970 (diff_dst) ? sgout : sg,
971 template[i].ilen, iv);
972 ret = enc ?
973 crypto_ablkcipher_encrypt(req) :
974 crypto_ablkcipher_decrypt(req);
976 switch (ret) {
977 case 0:
978 break;
979 case -EINPROGRESS:
980 case -EBUSY:
981 ret = wait_for_completion_interruptible(
982 &result.completion);
983 if (!ret && !((ret = result.err))) {
984 reinit_completion(&result.completion);
985 break;
987 /* fall through */
988 default:
989 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
990 d, e, j, algo, -ret);
991 goto out;
994 q = data;
995 if (memcmp(q, template[i].result, template[i].rlen)) {
996 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
997 d, j, e, algo);
998 hexdump(q, template[i].rlen);
999 ret = -EINVAL;
1000 goto out;
1005 j = 0;
1006 for (i = 0; i < tcount; i++) {
1007 /* alignment tests are only done with continuous buffers */
1008 if (align_offset != 0)
1009 break;
1011 if (template[i].iv)
1012 memcpy(iv, template[i].iv, MAX_IVLEN);
1013 else
1014 memset(iv, 0, MAX_IVLEN);
1016 if (template[i].np) {
1017 j++;
1019 crypto_ablkcipher_clear_flags(tfm, ~0);
1020 if (template[i].wk)
1021 crypto_ablkcipher_set_flags(
1022 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1024 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1025 template[i].klen);
1026 if (!ret == template[i].fail) {
1027 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1028 d, j, algo,
1029 crypto_ablkcipher_get_flags(tfm));
1030 goto out;
1031 } else if (ret)
1032 continue;
1034 temp = 0;
1035 ret = -EINVAL;
1036 sg_init_table(sg, template[i].np);
1037 if (diff_dst)
1038 sg_init_table(sgout, template[i].np);
1039 for (k = 0; k < template[i].np; k++) {
1040 if (WARN_ON(offset_in_page(IDX[k]) +
1041 template[i].tap[k] > PAGE_SIZE))
1042 goto out;
1044 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1045 offset_in_page(IDX[k]);
1047 memcpy(q, template[i].input + temp,
1048 template[i].tap[k]);
1050 if (offset_in_page(q) + template[i].tap[k] <
1051 PAGE_SIZE)
1052 q[template[i].tap[k]] = 0;
1054 sg_set_buf(&sg[k], q, template[i].tap[k]);
1055 if (diff_dst) {
1056 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1057 offset_in_page(IDX[k]);
1059 sg_set_buf(&sgout[k], q,
1060 template[i].tap[k]);
1062 memset(q, 0, template[i].tap[k]);
1063 if (offset_in_page(q) +
1064 template[i].tap[k] < PAGE_SIZE)
1065 q[template[i].tap[k]] = 0;
1068 temp += template[i].tap[k];
1071 ablkcipher_request_set_crypt(req, sg,
1072 (diff_dst) ? sgout : sg,
1073 template[i].ilen, iv);
1075 ret = enc ?
1076 crypto_ablkcipher_encrypt(req) :
1077 crypto_ablkcipher_decrypt(req);
1079 switch (ret) {
1080 case 0:
1081 break;
1082 case -EINPROGRESS:
1083 case -EBUSY:
1084 ret = wait_for_completion_interruptible(
1085 &result.completion);
1086 if (!ret && !((ret = result.err))) {
1087 reinit_completion(&result.completion);
1088 break;
1090 /* fall through */
1091 default:
1092 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1093 d, e, j, algo, -ret);
1094 goto out;
1097 temp = 0;
1098 ret = -EINVAL;
1099 for (k = 0; k < template[i].np; k++) {
1100 if (diff_dst)
1101 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1102 offset_in_page(IDX[k]);
1103 else
1104 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1105 offset_in_page(IDX[k]);
1107 if (memcmp(q, template[i].result + temp,
1108 template[i].tap[k])) {
1109 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1110 d, j, e, k, algo);
1111 hexdump(q, template[i].tap[k]);
1112 goto out;
1115 q += template[i].tap[k];
1116 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1118 if (n) {
1119 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1120 d, j, e, k, algo, n);
1121 hexdump(q, n);
1122 goto out;
1124 temp += template[i].tap[k];
1129 ret = 0;
1131 out:
1132 ablkcipher_request_free(req);
1133 if (diff_dst)
1134 testmgr_free_buf(xoutbuf);
1135 out_nooutbuf:
1136 testmgr_free_buf(xbuf);
1137 out_nobuf:
1138 return ret;
1141 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1142 struct cipher_testvec *template, unsigned int tcount)
1144 unsigned int alignmask;
1145 int ret;
1147 /* test 'dst == src' case */
1148 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1149 if (ret)
1150 return ret;
1152 /* test 'dst != src' case */
1153 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1154 if (ret)
1155 return ret;
1157 /* test unaligned buffers, check with one byte offset */
1158 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1159 if (ret)
1160 return ret;
1162 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1163 if (alignmask) {
1164 /* Check if alignment mask for tfm is correctly set. */
1165 ret = __test_skcipher(tfm, enc, template, tcount, true,
1166 alignmask + 1);
1167 if (ret)
1168 return ret;
1171 return 0;
1174 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1175 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1177 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1178 unsigned int i;
1179 char result[COMP_BUF_SIZE];
1180 int ret;
1182 for (i = 0; i < ctcount; i++) {
1183 int ilen;
1184 unsigned int dlen = COMP_BUF_SIZE;
1186 memset(result, 0, sizeof (result));
1188 ilen = ctemplate[i].inlen;
1189 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1190 ilen, result, &dlen);
1191 if (ret) {
1192 printk(KERN_ERR "alg: comp: compression failed "
1193 "on test %d for %s: ret=%d\n", i + 1, algo,
1194 -ret);
1195 goto out;
1198 if (dlen != ctemplate[i].outlen) {
1199 printk(KERN_ERR "alg: comp: Compression test %d "
1200 "failed for %s: output len = %d\n", i + 1, algo,
1201 dlen);
1202 ret = -EINVAL;
1203 goto out;
1206 if (memcmp(result, ctemplate[i].output, dlen)) {
1207 printk(KERN_ERR "alg: comp: Compression test %d "
1208 "failed for %s\n", i + 1, algo);
1209 hexdump(result, dlen);
1210 ret = -EINVAL;
1211 goto out;
1215 for (i = 0; i < dtcount; i++) {
1216 int ilen;
1217 unsigned int dlen = COMP_BUF_SIZE;
1219 memset(result, 0, sizeof (result));
1221 ilen = dtemplate[i].inlen;
1222 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1223 ilen, result, &dlen);
1224 if (ret) {
1225 printk(KERN_ERR "alg: comp: decompression failed "
1226 "on test %d for %s: ret=%d\n", i + 1, algo,
1227 -ret);
1228 goto out;
1231 if (dlen != dtemplate[i].outlen) {
1232 printk(KERN_ERR "alg: comp: Decompression test %d "
1233 "failed for %s: output len = %d\n", i + 1, algo,
1234 dlen);
1235 ret = -EINVAL;
1236 goto out;
1239 if (memcmp(result, dtemplate[i].output, dlen)) {
1240 printk(KERN_ERR "alg: comp: Decompression test %d "
1241 "failed for %s\n", i + 1, algo);
1242 hexdump(result, dlen);
1243 ret = -EINVAL;
1244 goto out;
1248 ret = 0;
1250 out:
1251 return ret;
1254 static int test_pcomp(struct crypto_pcomp *tfm,
1255 struct pcomp_testvec *ctemplate,
1256 struct pcomp_testvec *dtemplate, int ctcount,
1257 int dtcount)
1259 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1260 unsigned int i;
1261 char result[COMP_BUF_SIZE];
1262 int res;
1264 for (i = 0; i < ctcount; i++) {
1265 struct comp_request req;
1266 unsigned int produced = 0;
1268 res = crypto_compress_setup(tfm, ctemplate[i].params,
1269 ctemplate[i].paramsize);
1270 if (res) {
1271 pr_err("alg: pcomp: compression setup failed on test "
1272 "%d for %s: error=%d\n", i + 1, algo, res);
1273 return res;
1276 res = crypto_compress_init(tfm);
1277 if (res) {
1278 pr_err("alg: pcomp: compression init failed on test "
1279 "%d for %s: error=%d\n", i + 1, algo, res);
1280 return res;
1283 memset(result, 0, sizeof(result));
1285 req.next_in = ctemplate[i].input;
1286 req.avail_in = ctemplate[i].inlen / 2;
1287 req.next_out = result;
1288 req.avail_out = ctemplate[i].outlen / 2;
1290 res = crypto_compress_update(tfm, &req);
1291 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1292 pr_err("alg: pcomp: compression update failed on test "
1293 "%d for %s: error=%d\n", i + 1, algo, res);
1294 return res;
1296 if (res > 0)
1297 produced += res;
1299 /* Add remaining input data */
1300 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1302 res = crypto_compress_update(tfm, &req);
1303 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1304 pr_err("alg: pcomp: compression update failed on test "
1305 "%d for %s: error=%d\n", i + 1, algo, res);
1306 return res;
1308 if (res > 0)
1309 produced += res;
1311 /* Provide remaining output space */
1312 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1314 res = crypto_compress_final(tfm, &req);
1315 if (res < 0) {
1316 pr_err("alg: pcomp: compression final failed on test "
1317 "%d for %s: error=%d\n", i + 1, algo, res);
1318 return res;
1320 produced += res;
1322 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1323 pr_err("alg: comp: Compression test %d failed for %s: "
1324 "output len = %d (expected %d)\n", i + 1, algo,
1325 COMP_BUF_SIZE - req.avail_out,
1326 ctemplate[i].outlen);
1327 return -EINVAL;
1330 if (produced != ctemplate[i].outlen) {
1331 pr_err("alg: comp: Compression test %d failed for %s: "
1332 "returned len = %u (expected %d)\n", i + 1,
1333 algo, produced, ctemplate[i].outlen);
1334 return -EINVAL;
1337 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1338 pr_err("alg: pcomp: Compression test %d failed for "
1339 "%s\n", i + 1, algo);
1340 hexdump(result, ctemplate[i].outlen);
1341 return -EINVAL;
1345 for (i = 0; i < dtcount; i++) {
1346 struct comp_request req;
1347 unsigned int produced = 0;
1349 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1350 dtemplate[i].paramsize);
1351 if (res) {
1352 pr_err("alg: pcomp: decompression setup failed on "
1353 "test %d for %s: error=%d\n", i + 1, algo, res);
1354 return res;
1357 res = crypto_decompress_init(tfm);
1358 if (res) {
1359 pr_err("alg: pcomp: decompression init failed on test "
1360 "%d for %s: error=%d\n", i + 1, algo, res);
1361 return res;
1364 memset(result, 0, sizeof(result));
1366 req.next_in = dtemplate[i].input;
1367 req.avail_in = dtemplate[i].inlen / 2;
1368 req.next_out = result;
1369 req.avail_out = dtemplate[i].outlen / 2;
1371 res = crypto_decompress_update(tfm, &req);
1372 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1373 pr_err("alg: pcomp: decompression update failed on "
1374 "test %d for %s: error=%d\n", i + 1, algo, res);
1375 return res;
1377 if (res > 0)
1378 produced += res;
1380 /* Add remaining input data */
1381 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1383 res = crypto_decompress_update(tfm, &req);
1384 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1385 pr_err("alg: pcomp: decompression update failed on "
1386 "test %d for %s: error=%d\n", i + 1, algo, res);
1387 return res;
1389 if (res > 0)
1390 produced += res;
1392 /* Provide remaining output space */
1393 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1395 res = crypto_decompress_final(tfm, &req);
1396 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1397 pr_err("alg: pcomp: decompression final failed on "
1398 "test %d for %s: error=%d\n", i + 1, algo, res);
1399 return res;
1401 if (res > 0)
1402 produced += res;
1404 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1405 pr_err("alg: comp: Decompression test %d failed for "
1406 "%s: output len = %d (expected %d)\n", i + 1,
1407 algo, COMP_BUF_SIZE - req.avail_out,
1408 dtemplate[i].outlen);
1409 return -EINVAL;
1412 if (produced != dtemplate[i].outlen) {
1413 pr_err("alg: comp: Decompression test %d failed for "
1414 "%s: returned len = %u (expected %d)\n", i + 1,
1415 algo, produced, dtemplate[i].outlen);
1416 return -EINVAL;
1419 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1420 pr_err("alg: pcomp: Decompression test %d failed for "
1421 "%s\n", i + 1, algo);
1422 hexdump(result, dtemplate[i].outlen);
1423 return -EINVAL;
1427 return 0;
1431 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1432 unsigned int tcount)
1434 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1435 int err = 0, i, j, seedsize;
1436 u8 *seed;
1437 char result[32];
1439 seedsize = crypto_rng_seedsize(tfm);
1441 seed = kmalloc(seedsize, GFP_KERNEL);
1442 if (!seed) {
1443 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1444 "for %s\n", algo);
1445 return -ENOMEM;
1448 for (i = 0; i < tcount; i++) {
1449 memset(result, 0, 32);
1451 memcpy(seed, template[i].v, template[i].vlen);
1452 memcpy(seed + template[i].vlen, template[i].key,
1453 template[i].klen);
1454 memcpy(seed + template[i].vlen + template[i].klen,
1455 template[i].dt, template[i].dtlen);
1457 err = crypto_rng_reset(tfm, seed, seedsize);
1458 if (err) {
1459 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1460 "for %s\n", algo);
1461 goto out;
1464 for (j = 0; j < template[i].loops; j++) {
1465 err = crypto_rng_get_bytes(tfm, result,
1466 template[i].rlen);
1467 if (err != template[i].rlen) {
1468 printk(KERN_ERR "alg: cprng: Failed to obtain "
1469 "the correct amount of random data for "
1470 "%s (requested %d, got %d)\n", algo,
1471 template[i].rlen, err);
1472 goto out;
1476 err = memcmp(result, template[i].result,
1477 template[i].rlen);
1478 if (err) {
1479 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1480 i, algo);
1481 hexdump(result, template[i].rlen);
1482 err = -EINVAL;
1483 goto out;
1487 out:
1488 kfree(seed);
1489 return err;
1492 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1493 u32 type, u32 mask)
1495 struct crypto_aead *tfm;
1496 int err = 0;
1498 tfm = crypto_alloc_aead(driver, type, mask);
1499 if (IS_ERR(tfm)) {
1500 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1501 "%ld\n", driver, PTR_ERR(tfm));
1502 return PTR_ERR(tfm);
1505 if (desc->suite.aead.enc.vecs) {
1506 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1507 desc->suite.aead.enc.count);
1508 if (err)
1509 goto out;
1512 if (!err && desc->suite.aead.dec.vecs)
1513 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1514 desc->suite.aead.dec.count);
1516 out:
1517 crypto_free_aead(tfm);
1518 return err;
1521 static int alg_test_cipher(const struct alg_test_desc *desc,
1522 const char *driver, u32 type, u32 mask)
1524 struct crypto_cipher *tfm;
1525 int err = 0;
1527 tfm = crypto_alloc_cipher(driver, type, mask);
1528 if (IS_ERR(tfm)) {
1529 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1530 "%s: %ld\n", driver, PTR_ERR(tfm));
1531 return PTR_ERR(tfm);
1534 if (desc->suite.cipher.enc.vecs) {
1535 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1536 desc->suite.cipher.enc.count);
1537 if (err)
1538 goto out;
1541 if (desc->suite.cipher.dec.vecs)
1542 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1543 desc->suite.cipher.dec.count);
1545 out:
1546 crypto_free_cipher(tfm);
1547 return err;
1550 static int alg_test_skcipher(const struct alg_test_desc *desc,
1551 const char *driver, u32 type, u32 mask)
1553 struct crypto_ablkcipher *tfm;
1554 int err = 0;
1556 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1557 if (IS_ERR(tfm)) {
1558 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1559 "%s: %ld\n", driver, PTR_ERR(tfm));
1560 return PTR_ERR(tfm);
1563 if (desc->suite.cipher.enc.vecs) {
1564 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1565 desc->suite.cipher.enc.count);
1566 if (err)
1567 goto out;
1570 if (desc->suite.cipher.dec.vecs)
1571 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1572 desc->suite.cipher.dec.count);
1574 out:
1575 crypto_free_ablkcipher(tfm);
1576 return err;
1579 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1580 u32 type, u32 mask)
1582 struct crypto_comp *tfm;
1583 int err;
1585 tfm = crypto_alloc_comp(driver, type, mask);
1586 if (IS_ERR(tfm)) {
1587 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1588 "%ld\n", driver, PTR_ERR(tfm));
1589 return PTR_ERR(tfm);
1592 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1593 desc->suite.comp.decomp.vecs,
1594 desc->suite.comp.comp.count,
1595 desc->suite.comp.decomp.count);
1597 crypto_free_comp(tfm);
1598 return err;
1601 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1602 u32 type, u32 mask)
1604 struct crypto_pcomp *tfm;
1605 int err;
1607 tfm = crypto_alloc_pcomp(driver, type, mask);
1608 if (IS_ERR(tfm)) {
1609 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1610 driver, PTR_ERR(tfm));
1611 return PTR_ERR(tfm);
1614 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1615 desc->suite.pcomp.decomp.vecs,
1616 desc->suite.pcomp.comp.count,
1617 desc->suite.pcomp.decomp.count);
1619 crypto_free_pcomp(tfm);
1620 return err;
1623 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1624 u32 type, u32 mask)
1626 struct crypto_ahash *tfm;
1627 int err;
1629 tfm = crypto_alloc_ahash(driver, type, mask);
1630 if (IS_ERR(tfm)) {
1631 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1632 "%ld\n", driver, PTR_ERR(tfm));
1633 return PTR_ERR(tfm);
1636 err = test_hash(tfm, desc->suite.hash.vecs,
1637 desc->suite.hash.count, true);
1638 if (!err)
1639 err = test_hash(tfm, desc->suite.hash.vecs,
1640 desc->suite.hash.count, false);
1642 crypto_free_ahash(tfm);
1643 return err;
/*
 * Extra sanity check for crc32c beyond the generic hash vectors: seed the
 * shash context directly with a magic value and verify that finalizing
 * with no input data yields the bit-inverted seed (the comparison below
 * shows final() is expected to return ~seed).
 */
static int alg_test_crc32c(const struct alg_test_desc *desc,
			   const char *driver, u32 type, u32 mask)
{
	struct crypto_shash *tfm;
	u32 val;
	int err;

	/* First run the regular test vectors through the ahash interface. */
	err = alg_test_hash(desc, driver, type, mask);
	if (err)
		goto out;

	tfm = crypto_alloc_shash(driver, type, mask);
	if (IS_ERR(tfm)) {
		printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
		       "%ld\n", driver, PTR_ERR(tfm));
		err = PTR_ERR(tfm);
		goto out;
	}

	do {
		/*
		 * On-stack shash descriptor with room for the algorithm's
		 * state; over-allocated and aligned so the context area
		 * meets CRYPTO_MINALIGN.
		 */
		char sdesc[sizeof(struct shash_desc)
			   + crypto_shash_descsize(tfm)
			   + CRYPTO_MINALIGN] CRYPTO_MINALIGN_ATTR;
		struct shash_desc *shash = (struct shash_desc *)sdesc;
		u32 *ctx = (u32 *)shash_desc_ctx(shash);

		shash->tfm = tfm;
		shash->flags = 0;

		/*
		 * Seed the running CRC with an arbitrary magic constant.
		 * NOTE(review): le32_to_cpu() on a host-order constant looks
		 * intended to store the seed in the context's on-disk
		 * (little-endian) layout — confirm against the crc32c
		 * implementation's context format.
		 */
		*ctx = le32_to_cpu(420553207);
		err = crypto_shash_final(shash, (u8 *)&val);
		if (err) {
			printk(KERN_ERR "alg: crc32c: Operation failed for "
			       "%s: %d\n", driver, err);
			break;
		}

		/* Finalization must produce the inverted seed value. */
		if (val != ~420553207) {
			printk(KERN_ERR "alg: crc32c: Test failed for %s: "
			       "%d\n", driver, val);
			err = -EINVAL;
		}
	} while (0);

	crypto_free_shash(tfm);

out:
	return err;
}
1696 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1697 u32 type, u32 mask)
1699 struct crypto_rng *rng;
1700 int err;
1702 rng = crypto_alloc_rng(driver, type, mask);
1703 if (IS_ERR(rng)) {
1704 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1705 "%ld\n", driver, PTR_ERR(rng));
1706 return PTR_ERR(rng);
1709 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1711 crypto_free_rng(rng);
1713 return err;
/*
 * No-op test: always reports success. Used for entries in alg_test_descs[]
 * (internal "__driver-*"/cryptd helper implementations and null transforms)
 * that need a list entry but no vectors of their own.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			 const char *driver, u32 type, u32 mask)
{
	return 0;
}
1722 /* Please keep this list sorted by algorithm name. */
1723 static const struct alg_test_desc alg_test_descs[] = {
1725 .alg = "__cbc-cast5-avx",
1726 .test = alg_test_null,
1727 }, {
1728 .alg = "__cbc-cast6-avx",
1729 .test = alg_test_null,
1730 }, {
1731 .alg = "__cbc-serpent-avx",
1732 .test = alg_test_null,
1733 }, {
1734 .alg = "__cbc-serpent-avx2",
1735 .test = alg_test_null,
1736 }, {
1737 .alg = "__cbc-serpent-sse2",
1738 .test = alg_test_null,
1739 }, {
1740 .alg = "__cbc-twofish-avx",
1741 .test = alg_test_null,
1742 }, {
1743 .alg = "__driver-cbc-aes-aesni",
1744 .test = alg_test_null,
1745 .fips_allowed = 1,
1746 }, {
1747 .alg = "__driver-cbc-camellia-aesni",
1748 .test = alg_test_null,
1749 }, {
1750 .alg = "__driver-cbc-camellia-aesni-avx2",
1751 .test = alg_test_null,
1752 }, {
1753 .alg = "__driver-cbc-cast5-avx",
1754 .test = alg_test_null,
1755 }, {
1756 .alg = "__driver-cbc-cast6-avx",
1757 .test = alg_test_null,
1758 }, {
1759 .alg = "__driver-cbc-serpent-avx",
1760 .test = alg_test_null,
1761 }, {
1762 .alg = "__driver-cbc-serpent-avx2",
1763 .test = alg_test_null,
1764 }, {
1765 .alg = "__driver-cbc-serpent-sse2",
1766 .test = alg_test_null,
1767 }, {
1768 .alg = "__driver-cbc-twofish-avx",
1769 .test = alg_test_null,
1770 }, {
1771 .alg = "__driver-ecb-aes-aesni",
1772 .test = alg_test_null,
1773 .fips_allowed = 1,
1774 }, {
1775 .alg = "__driver-ecb-camellia-aesni",
1776 .test = alg_test_null,
1777 }, {
1778 .alg = "__driver-ecb-camellia-aesni-avx2",
1779 .test = alg_test_null,
1780 }, {
1781 .alg = "__driver-ecb-cast5-avx",
1782 .test = alg_test_null,
1783 }, {
1784 .alg = "__driver-ecb-cast6-avx",
1785 .test = alg_test_null,
1786 }, {
1787 .alg = "__driver-ecb-serpent-avx",
1788 .test = alg_test_null,
1789 }, {
1790 .alg = "__driver-ecb-serpent-avx2",
1791 .test = alg_test_null,
1792 }, {
1793 .alg = "__driver-ecb-serpent-sse2",
1794 .test = alg_test_null,
1795 }, {
1796 .alg = "__driver-ecb-twofish-avx",
1797 .test = alg_test_null,
1798 }, {
1799 .alg = "__ghash-pclmulqdqni",
1800 .test = alg_test_null,
1801 .fips_allowed = 1,
1802 }, {
1803 .alg = "ansi_cprng",
1804 .test = alg_test_cprng,
1805 .fips_allowed = 1,
1806 .suite = {
1807 .cprng = {
1808 .vecs = ansi_cprng_aes_tv_template,
1809 .count = ANSI_CPRNG_AES_TEST_VECTORS
1812 }, {
1813 .alg = "authenc(hmac(sha1),cbc(aes))",
1814 .test = alg_test_aead,
1815 .fips_allowed = 1,
1816 .suite = {
1817 .aead = {
1818 .enc = {
1819 .vecs = hmac_sha1_aes_cbc_enc_tv_template,
1820 .count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1824 }, {
1825 .alg = "authenc(hmac(sha256),cbc(aes))",
1826 .test = alg_test_aead,
1827 .fips_allowed = 1,
1828 .suite = {
1829 .aead = {
1830 .enc = {
1831 .vecs = hmac_sha256_aes_cbc_enc_tv_template,
1832 .count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1836 }, {
1837 .alg = "authenc(hmac(sha512),cbc(aes))",
1838 .test = alg_test_aead,
1839 .fips_allowed = 1,
1840 .suite = {
1841 .aead = {
1842 .enc = {
1843 .vecs = hmac_sha512_aes_cbc_enc_tv_template,
1844 .count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
1848 }, {
1849 .alg = "cbc(aes)",
1850 .test = alg_test_skcipher,
1851 .fips_allowed = 1,
1852 .suite = {
1853 .cipher = {
1854 .enc = {
1855 .vecs = aes_cbc_enc_tv_template,
1856 .count = AES_CBC_ENC_TEST_VECTORS
1858 .dec = {
1859 .vecs = aes_cbc_dec_tv_template,
1860 .count = AES_CBC_DEC_TEST_VECTORS
1864 }, {
1865 .alg = "cbc(anubis)",
1866 .test = alg_test_skcipher,
1867 .suite = {
1868 .cipher = {
1869 .enc = {
1870 .vecs = anubis_cbc_enc_tv_template,
1871 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1873 .dec = {
1874 .vecs = anubis_cbc_dec_tv_template,
1875 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1879 }, {
1880 .alg = "cbc(blowfish)",
1881 .test = alg_test_skcipher,
1882 .suite = {
1883 .cipher = {
1884 .enc = {
1885 .vecs = bf_cbc_enc_tv_template,
1886 .count = BF_CBC_ENC_TEST_VECTORS
1888 .dec = {
1889 .vecs = bf_cbc_dec_tv_template,
1890 .count = BF_CBC_DEC_TEST_VECTORS
1894 }, {
1895 .alg = "cbc(camellia)",
1896 .test = alg_test_skcipher,
1897 .suite = {
1898 .cipher = {
1899 .enc = {
1900 .vecs = camellia_cbc_enc_tv_template,
1901 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1903 .dec = {
1904 .vecs = camellia_cbc_dec_tv_template,
1905 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1909 }, {
1910 .alg = "cbc(cast5)",
1911 .test = alg_test_skcipher,
1912 .suite = {
1913 .cipher = {
1914 .enc = {
1915 .vecs = cast5_cbc_enc_tv_template,
1916 .count = CAST5_CBC_ENC_TEST_VECTORS
1918 .dec = {
1919 .vecs = cast5_cbc_dec_tv_template,
1920 .count = CAST5_CBC_DEC_TEST_VECTORS
1924 }, {
1925 .alg = "cbc(cast6)",
1926 .test = alg_test_skcipher,
1927 .suite = {
1928 .cipher = {
1929 .enc = {
1930 .vecs = cast6_cbc_enc_tv_template,
1931 .count = CAST6_CBC_ENC_TEST_VECTORS
1933 .dec = {
1934 .vecs = cast6_cbc_dec_tv_template,
1935 .count = CAST6_CBC_DEC_TEST_VECTORS
1939 }, {
1940 .alg = "cbc(des)",
1941 .test = alg_test_skcipher,
1942 .suite = {
1943 .cipher = {
1944 .enc = {
1945 .vecs = des_cbc_enc_tv_template,
1946 .count = DES_CBC_ENC_TEST_VECTORS
1948 .dec = {
1949 .vecs = des_cbc_dec_tv_template,
1950 .count = DES_CBC_DEC_TEST_VECTORS
1954 }, {
1955 .alg = "cbc(des3_ede)",
1956 .test = alg_test_skcipher,
1957 .fips_allowed = 1,
1958 .suite = {
1959 .cipher = {
1960 .enc = {
1961 .vecs = des3_ede_cbc_enc_tv_template,
1962 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1964 .dec = {
1965 .vecs = des3_ede_cbc_dec_tv_template,
1966 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1970 }, {
1971 .alg = "cbc(serpent)",
1972 .test = alg_test_skcipher,
1973 .suite = {
1974 .cipher = {
1975 .enc = {
1976 .vecs = serpent_cbc_enc_tv_template,
1977 .count = SERPENT_CBC_ENC_TEST_VECTORS
1979 .dec = {
1980 .vecs = serpent_cbc_dec_tv_template,
1981 .count = SERPENT_CBC_DEC_TEST_VECTORS
1985 }, {
1986 .alg = "cbc(twofish)",
1987 .test = alg_test_skcipher,
1988 .suite = {
1989 .cipher = {
1990 .enc = {
1991 .vecs = tf_cbc_enc_tv_template,
1992 .count = TF_CBC_ENC_TEST_VECTORS
1994 .dec = {
1995 .vecs = tf_cbc_dec_tv_template,
1996 .count = TF_CBC_DEC_TEST_VECTORS
2000 }, {
2001 .alg = "ccm(aes)",
2002 .test = alg_test_aead,
2003 .fips_allowed = 1,
2004 .suite = {
2005 .aead = {
2006 .enc = {
2007 .vecs = aes_ccm_enc_tv_template,
2008 .count = AES_CCM_ENC_TEST_VECTORS
2010 .dec = {
2011 .vecs = aes_ccm_dec_tv_template,
2012 .count = AES_CCM_DEC_TEST_VECTORS
2016 }, {
2017 .alg = "cmac(aes)",
2018 .test = alg_test_hash,
2019 .suite = {
2020 .hash = {
2021 .vecs = aes_cmac128_tv_template,
2022 .count = CMAC_AES_TEST_VECTORS
2025 }, {
2026 .alg = "cmac(des3_ede)",
2027 .test = alg_test_hash,
2028 .suite = {
2029 .hash = {
2030 .vecs = des3_ede_cmac64_tv_template,
2031 .count = CMAC_DES3_EDE_TEST_VECTORS
2034 }, {
2035 .alg = "compress_null",
2036 .test = alg_test_null,
2037 }, {
2038 .alg = "crc32c",
2039 .test = alg_test_crc32c,
2040 .fips_allowed = 1,
2041 .suite = {
2042 .hash = {
2043 .vecs = crc32c_tv_template,
2044 .count = CRC32C_TEST_VECTORS
2047 }, {
2048 .alg = "crct10dif",
2049 .test = alg_test_hash,
2050 .fips_allowed = 1,
2051 .suite = {
2052 .hash = {
2053 .vecs = crct10dif_tv_template,
2054 .count = CRCT10DIF_TEST_VECTORS
2057 }, {
2058 .alg = "cryptd(__driver-cbc-aes-aesni)",
2059 .test = alg_test_null,
2060 .fips_allowed = 1,
2061 }, {
2062 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2063 .test = alg_test_null,
2064 }, {
2065 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2066 .test = alg_test_null,
2067 }, {
2068 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2069 .test = alg_test_null,
2070 }, {
2071 .alg = "cryptd(__driver-ecb-aes-aesni)",
2072 .test = alg_test_null,
2073 .fips_allowed = 1,
2074 }, {
2075 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2076 .test = alg_test_null,
2077 }, {
2078 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2079 .test = alg_test_null,
2080 }, {
2081 .alg = "cryptd(__driver-ecb-cast5-avx)",
2082 .test = alg_test_null,
2083 }, {
2084 .alg = "cryptd(__driver-ecb-cast6-avx)",
2085 .test = alg_test_null,
2086 }, {
2087 .alg = "cryptd(__driver-ecb-serpent-avx)",
2088 .test = alg_test_null,
2089 }, {
2090 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2091 .test = alg_test_null,
2092 }, {
2093 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2094 .test = alg_test_null,
2095 }, {
2096 .alg = "cryptd(__driver-ecb-twofish-avx)",
2097 .test = alg_test_null,
2098 }, {
2099 .alg = "cryptd(__driver-gcm-aes-aesni)",
2100 .test = alg_test_null,
2101 .fips_allowed = 1,
2102 }, {
2103 .alg = "cryptd(__ghash-pclmulqdqni)",
2104 .test = alg_test_null,
2105 .fips_allowed = 1,
2106 }, {
2107 .alg = "ctr(aes)",
2108 .test = alg_test_skcipher,
2109 .fips_allowed = 1,
2110 .suite = {
2111 .cipher = {
2112 .enc = {
2113 .vecs = aes_ctr_enc_tv_template,
2114 .count = AES_CTR_ENC_TEST_VECTORS
2116 .dec = {
2117 .vecs = aes_ctr_dec_tv_template,
2118 .count = AES_CTR_DEC_TEST_VECTORS
2122 }, {
2123 .alg = "ctr(blowfish)",
2124 .test = alg_test_skcipher,
2125 .suite = {
2126 .cipher = {
2127 .enc = {
2128 .vecs = bf_ctr_enc_tv_template,
2129 .count = BF_CTR_ENC_TEST_VECTORS
2131 .dec = {
2132 .vecs = bf_ctr_dec_tv_template,
2133 .count = BF_CTR_DEC_TEST_VECTORS
2137 }, {
2138 .alg = "ctr(camellia)",
2139 .test = alg_test_skcipher,
2140 .suite = {
2141 .cipher = {
2142 .enc = {
2143 .vecs = camellia_ctr_enc_tv_template,
2144 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2146 .dec = {
2147 .vecs = camellia_ctr_dec_tv_template,
2148 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2152 }, {
2153 .alg = "ctr(cast5)",
2154 .test = alg_test_skcipher,
2155 .suite = {
2156 .cipher = {
2157 .enc = {
2158 .vecs = cast5_ctr_enc_tv_template,
2159 .count = CAST5_CTR_ENC_TEST_VECTORS
2161 .dec = {
2162 .vecs = cast5_ctr_dec_tv_template,
2163 .count = CAST5_CTR_DEC_TEST_VECTORS
2167 }, {
2168 .alg = "ctr(cast6)",
2169 .test = alg_test_skcipher,
2170 .suite = {
2171 .cipher = {
2172 .enc = {
2173 .vecs = cast6_ctr_enc_tv_template,
2174 .count = CAST6_CTR_ENC_TEST_VECTORS
2176 .dec = {
2177 .vecs = cast6_ctr_dec_tv_template,
2178 .count = CAST6_CTR_DEC_TEST_VECTORS
2182 }, {
2183 .alg = "ctr(des)",
2184 .test = alg_test_skcipher,
2185 .suite = {
2186 .cipher = {
2187 .enc = {
2188 .vecs = des_ctr_enc_tv_template,
2189 .count = DES_CTR_ENC_TEST_VECTORS
2191 .dec = {
2192 .vecs = des_ctr_dec_tv_template,
2193 .count = DES_CTR_DEC_TEST_VECTORS
2197 }, {
2198 .alg = "ctr(des3_ede)",
2199 .test = alg_test_skcipher,
2200 .suite = {
2201 .cipher = {
2202 .enc = {
2203 .vecs = des3_ede_ctr_enc_tv_template,
2204 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2206 .dec = {
2207 .vecs = des3_ede_ctr_dec_tv_template,
2208 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2212 }, {
2213 .alg = "ctr(serpent)",
2214 .test = alg_test_skcipher,
2215 .suite = {
2216 .cipher = {
2217 .enc = {
2218 .vecs = serpent_ctr_enc_tv_template,
2219 .count = SERPENT_CTR_ENC_TEST_VECTORS
2221 .dec = {
2222 .vecs = serpent_ctr_dec_tv_template,
2223 .count = SERPENT_CTR_DEC_TEST_VECTORS
2227 }, {
2228 .alg = "ctr(twofish)",
2229 .test = alg_test_skcipher,
2230 .suite = {
2231 .cipher = {
2232 .enc = {
2233 .vecs = tf_ctr_enc_tv_template,
2234 .count = TF_CTR_ENC_TEST_VECTORS
2236 .dec = {
2237 .vecs = tf_ctr_dec_tv_template,
2238 .count = TF_CTR_DEC_TEST_VECTORS
2242 }, {
2243 .alg = "cts(cbc(aes))",
2244 .test = alg_test_skcipher,
2245 .suite = {
2246 .cipher = {
2247 .enc = {
2248 .vecs = cts_mode_enc_tv_template,
2249 .count = CTS_MODE_ENC_TEST_VECTORS
2251 .dec = {
2252 .vecs = cts_mode_dec_tv_template,
2253 .count = CTS_MODE_DEC_TEST_VECTORS
2257 }, {
2258 .alg = "deflate",
2259 .test = alg_test_comp,
2260 .fips_allowed = 1,
2261 .suite = {
2262 .comp = {
2263 .comp = {
2264 .vecs = deflate_comp_tv_template,
2265 .count = DEFLATE_COMP_TEST_VECTORS
2267 .decomp = {
2268 .vecs = deflate_decomp_tv_template,
2269 .count = DEFLATE_DECOMP_TEST_VECTORS
2273 }, {
2274 .alg = "digest_null",
2275 .test = alg_test_null,
2276 }, {
2277 .alg = "ecb(__aes-aesni)",
2278 .test = alg_test_null,
2279 .fips_allowed = 1,
2280 }, {
2281 .alg = "ecb(aes)",
2282 .test = alg_test_skcipher,
2283 .fips_allowed = 1,
2284 .suite = {
2285 .cipher = {
2286 .enc = {
2287 .vecs = aes_enc_tv_template,
2288 .count = AES_ENC_TEST_VECTORS
2290 .dec = {
2291 .vecs = aes_dec_tv_template,
2292 .count = AES_DEC_TEST_VECTORS
2296 }, {
2297 .alg = "ecb(anubis)",
2298 .test = alg_test_skcipher,
2299 .suite = {
2300 .cipher = {
2301 .enc = {
2302 .vecs = anubis_enc_tv_template,
2303 .count = ANUBIS_ENC_TEST_VECTORS
2305 .dec = {
2306 .vecs = anubis_dec_tv_template,
2307 .count = ANUBIS_DEC_TEST_VECTORS
2311 }, {
2312 .alg = "ecb(arc4)",
2313 .test = alg_test_skcipher,
2314 .suite = {
2315 .cipher = {
2316 .enc = {
2317 .vecs = arc4_enc_tv_template,
2318 .count = ARC4_ENC_TEST_VECTORS
2320 .dec = {
2321 .vecs = arc4_dec_tv_template,
2322 .count = ARC4_DEC_TEST_VECTORS
2326 }, {
2327 .alg = "ecb(blowfish)",
2328 .test = alg_test_skcipher,
2329 .suite = {
2330 .cipher = {
2331 .enc = {
2332 .vecs = bf_enc_tv_template,
2333 .count = BF_ENC_TEST_VECTORS
2335 .dec = {
2336 .vecs = bf_dec_tv_template,
2337 .count = BF_DEC_TEST_VECTORS
2341 }, {
2342 .alg = "ecb(camellia)",
2343 .test = alg_test_skcipher,
2344 .suite = {
2345 .cipher = {
2346 .enc = {
2347 .vecs = camellia_enc_tv_template,
2348 .count = CAMELLIA_ENC_TEST_VECTORS
2350 .dec = {
2351 .vecs = camellia_dec_tv_template,
2352 .count = CAMELLIA_DEC_TEST_VECTORS
2356 }, {
2357 .alg = "ecb(cast5)",
2358 .test = alg_test_skcipher,
2359 .suite = {
2360 .cipher = {
2361 .enc = {
2362 .vecs = cast5_enc_tv_template,
2363 .count = CAST5_ENC_TEST_VECTORS
2365 .dec = {
2366 .vecs = cast5_dec_tv_template,
2367 .count = CAST5_DEC_TEST_VECTORS
2371 }, {
2372 .alg = "ecb(cast6)",
2373 .test = alg_test_skcipher,
2374 .suite = {
2375 .cipher = {
2376 .enc = {
2377 .vecs = cast6_enc_tv_template,
2378 .count = CAST6_ENC_TEST_VECTORS
2380 .dec = {
2381 .vecs = cast6_dec_tv_template,
2382 .count = CAST6_DEC_TEST_VECTORS
2386 }, {
2387 .alg = "ecb(cipher_null)",
2388 .test = alg_test_null,
2389 }, {
2390 .alg = "ecb(des)",
2391 .test = alg_test_skcipher,
2392 .fips_allowed = 1,
2393 .suite = {
2394 .cipher = {
2395 .enc = {
2396 .vecs = des_enc_tv_template,
2397 .count = DES_ENC_TEST_VECTORS
2399 .dec = {
2400 .vecs = des_dec_tv_template,
2401 .count = DES_DEC_TEST_VECTORS
2405 }, {
2406 .alg = "ecb(des3_ede)",
2407 .test = alg_test_skcipher,
2408 .fips_allowed = 1,
2409 .suite = {
2410 .cipher = {
2411 .enc = {
2412 .vecs = des3_ede_enc_tv_template,
2413 .count = DES3_EDE_ENC_TEST_VECTORS
2415 .dec = {
2416 .vecs = des3_ede_dec_tv_template,
2417 .count = DES3_EDE_DEC_TEST_VECTORS
2421 }, {
2422 .alg = "ecb(fcrypt)",
2423 .test = alg_test_skcipher,
2424 .suite = {
2425 .cipher = {
2426 .enc = {
2427 .vecs = fcrypt_pcbc_enc_tv_template,
2428 .count = 1
2430 .dec = {
2431 .vecs = fcrypt_pcbc_dec_tv_template,
2432 .count = 1
2436 }, {
2437 .alg = "ecb(khazad)",
2438 .test = alg_test_skcipher,
2439 .suite = {
2440 .cipher = {
2441 .enc = {
2442 .vecs = khazad_enc_tv_template,
2443 .count = KHAZAD_ENC_TEST_VECTORS
2445 .dec = {
2446 .vecs = khazad_dec_tv_template,
2447 .count = KHAZAD_DEC_TEST_VECTORS
2451 }, {
2452 .alg = "ecb(seed)",
2453 .test = alg_test_skcipher,
2454 .suite = {
2455 .cipher = {
2456 .enc = {
2457 .vecs = seed_enc_tv_template,
2458 .count = SEED_ENC_TEST_VECTORS
2460 .dec = {
2461 .vecs = seed_dec_tv_template,
2462 .count = SEED_DEC_TEST_VECTORS
2466 }, {
2467 .alg = "ecb(serpent)",
2468 .test = alg_test_skcipher,
2469 .suite = {
2470 .cipher = {
2471 .enc = {
2472 .vecs = serpent_enc_tv_template,
2473 .count = SERPENT_ENC_TEST_VECTORS
2475 .dec = {
2476 .vecs = serpent_dec_tv_template,
2477 .count = SERPENT_DEC_TEST_VECTORS
2481 }, {
2482 .alg = "ecb(tea)",
2483 .test = alg_test_skcipher,
2484 .suite = {
2485 .cipher = {
2486 .enc = {
2487 .vecs = tea_enc_tv_template,
2488 .count = TEA_ENC_TEST_VECTORS
2490 .dec = {
2491 .vecs = tea_dec_tv_template,
2492 .count = TEA_DEC_TEST_VECTORS
2496 }, {
2497 .alg = "ecb(tnepres)",
2498 .test = alg_test_skcipher,
2499 .suite = {
2500 .cipher = {
2501 .enc = {
2502 .vecs = tnepres_enc_tv_template,
2503 .count = TNEPRES_ENC_TEST_VECTORS
2505 .dec = {
2506 .vecs = tnepres_dec_tv_template,
2507 .count = TNEPRES_DEC_TEST_VECTORS
2511 }, {
2512 .alg = "ecb(twofish)",
2513 .test = alg_test_skcipher,
2514 .suite = {
2515 .cipher = {
2516 .enc = {
2517 .vecs = tf_enc_tv_template,
2518 .count = TF_ENC_TEST_VECTORS
2520 .dec = {
2521 .vecs = tf_dec_tv_template,
2522 .count = TF_DEC_TEST_VECTORS
2526 }, {
2527 .alg = "ecb(xeta)",
2528 .test = alg_test_skcipher,
2529 .suite = {
2530 .cipher = {
2531 .enc = {
2532 .vecs = xeta_enc_tv_template,
2533 .count = XETA_ENC_TEST_VECTORS
2535 .dec = {
2536 .vecs = xeta_dec_tv_template,
2537 .count = XETA_DEC_TEST_VECTORS
2541 }, {
2542 .alg = "ecb(xtea)",
2543 .test = alg_test_skcipher,
2544 .suite = {
2545 .cipher = {
2546 .enc = {
2547 .vecs = xtea_enc_tv_template,
2548 .count = XTEA_ENC_TEST_VECTORS
2550 .dec = {
2551 .vecs = xtea_dec_tv_template,
2552 .count = XTEA_DEC_TEST_VECTORS
2556 }, {
2557 .alg = "gcm(aes)",
2558 .test = alg_test_aead,
2559 .fips_allowed = 1,
2560 .suite = {
2561 .aead = {
2562 .enc = {
2563 .vecs = aes_gcm_enc_tv_template,
2564 .count = AES_GCM_ENC_TEST_VECTORS
2566 .dec = {
2567 .vecs = aes_gcm_dec_tv_template,
2568 .count = AES_GCM_DEC_TEST_VECTORS
2572 }, {
2573 .alg = "ghash",
2574 .test = alg_test_hash,
2575 .fips_allowed = 1,
2576 .suite = {
2577 .hash = {
2578 .vecs = ghash_tv_template,
2579 .count = GHASH_TEST_VECTORS
2582 }, {
2583 .alg = "hmac(crc32)",
2584 .test = alg_test_hash,
2585 .suite = {
2586 .hash = {
2587 .vecs = bfin_crc_tv_template,
2588 .count = BFIN_CRC_TEST_VECTORS
2591 }, {
2592 .alg = "hmac(md5)",
2593 .test = alg_test_hash,
2594 .suite = {
2595 .hash = {
2596 .vecs = hmac_md5_tv_template,
2597 .count = HMAC_MD5_TEST_VECTORS
2600 }, {
2601 .alg = "hmac(rmd128)",
2602 .test = alg_test_hash,
2603 .suite = {
2604 .hash = {
2605 .vecs = hmac_rmd128_tv_template,
2606 .count = HMAC_RMD128_TEST_VECTORS
2609 }, {
2610 .alg = "hmac(rmd160)",
2611 .test = alg_test_hash,
2612 .suite = {
2613 .hash = {
2614 .vecs = hmac_rmd160_tv_template,
2615 .count = HMAC_RMD160_TEST_VECTORS
2618 }, {
2619 .alg = "hmac(sha1)",
2620 .test = alg_test_hash,
2621 .fips_allowed = 1,
2622 .suite = {
2623 .hash = {
2624 .vecs = hmac_sha1_tv_template,
2625 .count = HMAC_SHA1_TEST_VECTORS
2628 }, {
2629 .alg = "hmac(sha224)",
2630 .test = alg_test_hash,
2631 .fips_allowed = 1,
2632 .suite = {
2633 .hash = {
2634 .vecs = hmac_sha224_tv_template,
2635 .count = HMAC_SHA224_TEST_VECTORS
2638 }, {
2639 .alg = "hmac(sha256)",
2640 .test = alg_test_hash,
2641 .fips_allowed = 1,
2642 .suite = {
2643 .hash = {
2644 .vecs = hmac_sha256_tv_template,
2645 .count = HMAC_SHA256_TEST_VECTORS
2648 }, {
2649 .alg = "hmac(sha384)",
2650 .test = alg_test_hash,
2651 .fips_allowed = 1,
2652 .suite = {
2653 .hash = {
2654 .vecs = hmac_sha384_tv_template,
2655 .count = HMAC_SHA384_TEST_VECTORS
2658 }, {
2659 .alg = "hmac(sha512)",
2660 .test = alg_test_hash,
2661 .fips_allowed = 1,
2662 .suite = {
2663 .hash = {
2664 .vecs = hmac_sha512_tv_template,
2665 .count = HMAC_SHA512_TEST_VECTORS
2668 }, {
2669 .alg = "lrw(aes)",
2670 .test = alg_test_skcipher,
2671 .suite = {
2672 .cipher = {
2673 .enc = {
2674 .vecs = aes_lrw_enc_tv_template,
2675 .count = AES_LRW_ENC_TEST_VECTORS
2677 .dec = {
2678 .vecs = aes_lrw_dec_tv_template,
2679 .count = AES_LRW_DEC_TEST_VECTORS
2683 }, {
2684 .alg = "lrw(camellia)",
2685 .test = alg_test_skcipher,
2686 .suite = {
2687 .cipher = {
2688 .enc = {
2689 .vecs = camellia_lrw_enc_tv_template,
2690 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
2692 .dec = {
2693 .vecs = camellia_lrw_dec_tv_template,
2694 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
2698 }, {
2699 .alg = "lrw(cast6)",
2700 .test = alg_test_skcipher,
2701 .suite = {
2702 .cipher = {
2703 .enc = {
2704 .vecs = cast6_lrw_enc_tv_template,
2705 .count = CAST6_LRW_ENC_TEST_VECTORS
2707 .dec = {
2708 .vecs = cast6_lrw_dec_tv_template,
2709 .count = CAST6_LRW_DEC_TEST_VECTORS
2713 }, {
2714 .alg = "lrw(serpent)",
2715 .test = alg_test_skcipher,
2716 .suite = {
2717 .cipher = {
2718 .enc = {
2719 .vecs = serpent_lrw_enc_tv_template,
2720 .count = SERPENT_LRW_ENC_TEST_VECTORS
2722 .dec = {
2723 .vecs = serpent_lrw_dec_tv_template,
2724 .count = SERPENT_LRW_DEC_TEST_VECTORS
2728 }, {
2729 .alg = "lrw(twofish)",
2730 .test = alg_test_skcipher,
2731 .suite = {
2732 .cipher = {
2733 .enc = {
2734 .vecs = tf_lrw_enc_tv_template,
2735 .count = TF_LRW_ENC_TEST_VECTORS
2737 .dec = {
2738 .vecs = tf_lrw_dec_tv_template,
2739 .count = TF_LRW_DEC_TEST_VECTORS
2743 }, {
2744 .alg = "lzo",
2745 .test = alg_test_comp,
2746 .fips_allowed = 1,
2747 .suite = {
2748 .comp = {
2749 .comp = {
2750 .vecs = lzo_comp_tv_template,
2751 .count = LZO_COMP_TEST_VECTORS
2753 .decomp = {
2754 .vecs = lzo_decomp_tv_template,
2755 .count = LZO_DECOMP_TEST_VECTORS
2759 }, {
2760 .alg = "md4",
2761 .test = alg_test_hash,
2762 .suite = {
2763 .hash = {
2764 .vecs = md4_tv_template,
2765 .count = MD4_TEST_VECTORS
2768 }, {
2769 .alg = "md5",
2770 .test = alg_test_hash,
2771 .suite = {
2772 .hash = {
2773 .vecs = md5_tv_template,
2774 .count = MD5_TEST_VECTORS
2777 }, {
2778 .alg = "michael_mic",
2779 .test = alg_test_hash,
2780 .suite = {
2781 .hash = {
2782 .vecs = michael_mic_tv_template,
2783 .count = MICHAEL_MIC_TEST_VECTORS
2786 }, {
2787 .alg = "ofb(aes)",
2788 .test = alg_test_skcipher,
2789 .fips_allowed = 1,
2790 .suite = {
2791 .cipher = {
2792 .enc = {
2793 .vecs = aes_ofb_enc_tv_template,
2794 .count = AES_OFB_ENC_TEST_VECTORS
2796 .dec = {
2797 .vecs = aes_ofb_dec_tv_template,
2798 .count = AES_OFB_DEC_TEST_VECTORS
2802 }, {
2803 .alg = "pcbc(fcrypt)",
2804 .test = alg_test_skcipher,
2805 .suite = {
2806 .cipher = {
2807 .enc = {
2808 .vecs = fcrypt_pcbc_enc_tv_template,
2809 .count = FCRYPT_ENC_TEST_VECTORS
2811 .dec = {
2812 .vecs = fcrypt_pcbc_dec_tv_template,
2813 .count = FCRYPT_DEC_TEST_VECTORS
2817 }, {
2818 .alg = "rfc3686(ctr(aes))",
2819 .test = alg_test_skcipher,
2820 .fips_allowed = 1,
2821 .suite = {
2822 .cipher = {
2823 .enc = {
2824 .vecs = aes_ctr_rfc3686_enc_tv_template,
2825 .count = AES_CTR_3686_ENC_TEST_VECTORS
2827 .dec = {
2828 .vecs = aes_ctr_rfc3686_dec_tv_template,
2829 .count = AES_CTR_3686_DEC_TEST_VECTORS
2833 }, {
2834 .alg = "rfc4106(gcm(aes))",
2835 .test = alg_test_aead,
2836 .suite = {
2837 .aead = {
2838 .enc = {
2839 .vecs = aes_gcm_rfc4106_enc_tv_template,
2840 .count = AES_GCM_4106_ENC_TEST_VECTORS
2842 .dec = {
2843 .vecs = aes_gcm_rfc4106_dec_tv_template,
2844 .count = AES_GCM_4106_DEC_TEST_VECTORS
2848 }, {
2849 .alg = "rfc4309(ccm(aes))",
2850 .test = alg_test_aead,
2851 .fips_allowed = 1,
2852 .suite = {
2853 .aead = {
2854 .enc = {
2855 .vecs = aes_ccm_rfc4309_enc_tv_template,
2856 .count = AES_CCM_4309_ENC_TEST_VECTORS
2858 .dec = {
2859 .vecs = aes_ccm_rfc4309_dec_tv_template,
2860 .count = AES_CCM_4309_DEC_TEST_VECTORS
2864 }, {
2865 .alg = "rfc4543(gcm(aes))",
2866 .test = alg_test_aead,
2867 .suite = {
2868 .aead = {
2869 .enc = {
2870 .vecs = aes_gcm_rfc4543_enc_tv_template,
2871 .count = AES_GCM_4543_ENC_TEST_VECTORS
2873 .dec = {
2874 .vecs = aes_gcm_rfc4543_dec_tv_template,
2875 .count = AES_GCM_4543_DEC_TEST_VECTORS
2879 }, {
2880 .alg = "rmd128",
2881 .test = alg_test_hash,
2882 .suite = {
2883 .hash = {
2884 .vecs = rmd128_tv_template,
2885 .count = RMD128_TEST_VECTORS
2888 }, {
2889 .alg = "rmd160",
2890 .test = alg_test_hash,
2891 .suite = {
2892 .hash = {
2893 .vecs = rmd160_tv_template,
2894 .count = RMD160_TEST_VECTORS
2897 }, {
2898 .alg = "rmd256",
2899 .test = alg_test_hash,
2900 .suite = {
2901 .hash = {
2902 .vecs = rmd256_tv_template,
2903 .count = RMD256_TEST_VECTORS
2906 }, {
2907 .alg = "rmd320",
2908 .test = alg_test_hash,
2909 .suite = {
2910 .hash = {
2911 .vecs = rmd320_tv_template,
2912 .count = RMD320_TEST_VECTORS
2915 }, {
2916 .alg = "salsa20",
2917 .test = alg_test_skcipher,
2918 .suite = {
2919 .cipher = {
2920 .enc = {
2921 .vecs = salsa20_stream_enc_tv_template,
2922 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2926 }, {
2927 .alg = "sha1",
2928 .test = alg_test_hash,
2929 .fips_allowed = 1,
2930 .suite = {
2931 .hash = {
2932 .vecs = sha1_tv_template,
2933 .count = SHA1_TEST_VECTORS
2936 }, {
2937 .alg = "sha224",
2938 .test = alg_test_hash,
2939 .fips_allowed = 1,
2940 .suite = {
2941 .hash = {
2942 .vecs = sha224_tv_template,
2943 .count = SHA224_TEST_VECTORS
2946 }, {
2947 .alg = "sha256",
2948 .test = alg_test_hash,
2949 .fips_allowed = 1,
2950 .suite = {
2951 .hash = {
2952 .vecs = sha256_tv_template,
2953 .count = SHA256_TEST_VECTORS
2956 }, {
2957 .alg = "sha384",
2958 .test = alg_test_hash,
2959 .fips_allowed = 1,
2960 .suite = {
2961 .hash = {
2962 .vecs = sha384_tv_template,
2963 .count = SHA384_TEST_VECTORS
2966 }, {
2967 .alg = "sha512",
2968 .test = alg_test_hash,
2969 .fips_allowed = 1,
2970 .suite = {
2971 .hash = {
2972 .vecs = sha512_tv_template,
2973 .count = SHA512_TEST_VECTORS
2976 }, {
2977 .alg = "tgr128",
2978 .test = alg_test_hash,
2979 .suite = {
2980 .hash = {
2981 .vecs = tgr128_tv_template,
2982 .count = TGR128_TEST_VECTORS
2985 }, {
2986 .alg = "tgr160",
2987 .test = alg_test_hash,
2988 .suite = {
2989 .hash = {
2990 .vecs = tgr160_tv_template,
2991 .count = TGR160_TEST_VECTORS
2994 }, {
2995 .alg = "tgr192",
2996 .test = alg_test_hash,
2997 .suite = {
2998 .hash = {
2999 .vecs = tgr192_tv_template,
3000 .count = TGR192_TEST_VECTORS
3003 }, {
3004 .alg = "vmac(aes)",
3005 .test = alg_test_hash,
3006 .suite = {
3007 .hash = {
3008 .vecs = aes_vmac128_tv_template,
3009 .count = VMAC_AES_TEST_VECTORS
3012 }, {
3013 .alg = "wp256",
3014 .test = alg_test_hash,
3015 .suite = {
3016 .hash = {
3017 .vecs = wp256_tv_template,
3018 .count = WP256_TEST_VECTORS
3021 }, {
3022 .alg = "wp384",
3023 .test = alg_test_hash,
3024 .suite = {
3025 .hash = {
3026 .vecs = wp384_tv_template,
3027 .count = WP384_TEST_VECTORS
3030 }, {
3031 .alg = "wp512",
3032 .test = alg_test_hash,
3033 .suite = {
3034 .hash = {
3035 .vecs = wp512_tv_template,
3036 .count = WP512_TEST_VECTORS
3039 }, {
3040 .alg = "xcbc(aes)",
3041 .test = alg_test_hash,
3042 .suite = {
3043 .hash = {
3044 .vecs = aes_xcbc128_tv_template,
3045 .count = XCBC_AES_TEST_VECTORS
3048 }, {
3049 .alg = "xts(aes)",
3050 .test = alg_test_skcipher,
3051 .fips_allowed = 1,
3052 .suite = {
3053 .cipher = {
3054 .enc = {
3055 .vecs = aes_xts_enc_tv_template,
3056 .count = AES_XTS_ENC_TEST_VECTORS
3058 .dec = {
3059 .vecs = aes_xts_dec_tv_template,
3060 .count = AES_XTS_DEC_TEST_VECTORS
3064 }, {
3065 .alg = "xts(camellia)",
3066 .test = alg_test_skcipher,
3067 .suite = {
3068 .cipher = {
3069 .enc = {
3070 .vecs = camellia_xts_enc_tv_template,
3071 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3073 .dec = {
3074 .vecs = camellia_xts_dec_tv_template,
3075 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3079 }, {
3080 .alg = "xts(cast6)",
3081 .test = alg_test_skcipher,
3082 .suite = {
3083 .cipher = {
3084 .enc = {
3085 .vecs = cast6_xts_enc_tv_template,
3086 .count = CAST6_XTS_ENC_TEST_VECTORS
3088 .dec = {
3089 .vecs = cast6_xts_dec_tv_template,
3090 .count = CAST6_XTS_DEC_TEST_VECTORS
3094 }, {
3095 .alg = "xts(serpent)",
3096 .test = alg_test_skcipher,
3097 .suite = {
3098 .cipher = {
3099 .enc = {
3100 .vecs = serpent_xts_enc_tv_template,
3101 .count = SERPENT_XTS_ENC_TEST_VECTORS
3103 .dec = {
3104 .vecs = serpent_xts_dec_tv_template,
3105 .count = SERPENT_XTS_DEC_TEST_VECTORS
3109 }, {
3110 .alg = "xts(twofish)",
3111 .test = alg_test_skcipher,
3112 .suite = {
3113 .cipher = {
3114 .enc = {
3115 .vecs = tf_xts_enc_tv_template,
3116 .count = TF_XTS_ENC_TEST_VECTORS
3118 .dec = {
3119 .vecs = tf_xts_dec_tv_template,
3120 .count = TF_XTS_DEC_TEST_VECTORS
3124 }, {
3125 .alg = "zlib",
3126 .test = alg_test_pcomp,
3127 .fips_allowed = 1,
3128 .suite = {
3129 .pcomp = {
3130 .comp = {
3131 .vecs = zlib_comp_tv_template,
3132 .count = ZLIB_COMP_TEST_VECTORS
3134 .decomp = {
3135 .vecs = zlib_decomp_tv_template,
3136 .count = ZLIB_DECOMP_TEST_VECTORS
3143 static bool alg_test_descs_checked;
3145 static void alg_test_descs_check_order(void)
3147 int i;
3149 /* only check once */
3150 if (alg_test_descs_checked)
3151 return;
3153 alg_test_descs_checked = true;
3155 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3156 int diff = strcmp(alg_test_descs[i - 1].alg,
3157 alg_test_descs[i].alg);
3159 if (WARN_ON(diff > 0)) {
3160 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3161 alg_test_descs[i - 1].alg,
3162 alg_test_descs[i].alg);
3165 if (WARN_ON(diff == 0)) {
3166 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3167 alg_test_descs[i].alg);
3172 static int alg_find_test(const char *alg)
3174 int start = 0;
3175 int end = ARRAY_SIZE(alg_test_descs);
3177 while (start < end) {
3178 int i = (start + end) / 2;
3179 int diff = strcmp(alg_test_descs[i].alg, alg);
3181 if (diff > 0) {
3182 end = i;
3183 continue;
3186 if (diff < 0) {
3187 start = i + 1;
3188 continue;
3191 return i;
3194 return -1;
3197 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3199 int i;
3200 int j;
3201 int rc;
3203 alg_test_descs_check_order();
3205 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3206 char nalg[CRYPTO_MAX_ALG_NAME];
3208 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3209 sizeof(nalg))
3210 return -ENAMETOOLONG;
3212 i = alg_find_test(nalg);
3213 if (i < 0)
3214 goto notest;
3216 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3217 goto non_fips_alg;
3219 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3220 goto test_done;
3223 i = alg_find_test(alg);
3224 j = alg_find_test(driver);
3225 if (i < 0 && j < 0)
3226 goto notest;
3228 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3229 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3230 goto non_fips_alg;
3232 rc = 0;
3233 if (i >= 0)
3234 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3235 type, mask);
3236 if (j >= 0 && j != i)
3237 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3238 type, mask);
3240 test_done:
3241 if (fips_enabled && rc)
3242 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3244 if (fips_enabled && !rc)
3245 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
3246 driver, alg);
3248 return rc;
3250 notest:
3251 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3252 return 0;
3253 non_fips_alg:
3254 return -EINVAL;
3257 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3259 EXPORT_SYMBOL_GPL(alg_test);