Merge branch 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/dtor/input
[linux/fpc-iii.git] / crypto / testmgr.c
blob77955507f6f1f73a044554a7d738074c98f9829e
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
23 #include <crypto/hash.h>
24 #include <linux/err.h>
25 #include <linux/module.h>
26 #include <linux/scatterlist.h>
27 #include <linux/slab.h>
28 #include <linux/string.h>
29 #include <crypto/rng.h>
31 #include "internal.h"
33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
35 /* a perfect nop */
/*
 * alg_test() stub used when CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is set:
 * every algorithm is reported as passing without being exercised.
 * NOTE(review): this extract is missing the function's brace lines.
 */
36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
38 return 0;
41 #else
43 #include "testmgr.h"
46 * Need slab memory for testing (size in number of pages).
48 #define XBUFSIZE 8
51 * Indexes into the xbuf to simulate cross-page access.
53 #define IDX1 32
54 #define IDX2 32400
55 #define IDX3 1
56 #define IDX4 8193
57 #define IDX5 22222
58 #define IDX6 17101
59 #define IDX7 27333
60 #define IDX8 3000
63 * Used by test_cipher()
65 #define ENCRYPT 1
66 #define DECRYPT 0
/*
 * Completion context shared between a test thread and the async
 * completion callback (tcrypt_complete): the callback stores the final
 * status in @err and signals @completion.
 */
68 struct tcrypt_result {
69 struct completion completion;
70 int err;
/* Encryption and decryption test-vector sets for one AEAD algorithm. */
73 struct aead_test_suite {
74 struct {
75 struct aead_testvec *vecs;
76 unsigned int count;
77 } enc, dec;
/* Encryption and decryption test-vector sets for one cipher algorithm. */
80 struct cipher_test_suite {
81 struct {
82 struct cipher_testvec *vecs;
83 unsigned int count;
84 } enc, dec;
/* Compression and decompression test-vector sets for one compressor. */
87 struct comp_test_suite {
88 struct {
89 struct comp_testvec *vecs;
90 unsigned int count;
91 } comp, decomp;
/* Test-vector sets for one partial (streaming) compression algorithm. */
94 struct pcomp_test_suite {
95 struct {
96 struct pcomp_testvec *vecs;
97 unsigned int count;
98 } comp, decomp;
/* Test-vector set for one hash (digest) algorithm. */
101 struct hash_test_suite {
102 struct hash_testvec *vecs;
103 unsigned int count;
/* Test-vector set for one deterministic RNG (CPRNG) algorithm. */
106 struct cprng_test_suite {
107 struct cprng_testvec *vecs;
108 unsigned int count;
/*
 * One entry in the algorithm test table: maps an algorithm name to the
 * test routine that exercises it and the test vectors it needs.
 */
111 struct alg_test_desc {
112 const char *alg;
113 int (*test)(const struct alg_test_desc *desc, const char *driver,
114 u32 type, u32 mask);
115 int fips_allowed; /* set if alg is allowed in fips mode */
/* Only the member matching the algorithm type is used by ->test(). */
117 union {
118 struct aead_test_suite aead;
119 struct cipher_test_suite cipher;
120 struct comp_test_suite comp;
121 struct pcomp_test_suite pcomp;
122 struct hash_test_suite hash;
123 struct cprng_test_suite cprng;
124 } suite;
/*
 * Byte positions inside the xbuf page array used to scatter chunked test
 * data; page index = IDX[k] >> PAGE_SHIFT, offset = offset_in_page(IDX[k]).
 */
127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
/* Dump @len bytes of @buf to the log, 16 bytes per row with offsets. */
129 static void hexdump(unsigned char *buf, unsigned int len)
131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
132 16, 1,
133 buf, len, false);
/*
 * Async request completion callback: record the final status in the
 * caller's tcrypt_result and wake the waiting test thread.
 */
136 static void tcrypt_complete(struct crypto_async_request *req, int err)
138 struct tcrypt_result *res = req->data;
/* -EINPROGRESS just means the backlogged request was started; wait on. */
140 if (err == -EINPROGRESS)
141 return;
143 res->err = err;
144 complete(&res->completion);
/*
 * Allocate the XBUFSIZE scratch pages used to lay out test data.
 * Returns 0 on success or -ENOMEM after freeing any partial allocation.
 */
147 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
149 int i;
151 for (i = 0; i < XBUFSIZE; i++) {
152 buf[i] = (void *)__get_free_page(GFP_KERNEL);
153 if (!buf[i])
154 goto err_free_buf;
157 return 0;
/* Unwind: free only the pages that were successfully allocated. */
159 err_free_buf:
160 while (i-- > 0)
161 free_page((unsigned long)buf[i]);
163 return -ENOMEM;
/* Free all XBUFSIZE scratch pages allocated by testmgr_alloc_buf(). */
166 static void testmgr_free_buf(char *buf[XBUFSIZE])
168 int i;
170 for (i = 0; i < XBUFSIZE; i++)
171 free_page((unsigned long)buf[i]);
/*
 * Resolve one possibly-asynchronous hash operation: if @ret says the op
 * was queued (-EINPROGRESS/-EBUSY), wait for completion and return the
 * recorded status; otherwise pass the synchronous result straight through.
 * NOTE(review): the interruptible wait means a pending signal can abort a
 * test with -ERESTARTSYS; an uninterruptible wait would be more robust.
 */
174 static int do_one_async_hash_op(struct ahash_request *req,
175 struct tcrypt_result *tr,
176 int ret)
178 if (ret == -EINPROGRESS || ret == -EBUSY) {
179 ret = wait_for_completion_interruptible(&tr->completion);
180 if (!ret)
181 ret = tr->err;
/* Re-arm the completion for the next operation on this request. */
182 reinit_completion(&tr->completion);
184 return ret;
/*
 * Run all hash test vectors in @template against @tfm.
 *
 * Two passes: first, single-scatterlist tests at @align_offset bytes into
 * the buffer (using either the one-shot digest or init/update/final,
 * depending on @use_digest); second, chunked tests that scatter the input
 * across pages per each vector's tap[] layout (skipped when
 * @align_offset != 0). Returns 0 on success or a negative errno.
 */
187 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
188 unsigned int tcount, bool use_digest,
189 const int align_offset)
191 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
192 unsigned int i, j, k, temp;
193 struct scatterlist sg[8];
194 char result[64];
195 struct ahash_request *req;
196 struct tcrypt_result tresult;
197 void *hash_buff;
198 char *xbuf[XBUFSIZE];
199 int ret = -ENOMEM;
201 if (testmgr_alloc_buf(xbuf))
202 goto out_nobuf;
204 init_completion(&tresult.completion);
206 req = ahash_request_alloc(tfm, GFP_KERNEL);
207 if (!req) {
208 printk(KERN_ERR "alg: hash: Failed to allocate request for "
209 "%s\n", algo);
210 goto out_noreq;
212 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
213 tcrypt_complete, &tresult);
/* Pass 1: contiguous (single-sg) vectors; j counts tests actually run. */
215 j = 0;
216 for (i = 0; i < tcount; i++) {
/* Vectors with np != 0 are chunked-only; handled in pass 2. */
217 if (template[i].np)
218 continue;
220 ret = -EINVAL;
221 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
222 goto out;
224 j++;
225 memset(result, 0, 64);
227 hash_buff = xbuf[0];
228 hash_buff += align_offset;
230 memcpy(hash_buff, template[i].plaintext, template[i].psize);
231 sg_init_one(&sg[0], hash_buff, template[i].psize);
/* Keyed hashes (e.g. HMAC) need the vector's key installed first. */
233 if (template[i].ksize) {
234 crypto_ahash_clear_flags(tfm, ~0);
235 ret = crypto_ahash_setkey(tfm, template[i].key,
236 template[i].ksize);
237 if (ret) {
238 printk(KERN_ERR "alg: hash: setkey failed on "
239 "test %d for %s: ret=%d\n", j, algo,
240 -ret);
241 goto out;
245 ahash_request_set_crypt(req, sg, result, template[i].psize);
246 if (use_digest) {
247 ret = do_one_async_hash_op(req, &tresult,
248 crypto_ahash_digest(req));
249 if (ret) {
250 pr_err("alg: hash: digest failed on test %d "
251 "for %s: ret=%d\n", j, algo, -ret);
252 goto out;
/* Otherwise exercise the three-step init/update/final path. */
254 } else {
255 ret = do_one_async_hash_op(req, &tresult,
256 crypto_ahash_init(req));
257 if (ret) {
/* NOTE(review): "alt:" below is a typo for "alg:" (also in the
 * update/final messages); fixing it requires a code change. */
258 pr_err("alt: hash: init failed on test %d "
259 "for %s: ret=%d\n", j, algo, -ret);
260 goto out;
262 ret = do_one_async_hash_op(req, &tresult,
263 crypto_ahash_update(req));
264 if (ret) {
265 pr_err("alt: hash: update failed on test %d "
266 "for %s: ret=%d\n", j, algo, -ret);
267 goto out;
269 ret = do_one_async_hash_op(req, &tresult,
270 crypto_ahash_final(req));
271 if (ret) {
272 pr_err("alt: hash: final failed on test %d "
273 "for %s: ret=%d\n", j, algo, -ret);
274 goto out;
278 if (memcmp(result, template[i].digest,
279 crypto_ahash_digestsize(tfm))) {
280 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
281 j, algo);
282 hexdump(result, crypto_ahash_digestsize(tfm));
283 ret = -EINVAL;
284 goto out;
/* Pass 2: chunked vectors, input scattered across pages via IDX[]/tap[]. */
288 j = 0;
289 for (i = 0; i < tcount; i++) {
290 /* alignment tests are only done with continuous buffers */
291 if (align_offset != 0)
292 break;
294 if (template[i].np) {
295 j++;
296 memset(result, 0, 64);
298 temp = 0;
299 sg_init_table(sg, template[i].np);
300 ret = -EINVAL;
301 for (k = 0; k < template[i].np; k++) {
302 if (WARN_ON(offset_in_page(IDX[k]) +
303 template[i].tap[k] > PAGE_SIZE))
304 goto out;
/* Copy chunk k to its scattered location and map it in sg[k]. */
305 sg_set_buf(&sg[k],
306 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
307 offset_in_page(IDX[k]),
308 template[i].plaintext + temp,
309 template[i].tap[k]),
310 template[i].tap[k]);
311 temp += template[i].tap[k];
314 if (template[i].ksize) {
315 crypto_ahash_clear_flags(tfm, ~0);
316 ret = crypto_ahash_setkey(tfm, template[i].key,
317 template[i].ksize);
319 if (ret) {
320 printk(KERN_ERR "alg: hash: setkey "
321 "failed on chunking test %d "
322 "for %s: ret=%d\n", j, algo,
323 -ret);
324 goto out;
328 ahash_request_set_crypt(req, sg, result,
329 template[i].psize);
330 ret = crypto_ahash_digest(req);
331 switch (ret) {
332 case 0:
333 break;
334 case -EINPROGRESS:
335 case -EBUSY:
336 ret = wait_for_completion_interruptible(
337 &tresult.completion);
338 if (!ret && !(ret = tresult.err)) {
339 reinit_completion(&tresult.completion);
340 break;
342 /* fall through */
343 default:
344 printk(KERN_ERR "alg: hash: digest failed "
345 "on chunking test %d for %s: "
346 "ret=%d\n", j, algo, -ret);
347 goto out;
350 if (memcmp(result, template[i].digest,
351 crypto_ahash_digestsize(tfm))) {
352 printk(KERN_ERR "alg: hash: Chunking test %d "
353 "failed for %s\n", j, algo);
354 hexdump(result, crypto_ahash_digestsize(tfm));
355 ret = -EINVAL;
356 goto out;
361 ret = 0;
/* Common unwind: free in reverse order of acquisition. */
363 out:
364 ahash_request_free(req);
365 out_noreq:
366 testmgr_free_buf(xbuf);
367 out_nobuf:
368 return ret;
/*
 * Top-level hash test driver: run __test_hash() with an aligned buffer,
 * then with a 1-byte misalignment, and finally (if the tfm declares an
 * alignmask) at alignmask + 1 to verify the mask is honored.
 * Returns the first failure, or 0 if every variant passes.
 */
371 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
372 unsigned int tcount, bool use_digest)
374 unsigned int alignmask;
375 int ret;
377 ret = __test_hash(tfm, template, tcount, use_digest, 0);
378 if (ret)
379 return ret;
381 /* test unaligned buffers, check with one byte offset */
382 ret = __test_hash(tfm, template, tcount, use_digest, 1);
383 if (ret)
384 return ret;
386 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
387 if (alignmask) {
388 /* Check if alignment mask for tfm is correctly set. */
389 ret = __test_hash(tfm, template, tcount, use_digest,
390 alignmask + 1);
391 if (ret)
392 return ret;
395 return 0;
/*
 * Run all AEAD test vectors in @template against @tfm in direction @enc
 * (ENCRYPT/DECRYPT).
 *
 * Two passes: single-scatterlist vectors at @align_offset (with
 * @diff_dst selecting an out-of-place destination buffer), then chunked
 * vectors scattered across pages via IDX[]/tap[] (only when
 * @align_offset == 0). Vectors with .novrfy set are expected to fail
 * verification with -EBADMSG. Returns 0 on success or a negative errno.
 */
398 static int __test_aead(struct crypto_aead *tfm, int enc,
399 struct aead_testvec *template, unsigned int tcount,
400 const bool diff_dst, const int align_offset)
402 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
403 unsigned int i, j, k, n, temp;
404 int ret = -ENOMEM;
405 char *q;
406 char *key;
407 struct aead_request *req;
408 struct scatterlist *sg;
409 struct scatterlist *asg;
410 struct scatterlist *sgout;
411 const char *e, *d;
412 struct tcrypt_result result;
413 unsigned int authsize;
414 void *input;
415 void *output;
416 void *assoc;
417 char iv[MAX_IVLEN];
418 char *xbuf[XBUFSIZE];
419 char *xoutbuf[XBUFSIZE];
420 char *axbuf[XBUFSIZE];
422 if (testmgr_alloc_buf(xbuf))
423 goto out_noxbuf;
424 if (testmgr_alloc_buf(axbuf))
425 goto out_noaxbuf;
427 if (diff_dst && testmgr_alloc_buf(xoutbuf))
428 goto out_nooutbuf;
430 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
431 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
432 if (!sg)
433 goto out_nosg;
/* One allocation carved into src, assoc, and (optional) dst sg arrays. */
434 asg = &sg[8];
435 sgout = &asg[8];
/* d/e tag the log messages with the test variant and direction. */
437 if (diff_dst)
438 d = "-ddst";
439 else
440 d = "";
442 if (enc == ENCRYPT)
443 e = "encryption";
444 else
445 e = "decryption";
447 init_completion(&result.completion);
449 req = aead_request_alloc(tfm, GFP_KERNEL);
450 if (!req) {
451 pr_err("alg: aead%s: Failed to allocate request for %s\n",
452 d, algo);
453 goto out;
456 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
457 tcrypt_complete, &result);
/* Pass 1: single-scatterlist vectors (np == 0). */
459 for (i = 0, j = 0; i < tcount; i++) {
460 if (!template[i].np) {
461 j++;
463 /* some templates have no input data but they will
464 * touch input
466 input = xbuf[0];
467 input += align_offset;
468 assoc = axbuf[0];
470 ret = -EINVAL;
471 if (WARN_ON(align_offset + template[i].ilen >
472 PAGE_SIZE || template[i].alen > PAGE_SIZE))
473 goto out;
475 memcpy(input, template[i].input, template[i].ilen);
476 memcpy(assoc, template[i].assoc, template[i].alen);
477 if (template[i].iv)
478 memcpy(iv, template[i].iv, MAX_IVLEN);
479 else
480 memset(iv, 0, MAX_IVLEN);
482 crypto_aead_clear_flags(tfm, ~0);
483 if (template[i].wk)
484 crypto_aead_set_flags(
485 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
487 key = template[i].key;
489 ret = crypto_aead_setkey(tfm, key,
490 template[i].klen);
/* !ret == fail: setkey outcome must match the vector's expectation
 * (fail is 0/1). NOTE(review): relies on that precedence; upstream
 * later rewrote this as template[i].fail == !ret for clarity. */
491 if (!ret == template[i].fail) {
492 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
493 d, j, algo, crypto_aead_get_flags(tfm));
494 goto out;
495 } else if (ret)
496 continue;
/* Tag length is the difference between result and input lengths. */
498 authsize = abs(template[i].rlen - template[i].ilen);
499 ret = crypto_aead_setauthsize(tfm, authsize);
500 if (ret) {
501 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
502 d, authsize, j, algo);
503 goto out;
506 if (diff_dst) {
507 output = xoutbuf[0];
508 output += align_offset;
509 sg_init_one(&sg[0], input, template[i].ilen);
510 sg_init_one(&sgout[0], output,
511 template[i].rlen);
512 } else {
/* In-place: source sg must also cover the appended tag on encrypt. */
513 sg_init_one(&sg[0], input,
514 template[i].ilen +
515 (enc ? authsize : 0));
516 output = input;
519 sg_init_one(&asg[0], assoc, template[i].alen);
521 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
522 template[i].ilen, iv);
524 aead_request_set_assoc(req, asg, template[i].alen);
526 ret = enc ?
527 crypto_aead_encrypt(req) :
528 crypto_aead_decrypt(req);
530 switch (ret) {
531 case 0:
532 if (template[i].novrfy) {
533 /* verification was supposed to fail */
534 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
535 d, e, j, algo);
536 /* so really, we got a bad message */
537 ret = -EBADMSG;
538 goto out;
540 break;
541 case -EINPROGRESS:
542 case -EBUSY:
543 ret = wait_for_completion_interruptible(
544 &result.completion);
545 if (!ret && !(ret = result.err)) {
546 reinit_completion(&result.completion);
547 break;
549 case -EBADMSG:
550 if (template[i].novrfy)
551 /* verification failure was expected */
552 continue;
553 /* fall through */
554 default:
555 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
556 d, e, j, algo, -ret);
557 goto out;
560 q = output;
561 if (memcmp(q, template[i].result, template[i].rlen)) {
562 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
563 d, j, e, algo);
564 hexdump(q, template[i].rlen);
565 ret = -EINVAL;
566 goto out;
/* Pass 2: chunked vectors (np != 0), scattered via IDX[]/tap[]. */
571 for (i = 0, j = 0; i < tcount; i++) {
572 /* alignment tests are only done with continuous buffers */
573 if (align_offset != 0)
574 break;
576 if (template[i].np) {
577 j++;
579 if (template[i].iv)
580 memcpy(iv, template[i].iv, MAX_IVLEN);
581 else
582 memset(iv, 0, MAX_IVLEN);
584 crypto_aead_clear_flags(tfm, ~0);
585 if (template[i].wk)
586 crypto_aead_set_flags(
587 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
588 key = template[i].key;
590 ret = crypto_aead_setkey(tfm, key, template[i].klen);
591 if (!ret == template[i].fail) {
592 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
593 d, j, algo, crypto_aead_get_flags(tfm));
594 goto out;
595 } else if (ret)
596 continue;
598 authsize = abs(template[i].rlen - template[i].ilen);
600 ret = -EINVAL;
601 sg_init_table(sg, template[i].np);
602 if (diff_dst)
603 sg_init_table(sgout, template[i].np);
604 for (k = 0, temp = 0; k < template[i].np; k++) {
605 if (WARN_ON(offset_in_page(IDX[k]) +
606 template[i].tap[k] > PAGE_SIZE))
607 goto out;
609 q = xbuf[IDX[k] >> PAGE_SHIFT] +
610 offset_in_page(IDX[k]);
612 memcpy(q, template[i].input + temp,
613 template[i].tap[k]);
615 sg_set_buf(&sg[k], q, template[i].tap[k]);
617 if (diff_dst) {
618 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
619 offset_in_page(IDX[k]);
621 memset(q, 0, template[i].tap[k]);
623 sg_set_buf(&sgout[k], q,
624 template[i].tap[k]);
/* Plant a zero byte past the chunk (plus the tag on the last
 * encrypt chunk) so overruns can be detected afterwards. */
627 n = template[i].tap[k];
628 if (k == template[i].np - 1 && enc)
629 n += authsize;
630 if (offset_in_page(q) + n < PAGE_SIZE)
631 q[n] = 0;
633 temp += template[i].tap[k];
636 ret = crypto_aead_setauthsize(tfm, authsize);
637 if (ret) {
638 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
639 d, authsize, j, algo);
640 goto out;
/* On encrypt, grow the last destination chunk to hold the tag. */
643 if (enc) {
644 if (WARN_ON(sg[k - 1].offset +
645 sg[k - 1].length + authsize >
646 PAGE_SIZE)) {
647 ret = -EINVAL;
648 goto out;
651 if (diff_dst)
652 sgout[k - 1].length += authsize;
653 else
654 sg[k - 1].length += authsize;
/* Scatter the associated data the same way, via anp/atap[]. */
657 sg_init_table(asg, template[i].anp);
658 ret = -EINVAL;
659 for (k = 0, temp = 0; k < template[i].anp; k++) {
660 if (WARN_ON(offset_in_page(IDX[k]) +
661 template[i].atap[k] > PAGE_SIZE))
662 goto out;
663 sg_set_buf(&asg[k],
664 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
665 offset_in_page(IDX[k]),
666 template[i].assoc + temp,
667 template[i].atap[k]),
668 template[i].atap[k]);
669 temp += template[i].atap[k];
672 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
673 template[i].ilen,
674 iv);
676 aead_request_set_assoc(req, asg, template[i].alen);
678 ret = enc ?
679 crypto_aead_encrypt(req) :
680 crypto_aead_decrypt(req);
682 switch (ret) {
683 case 0:
684 if (template[i].novrfy) {
685 /* verification was supposed to fail */
686 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
687 d, e, j, algo);
688 /* so really, we got a bad message */
689 ret = -EBADMSG;
690 goto out;
692 break;
693 case -EINPROGRESS:
694 case -EBUSY:
695 ret = wait_for_completion_interruptible(
696 &result.completion);
697 if (!ret && !(ret = result.err)) {
698 reinit_completion(&result.completion);
699 break;
701 case -EBADMSG:
702 if (template[i].novrfy)
703 /* verification failure was expected */
704 continue;
705 /* fall through */
706 default:
707 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
708 d, e, j, algo, -ret);
709 goto out;
/* Verify each output chunk and the guard bytes beyond it. */
712 ret = -EINVAL;
713 for (k = 0, temp = 0; k < template[i].np; k++) {
714 if (diff_dst)
715 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
716 offset_in_page(IDX[k]);
717 else
718 q = xbuf[IDX[k] >> PAGE_SHIFT] +
719 offset_in_page(IDX[k]);
721 n = template[i].tap[k];
/* Last chunk gains the tag on encrypt, loses it on decrypt. */
722 if (k == template[i].np - 1)
723 n += enc ? authsize : -authsize;
725 if (memcmp(q, template[i].result + temp, n)) {
726 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
727 d, j, e, k, algo);
728 hexdump(q, n);
729 goto out;
732 q += n;
733 if (k == template[i].np - 1 && !enc) {
/* In-place decrypt leaves the stripped tag after the payload. */
734 if (!diff_dst &&
735 memcmp(q, template[i].input +
736 temp + n, authsize))
737 n = authsize;
738 else
739 n = 0;
740 } else {
/* Count any non-zero bytes written past the chunk boundary. */
741 for (n = 0; offset_in_page(q + n) &&
742 q[n]; n++)
745 if (n) {
746 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
747 d, j, e, k, algo, n);
748 hexdump(q, n);
749 goto out;
752 temp += template[i].tap[k];
757 ret = 0;
/* Common unwind: free in reverse order of acquisition. */
759 out:
760 aead_request_free(req);
761 kfree(sg);
762 out_nosg:
763 if (diff_dst)
764 testmgr_free_buf(xoutbuf);
765 out_nooutbuf:
766 testmgr_free_buf(axbuf);
767 out_noaxbuf:
768 testmgr_free_buf(xbuf);
769 out_noxbuf:
770 return ret;
/*
 * Top-level AEAD test driver: exercise __test_aead() in-place, then
 * out-of-place, then with a 1-byte misalignment, and finally (if the
 * tfm declares an alignmask) at alignmask + 1 to verify the mask is
 * honored. Returns the first failure, or 0 if every variant passes.
 */
773 static int test_aead(struct crypto_aead *tfm, int enc,
774 struct aead_testvec *template, unsigned int tcount)
776 unsigned int alignmask;
777 int ret;
779 /* test 'dst == src' case */
780 ret = __test_aead(tfm, enc, template, tcount, false, 0);
781 if (ret)
782 return ret;
784 /* test 'dst != src' case */
785 ret = __test_aead(tfm, enc, template, tcount, true, 0);
786 if (ret)
787 return ret;
789 /* test unaligned buffers, check with one byte offset */
790 ret = __test_aead(tfm, enc, template, tcount, true, 1);
791 if (ret)
792 return ret;
794 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
795 if (alignmask) {
796 /* Check if alignment mask for tfm is correctly set. */
797 ret = __test_aead(tfm, enc, template, tcount, true,
798 alignmask + 1);
799 if (ret)
800 return ret;
803 return 0;
/*
 * Run single-block cipher test vectors against @tfm using the
 * synchronous crypto_cipher API, one block at a time, in direction
 * @enc (ENCRYPT/DECRYPT). Chunked vectors (np != 0) do not apply to
 * the single-block interface and are skipped.
 * Returns 0 on success or a negative errno.
 */
806 static int test_cipher(struct crypto_cipher *tfm, int enc,
807 struct cipher_testvec *template, unsigned int tcount)
809 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
810 unsigned int i, j, k;
811 char *q;
812 const char *e;
813 void *data;
814 char *xbuf[XBUFSIZE];
815 int ret = -ENOMEM;
817 if (testmgr_alloc_buf(xbuf))
818 goto out_nobuf;
820 if (enc == ENCRYPT)
821 e = "encryption";
822 else
823 e = "decryption";
825 j = 0;
826 for (i = 0; i < tcount; i++) {
827 if (template[i].np)
828 continue;
830 j++;
832 ret = -EINVAL;
833 if (WARN_ON(template[i].ilen > PAGE_SIZE))
834 goto out;
836 data = xbuf[0];
837 memcpy(data, template[i].input, template[i].ilen);
839 crypto_cipher_clear_flags(tfm, ~0);
840 if (template[i].wk)
841 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
843 ret = crypto_cipher_setkey(tfm, template[i].key,
844 template[i].klen);
/* Setkey outcome must match the vector's fail flag (0/1). */
845 if (!ret == template[i].fail) {
846 printk(KERN_ERR "alg: cipher: setkey failed "
847 "on test %d for %s: flags=%x\n", j,
848 algo, crypto_cipher_get_flags(tfm));
849 goto out;
850 } else if (ret)
851 continue;
/* Process the input in-place, one cipher block per call. */
853 for (k = 0; k < template[i].ilen;
854 k += crypto_cipher_blocksize(tfm)) {
855 if (enc)
856 crypto_cipher_encrypt_one(tfm, data + k,
857 data + k);
858 else
859 crypto_cipher_decrypt_one(tfm, data + k,
860 data + k);
863 q = data;
864 if (memcmp(q, template[i].result, template[i].rlen)) {
865 printk(KERN_ERR "alg: cipher: Test %d failed "
866 "on %s for %s\n", j, e, algo);
867 hexdump(q, template[i].rlen);
868 ret = -EINVAL;
869 goto out;
873 ret = 0;
875 out:
876 testmgr_free_buf(xbuf);
877 out_nobuf:
878 return ret;
/*
 * Run all skcipher test vectors in @template against @tfm (ablkcipher
 * API) in direction @enc.
 *
 * Two passes: single-scatterlist vectors at @align_offset (with
 * @diff_dst selecting an out-of-place destination), then chunked
 * vectors scattered across pages via IDX[]/tap[] (only when
 * @align_offset == 0). Vectors with also_non_np set run in both
 * passes. Returns 0 on success or a negative errno.
 */
881 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
882 struct cipher_testvec *template, unsigned int tcount,
883 const bool diff_dst, const int align_offset)
885 const char *algo =
886 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
887 unsigned int i, j, k, n, temp;
888 char *q;
889 struct ablkcipher_request *req;
890 struct scatterlist sg[8];
891 struct scatterlist sgout[8];
892 const char *e, *d;
893 struct tcrypt_result result;
894 void *data;
895 char iv[MAX_IVLEN];
896 char *xbuf[XBUFSIZE];
897 char *xoutbuf[XBUFSIZE];
898 int ret = -ENOMEM;
900 if (testmgr_alloc_buf(xbuf))
901 goto out_nobuf;
903 if (diff_dst && testmgr_alloc_buf(xoutbuf))
904 goto out_nooutbuf;
/* d/e tag the log messages with the test variant and direction. */
906 if (diff_dst)
907 d = "-ddst";
908 else
909 d = "";
911 if (enc == ENCRYPT)
912 e = "encryption";
913 else
914 e = "decryption";
916 init_completion(&result.completion);
918 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
919 if (!req) {
920 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
921 d, algo);
922 goto out;
925 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
926 tcrypt_complete, &result);
/* Pass 1: contiguous (single-sg) vectors. */
928 j = 0;
929 for (i = 0; i < tcount; i++) {
930 if (template[i].iv)
931 memcpy(iv, template[i].iv, MAX_IVLEN);
932 else
933 memset(iv, 0, MAX_IVLEN);
935 if (!(template[i].np) || (template[i].also_non_np)) {
936 j++;
938 ret = -EINVAL;
939 if (WARN_ON(align_offset + template[i].ilen >
940 PAGE_SIZE))
941 goto out;
943 data = xbuf[0];
944 data += align_offset;
945 memcpy(data, template[i].input, template[i].ilen);
947 crypto_ablkcipher_clear_flags(tfm, ~0);
948 if (template[i].wk)
949 crypto_ablkcipher_set_flags(
950 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
952 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
953 template[i].klen);
/* Setkey outcome must match the vector's fail flag (0/1). */
954 if (!ret == template[i].fail) {
955 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
956 d, j, algo,
957 crypto_ablkcipher_get_flags(tfm));
958 goto out;
959 } else if (ret)
960 continue;
962 sg_init_one(&sg[0], data, template[i].ilen);
963 if (diff_dst) {
964 data = xoutbuf[0];
965 data += align_offset;
966 sg_init_one(&sgout[0], data, template[i].ilen);
969 ablkcipher_request_set_crypt(req, sg,
970 (diff_dst) ? sgout : sg,
971 template[i].ilen, iv);
972 ret = enc ?
973 crypto_ablkcipher_encrypt(req) :
974 crypto_ablkcipher_decrypt(req);
976 switch (ret) {
977 case 0:
978 break;
979 case -EINPROGRESS:
980 case -EBUSY:
981 ret = wait_for_completion_interruptible(
982 &result.completion);
983 if (!ret && !((ret = result.err))) {
984 reinit_completion(&result.completion);
985 break;
987 /* fall through */
988 default:
989 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
990 d, e, j, algo, -ret);
991 goto out;
/* data points at the destination buffer (src or dst as selected). */
994 q = data;
995 if (memcmp(q, template[i].result, template[i].rlen)) {
996 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
997 d, j, e, algo);
998 hexdump(q, template[i].rlen);
999 ret = -EINVAL;
1000 goto out;
/* Pass 2: chunked vectors (np != 0), scattered via IDX[]/tap[]. */
1005 j = 0;
1006 for (i = 0; i < tcount; i++) {
1007 /* alignment tests are only done with continuous buffers */
1008 if (align_offset != 0)
1009 break;
1011 if (template[i].iv)
1012 memcpy(iv, template[i].iv, MAX_IVLEN);
1013 else
1014 memset(iv, 0, MAX_IVLEN);
1016 if (template[i].np) {
1017 j++;
1019 crypto_ablkcipher_clear_flags(tfm, ~0);
1020 if (template[i].wk)
1021 crypto_ablkcipher_set_flags(
1022 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1024 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1025 template[i].klen);
1026 if (!ret == template[i].fail) {
1027 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1028 d, j, algo,
1029 crypto_ablkcipher_get_flags(tfm));
1030 goto out;
1031 } else if (ret)
1032 continue;
1034 temp = 0;
1035 ret = -EINVAL;
1036 sg_init_table(sg, template[i].np);
1037 if (diff_dst)
1038 sg_init_table(sgout, template[i].np);
1039 for (k = 0; k < template[i].np; k++) {
1040 if (WARN_ON(offset_in_page(IDX[k]) +
1041 template[i].tap[k] > PAGE_SIZE))
1042 goto out;
1044 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1045 offset_in_page(IDX[k]);
1047 memcpy(q, template[i].input + temp,
1048 template[i].tap[k]);
/* Plant a zero guard byte after the chunk to detect overruns. */
1050 if (offset_in_page(q) + template[i].tap[k] <
1051 PAGE_SIZE)
1052 q[template[i].tap[k]] = 0;
1054 sg_set_buf(&sg[k], q, template[i].tap[k]);
1055 if (diff_dst) {
1056 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1057 offset_in_page(IDX[k]);
1059 sg_set_buf(&sgout[k], q,
1060 template[i].tap[k]);
1062 memset(q, 0, template[i].tap[k]);
1063 if (offset_in_page(q) +
1064 template[i].tap[k] < PAGE_SIZE)
1065 q[template[i].tap[k]] = 0;
1068 temp += template[i].tap[k];
1071 ablkcipher_request_set_crypt(req, sg,
1072 (diff_dst) ? sgout : sg,
1073 template[i].ilen, iv);
1075 ret = enc ?
1076 crypto_ablkcipher_encrypt(req) :
1077 crypto_ablkcipher_decrypt(req);
1079 switch (ret) {
1080 case 0:
1081 break;
1082 case -EINPROGRESS:
1083 case -EBUSY:
1084 ret = wait_for_completion_interruptible(
1085 &result.completion);
1086 if (!ret && !((ret = result.err))) {
1087 reinit_completion(&result.completion);
1088 break;
1090 /* fall through */
1091 default:
1092 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1093 d, e, j, algo, -ret);
1094 goto out;
/* Verify each output chunk and the guard bytes beyond it. */
1097 temp = 0;
1098 ret = -EINVAL;
1099 for (k = 0; k < template[i].np; k++) {
1100 if (diff_dst)
1101 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1102 offset_in_page(IDX[k]);
1103 else
1104 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1105 offset_in_page(IDX[k]);
1107 if (memcmp(q, template[i].result + temp,
1108 template[i].tap[k])) {
1109 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1110 d, j, e, k, algo);
1111 hexdump(q, template[i].tap[k]);
1112 goto out;
1115 q += template[i].tap[k];
/* Count any non-zero bytes written past the chunk boundary. */
1116 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1118 if (n) {
1119 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1120 d, j, e, k, algo, n);
1121 hexdump(q, n);
1122 goto out;
1124 temp += template[i].tap[k];
1129 ret = 0;
/* Common unwind: free in reverse order of acquisition. */
1131 out:
1132 ablkcipher_request_free(req);
1133 if (diff_dst)
1134 testmgr_free_buf(xoutbuf);
1135 out_nooutbuf:
1136 testmgr_free_buf(xbuf);
1137 out_nobuf:
1138 return ret;
/*
 * Top-level skcipher test driver: exercise __test_skcipher() in-place,
 * then out-of-place, then with a 1-byte misalignment, and finally (if
 * the tfm declares an alignmask) at alignmask + 1 to verify the mask
 * is honored. Returns the first failure, or 0 if every variant passes.
 */
1141 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1142 struct cipher_testvec *template, unsigned int tcount)
1144 unsigned int alignmask;
1145 int ret;
1147 /* test 'dst == src' case */
1148 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1149 if (ret)
1150 return ret;
1152 /* test 'dst != src' case */
1153 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1154 if (ret)
1155 return ret;
1157 /* test unaligned buffers, check with one byte offset */
1158 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1159 if (ret)
1160 return ret;
1162 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1163 if (alignmask) {
1164 /* Check if alignment mask for tfm is correctly set. */
1165 ret = __test_skcipher(tfm, enc, template, tcount, true,
1166 alignmask + 1);
1167 if (ret)
1168 return ret;
1171 return 0;
/*
 * Run one-shot compression vectors (@ctemplate, @ctcount) and
 * decompression vectors (@dtemplate, @dtcount) against @tfm, checking
 * both the produced length and the output bytes against each vector.
 * Returns 0 on success or a negative errno on the first mismatch.
 */
1174 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1175 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1177 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1178 unsigned int i;
1179 char result[COMP_BUF_SIZE];
1180 int ret;
/* Compression direction. */
1182 for (i = 0; i < ctcount; i++) {
1183 int ilen;
1184 unsigned int dlen = COMP_BUF_SIZE;
1186 memset(result, 0, sizeof (result));
1188 ilen = ctemplate[i].inlen;
1189 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1190 ilen, result, &dlen);
1191 if (ret) {
1192 printk(KERN_ERR "alg: comp: compression failed "
1193 "on test %d for %s: ret=%d\n", i + 1, algo,
1194 -ret);
1195 goto out;
1198 if (dlen != ctemplate[i].outlen) {
1199 printk(KERN_ERR "alg: comp: Compression test %d "
1200 "failed for %s: output len = %d\n", i + 1, algo,
1201 dlen);
1202 ret = -EINVAL;
1203 goto out;
1206 if (memcmp(result, ctemplate[i].output, dlen)) {
1207 printk(KERN_ERR "alg: comp: Compression test %d "
1208 "failed for %s\n", i + 1, algo);
1209 hexdump(result, dlen);
1210 ret = -EINVAL;
1211 goto out;
/* Decompression direction. */
1215 for (i = 0; i < dtcount; i++) {
1216 int ilen;
1217 unsigned int dlen = COMP_BUF_SIZE;
1219 memset(result, 0, sizeof (result));
1221 ilen = dtemplate[i].inlen;
1222 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1223 ilen, result, &dlen);
1224 if (ret) {
1225 printk(KERN_ERR "alg: comp: decompression failed "
1226 "on test %d for %s: ret=%d\n", i + 1, algo,
1227 -ret);
1228 goto out;
1231 if (dlen != dtemplate[i].outlen) {
1232 printk(KERN_ERR "alg: comp: Decompression test %d "
1233 "failed for %s: output len = %d\n", i + 1, algo,
1234 dlen);
1235 ret = -EINVAL;
1236 goto out;
1239 if (memcmp(result, dtemplate[i].output, dlen)) {
1240 printk(KERN_ERR "alg: comp: Decompression test %d "
1241 "failed for %s\n", i + 1, algo);
1242 hexdump(result, dlen);
1243 ret = -EINVAL;
1244 goto out;
1248 ret = 0;
1250 out:
1251 return ret;
1254 static int test_pcomp(struct crypto_pcomp *tfm,
1255 struct pcomp_testvec *ctemplate,
1256 struct pcomp_testvec *dtemplate, int ctcount,
1257 int dtcount)
1259 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1260 unsigned int i;
1261 char result[COMP_BUF_SIZE];
1262 int res;
1264 for (i = 0; i < ctcount; i++) {
1265 struct comp_request req;
1266 unsigned int produced = 0;
1268 res = crypto_compress_setup(tfm, ctemplate[i].params,
1269 ctemplate[i].paramsize);
1270 if (res) {
1271 pr_err("alg: pcomp: compression setup failed on test "
1272 "%d for %s: error=%d\n", i + 1, algo, res);
1273 return res;
1276 res = crypto_compress_init(tfm);
1277 if (res) {
1278 pr_err("alg: pcomp: compression init failed on test "
1279 "%d for %s: error=%d\n", i + 1, algo, res);
1280 return res;
1283 memset(result, 0, sizeof(result));
1285 req.next_in = ctemplate[i].input;
1286 req.avail_in = ctemplate[i].inlen / 2;
1287 req.next_out = result;
1288 req.avail_out = ctemplate[i].outlen / 2;
1290 res = crypto_compress_update(tfm, &req);
1291 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1292 pr_err("alg: pcomp: compression update failed on test "
1293 "%d for %s: error=%d\n", i + 1, algo, res);
1294 return res;
1296 if (res > 0)
1297 produced += res;
1299 /* Add remaining input data */
1300 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1302 res = crypto_compress_update(tfm, &req);
1303 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1304 pr_err("alg: pcomp: compression update failed on test "
1305 "%d for %s: error=%d\n", i + 1, algo, res);
1306 return res;
1308 if (res > 0)
1309 produced += res;
1311 /* Provide remaining output space */
1312 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1314 res = crypto_compress_final(tfm, &req);
1315 if (res < 0) {
1316 pr_err("alg: pcomp: compression final failed on test "
1317 "%d for %s: error=%d\n", i + 1, algo, res);
1318 return res;
1320 produced += res;
1322 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1323 pr_err("alg: comp: Compression test %d failed for %s: "
1324 "output len = %d (expected %d)\n", i + 1, algo,
1325 COMP_BUF_SIZE - req.avail_out,
1326 ctemplate[i].outlen);
1327 return -EINVAL;
1330 if (produced != ctemplate[i].outlen) {
1331 pr_err("alg: comp: Compression test %d failed for %s: "
1332 "returned len = %u (expected %d)\n", i + 1,
1333 algo, produced, ctemplate[i].outlen);
1334 return -EINVAL;
1337 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1338 pr_err("alg: pcomp: Compression test %d failed for "
1339 "%s\n", i + 1, algo);
1340 hexdump(result, ctemplate[i].outlen);
1341 return -EINVAL;
1345 for (i = 0; i < dtcount; i++) {
1346 struct comp_request req;
1347 unsigned int produced = 0;
1349 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1350 dtemplate[i].paramsize);
1351 if (res) {
1352 pr_err("alg: pcomp: decompression setup failed on "
1353 "test %d for %s: error=%d\n", i + 1, algo, res);
1354 return res;
1357 res = crypto_decompress_init(tfm);
1358 if (res) {
1359 pr_err("alg: pcomp: decompression init failed on test "
1360 "%d for %s: error=%d\n", i + 1, algo, res);
1361 return res;
1364 memset(result, 0, sizeof(result));
1366 req.next_in = dtemplate[i].input;
1367 req.avail_in = dtemplate[i].inlen / 2;
1368 req.next_out = result;
1369 req.avail_out = dtemplate[i].outlen / 2;
1371 res = crypto_decompress_update(tfm, &req);
1372 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1373 pr_err("alg: pcomp: decompression update failed on "
1374 "test %d for %s: error=%d\n", i + 1, algo, res);
1375 return res;
1377 if (res > 0)
1378 produced += res;
1380 /* Add remaining input data */
1381 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1383 res = crypto_decompress_update(tfm, &req);
1384 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1385 pr_err("alg: pcomp: decompression update failed on "
1386 "test %d for %s: error=%d\n", i + 1, algo, res);
1387 return res;
1389 if (res > 0)
1390 produced += res;
1392 /* Provide remaining output space */
1393 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1395 res = crypto_decompress_final(tfm, &req);
1396 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1397 pr_err("alg: pcomp: decompression final failed on "
1398 "test %d for %s: error=%d\n", i + 1, algo, res);
1399 return res;
1401 if (res > 0)
1402 produced += res;
1404 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1405 pr_err("alg: comp: Decompression test %d failed for "
1406 "%s: output len = %d (expected %d)\n", i + 1,
1407 algo, COMP_BUF_SIZE - req.avail_out,
1408 dtemplate[i].outlen);
1409 return -EINVAL;
1412 if (produced != dtemplate[i].outlen) {
1413 pr_err("alg: comp: Decompression test %d failed for "
1414 "%s: returned len = %u (expected %d)\n", i + 1,
1415 algo, produced, dtemplate[i].outlen);
1416 return -EINVAL;
1419 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1420 pr_err("alg: pcomp: Decompression test %d failed for "
1421 "%s\n", i + 1, algo);
1422 hexdump(result, dtemplate[i].outlen);
1423 return -EINVAL;
1427 return 0;
1431 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1432 unsigned int tcount)
1434 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1435 int err = 0, i, j, seedsize;
1436 u8 *seed;
1437 char result[32];
1439 seedsize = crypto_rng_seedsize(tfm);
1441 seed = kmalloc(seedsize, GFP_KERNEL);
1442 if (!seed) {
1443 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1444 "for %s\n", algo);
1445 return -ENOMEM;
1448 for (i = 0; i < tcount; i++) {
1449 memset(result, 0, 32);
1451 memcpy(seed, template[i].v, template[i].vlen);
1452 memcpy(seed + template[i].vlen, template[i].key,
1453 template[i].klen);
1454 memcpy(seed + template[i].vlen + template[i].klen,
1455 template[i].dt, template[i].dtlen);
1457 err = crypto_rng_reset(tfm, seed, seedsize);
1458 if (err) {
1459 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1460 "for %s\n", algo);
1461 goto out;
1464 for (j = 0; j < template[i].loops; j++) {
1465 err = crypto_rng_get_bytes(tfm, result,
1466 template[i].rlen);
1467 if (err != template[i].rlen) {
1468 printk(KERN_ERR "alg: cprng: Failed to obtain "
1469 "the correct amount of random data for "
1470 "%s (requested %d, got %d)\n", algo,
1471 template[i].rlen, err);
1472 goto out;
1476 err = memcmp(result, template[i].result,
1477 template[i].rlen);
1478 if (err) {
1479 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1480 i, algo);
1481 hexdump(result, template[i].rlen);
1482 err = -EINVAL;
1483 goto out;
1487 out:
1488 kfree(seed);
1489 return err;
1492 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1493 u32 type, u32 mask)
1495 struct crypto_aead *tfm;
1496 int err = 0;
1498 tfm = crypto_alloc_aead(driver, type, mask);
1499 if (IS_ERR(tfm)) {
1500 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1501 "%ld\n", driver, PTR_ERR(tfm));
1502 return PTR_ERR(tfm);
1505 if (desc->suite.aead.enc.vecs) {
1506 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1507 desc->suite.aead.enc.count);
1508 if (err)
1509 goto out;
1512 if (!err && desc->suite.aead.dec.vecs)
1513 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1514 desc->suite.aead.dec.count);
1516 out:
1517 crypto_free_aead(tfm);
1518 return err;
1521 static int alg_test_cipher(const struct alg_test_desc *desc,
1522 const char *driver, u32 type, u32 mask)
1524 struct crypto_cipher *tfm;
1525 int err = 0;
1527 tfm = crypto_alloc_cipher(driver, type, mask);
1528 if (IS_ERR(tfm)) {
1529 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1530 "%s: %ld\n", driver, PTR_ERR(tfm));
1531 return PTR_ERR(tfm);
1534 if (desc->suite.cipher.enc.vecs) {
1535 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1536 desc->suite.cipher.enc.count);
1537 if (err)
1538 goto out;
1541 if (desc->suite.cipher.dec.vecs)
1542 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1543 desc->suite.cipher.dec.count);
1545 out:
1546 crypto_free_cipher(tfm);
1547 return err;
1550 static int alg_test_skcipher(const struct alg_test_desc *desc,
1551 const char *driver, u32 type, u32 mask)
1553 struct crypto_ablkcipher *tfm;
1554 int err = 0;
1556 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1557 if (IS_ERR(tfm)) {
1558 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1559 "%s: %ld\n", driver, PTR_ERR(tfm));
1560 return PTR_ERR(tfm);
1563 if (desc->suite.cipher.enc.vecs) {
1564 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1565 desc->suite.cipher.enc.count);
1566 if (err)
1567 goto out;
1570 if (desc->suite.cipher.dec.vecs)
1571 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1572 desc->suite.cipher.dec.count);
1574 out:
1575 crypto_free_ablkcipher(tfm);
1576 return err;
1579 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1580 u32 type, u32 mask)
1582 struct crypto_comp *tfm;
1583 int err;
1585 tfm = crypto_alloc_comp(driver, type, mask);
1586 if (IS_ERR(tfm)) {
1587 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1588 "%ld\n", driver, PTR_ERR(tfm));
1589 return PTR_ERR(tfm);
1592 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1593 desc->suite.comp.decomp.vecs,
1594 desc->suite.comp.comp.count,
1595 desc->suite.comp.decomp.count);
1597 crypto_free_comp(tfm);
1598 return err;
1601 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1602 u32 type, u32 mask)
1604 struct crypto_pcomp *tfm;
1605 int err;
1607 tfm = crypto_alloc_pcomp(driver, type, mask);
1608 if (IS_ERR(tfm)) {
1609 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1610 driver, PTR_ERR(tfm));
1611 return PTR_ERR(tfm);
1614 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1615 desc->suite.pcomp.decomp.vecs,
1616 desc->suite.pcomp.comp.count,
1617 desc->suite.pcomp.decomp.count);
1619 crypto_free_pcomp(tfm);
1620 return err;
1623 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1624 u32 type, u32 mask)
1626 struct crypto_ahash *tfm;
1627 int err;
1629 tfm = crypto_alloc_ahash(driver, type, mask);
1630 if (IS_ERR(tfm)) {
1631 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1632 "%ld\n", driver, PTR_ERR(tfm));
1633 return PTR_ERR(tfm);
1636 err = test_hash(tfm, desc->suite.hash.vecs,
1637 desc->suite.hash.count, true);
1638 if (!err)
1639 err = test_hash(tfm, desc->suite.hash.vecs,
1640 desc->suite.hash.count, false);
1642 crypto_free_ahash(tfm);
1643 return err;
1646 static int alg_test_crc32c(const struct alg_test_desc *desc,
1647 const char *driver, u32 type, u32 mask)
1649 struct crypto_shash *tfm;
1650 u32 val;
1651 int err;
1653 err = alg_test_hash(desc, driver, type, mask);
1654 if (err)
1655 goto out;
1657 tfm = crypto_alloc_shash(driver, type, mask);
1658 if (IS_ERR(tfm)) {
1659 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1660 "%ld\n", driver, PTR_ERR(tfm));
1661 err = PTR_ERR(tfm);
1662 goto out;
1665 do {
1666 struct {
1667 struct shash_desc shash;
1668 char ctx[crypto_shash_descsize(tfm)];
1669 } sdesc;
1671 sdesc.shash.tfm = tfm;
1672 sdesc.shash.flags = 0;
1674 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1675 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1676 if (err) {
1677 printk(KERN_ERR "alg: crc32c: Operation failed for "
1678 "%s: %d\n", driver, err);
1679 break;
1682 if (val != ~420553207) {
1683 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1684 "%d\n", driver, val);
1685 err = -EINVAL;
1687 } while (0);
1689 crypto_free_shash(tfm);
1691 out:
1692 return err;
1695 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1696 u32 type, u32 mask)
1698 struct crypto_rng *rng;
1699 int err;
1701 rng = crypto_alloc_rng(driver, type, mask);
1702 if (IS_ERR(rng)) {
1703 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1704 "%ld\n", driver, PTR_ERR(rng));
1705 return PTR_ERR(rng);
1708 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1710 crypto_free_rng(rng);
1712 return err;
/*
 * No-op test: used for table entries (internal helper implementations,
 * null algorithms) that need a testmgr entry but are not tested
 * directly.  Always reports success.
 */
static int alg_test_null(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	return 0;
}
1721 /* Please keep this list sorted by algorithm name. */
1722 static const struct alg_test_desc alg_test_descs[] = {
1724 .alg = "__cbc-cast5-avx",
1725 .test = alg_test_null,
1726 }, {
1727 .alg = "__cbc-cast6-avx",
1728 .test = alg_test_null,
1729 }, {
1730 .alg = "__cbc-serpent-avx",
1731 .test = alg_test_null,
1732 }, {
1733 .alg = "__cbc-serpent-avx2",
1734 .test = alg_test_null,
1735 }, {
1736 .alg = "__cbc-serpent-sse2",
1737 .test = alg_test_null,
1738 }, {
1739 .alg = "__cbc-twofish-avx",
1740 .test = alg_test_null,
1741 }, {
1742 .alg = "__driver-cbc-aes-aesni",
1743 .test = alg_test_null,
1744 .fips_allowed = 1,
1745 }, {
1746 .alg = "__driver-cbc-camellia-aesni",
1747 .test = alg_test_null,
1748 }, {
1749 .alg = "__driver-cbc-camellia-aesni-avx2",
1750 .test = alg_test_null,
1751 }, {
1752 .alg = "__driver-cbc-cast5-avx",
1753 .test = alg_test_null,
1754 }, {
1755 .alg = "__driver-cbc-cast6-avx",
1756 .test = alg_test_null,
1757 }, {
1758 .alg = "__driver-cbc-serpent-avx",
1759 .test = alg_test_null,
1760 }, {
1761 .alg = "__driver-cbc-serpent-avx2",
1762 .test = alg_test_null,
1763 }, {
1764 .alg = "__driver-cbc-serpent-sse2",
1765 .test = alg_test_null,
1766 }, {
1767 .alg = "__driver-cbc-twofish-avx",
1768 .test = alg_test_null,
1769 }, {
1770 .alg = "__driver-ecb-aes-aesni",
1771 .test = alg_test_null,
1772 .fips_allowed = 1,
1773 }, {
1774 .alg = "__driver-ecb-camellia-aesni",
1775 .test = alg_test_null,
1776 }, {
1777 .alg = "__driver-ecb-camellia-aesni-avx2",
1778 .test = alg_test_null,
1779 }, {
1780 .alg = "__driver-ecb-cast5-avx",
1781 .test = alg_test_null,
1782 }, {
1783 .alg = "__driver-ecb-cast6-avx",
1784 .test = alg_test_null,
1785 }, {
1786 .alg = "__driver-ecb-serpent-avx",
1787 .test = alg_test_null,
1788 }, {
1789 .alg = "__driver-ecb-serpent-avx2",
1790 .test = alg_test_null,
1791 }, {
1792 .alg = "__driver-ecb-serpent-sse2",
1793 .test = alg_test_null,
1794 }, {
1795 .alg = "__driver-ecb-twofish-avx",
1796 .test = alg_test_null,
1797 }, {
1798 .alg = "__ghash-pclmulqdqni",
1799 .test = alg_test_null,
1800 .fips_allowed = 1,
1801 }, {
1802 .alg = "ansi_cprng",
1803 .test = alg_test_cprng,
1804 .fips_allowed = 1,
1805 .suite = {
1806 .cprng = {
1807 .vecs = ansi_cprng_aes_tv_template,
1808 .count = ANSI_CPRNG_AES_TEST_VECTORS
1811 }, {
1812 .alg = "authenc(hmac(sha1),cbc(aes))",
1813 .test = alg_test_aead,
1814 .fips_allowed = 1,
1815 .suite = {
1816 .aead = {
1817 .enc = {
1818 .vecs = hmac_sha1_aes_cbc_enc_tv_template,
1819 .count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1823 }, {
1824 .alg = "authenc(hmac(sha256),cbc(aes))",
1825 .test = alg_test_aead,
1826 .fips_allowed = 1,
1827 .suite = {
1828 .aead = {
1829 .enc = {
1830 .vecs = hmac_sha256_aes_cbc_enc_tv_template,
1831 .count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1835 }, {
1836 .alg = "authenc(hmac(sha512),cbc(aes))",
1837 .test = alg_test_aead,
1838 .fips_allowed = 1,
1839 .suite = {
1840 .aead = {
1841 .enc = {
1842 .vecs = hmac_sha512_aes_cbc_enc_tv_template,
1843 .count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
1847 }, {
1848 .alg = "cbc(aes)",
1849 .test = alg_test_skcipher,
1850 .fips_allowed = 1,
1851 .suite = {
1852 .cipher = {
1853 .enc = {
1854 .vecs = aes_cbc_enc_tv_template,
1855 .count = AES_CBC_ENC_TEST_VECTORS
1857 .dec = {
1858 .vecs = aes_cbc_dec_tv_template,
1859 .count = AES_CBC_DEC_TEST_VECTORS
1863 }, {
1864 .alg = "cbc(anubis)",
1865 .test = alg_test_skcipher,
1866 .suite = {
1867 .cipher = {
1868 .enc = {
1869 .vecs = anubis_cbc_enc_tv_template,
1870 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1872 .dec = {
1873 .vecs = anubis_cbc_dec_tv_template,
1874 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1878 }, {
1879 .alg = "cbc(blowfish)",
1880 .test = alg_test_skcipher,
1881 .suite = {
1882 .cipher = {
1883 .enc = {
1884 .vecs = bf_cbc_enc_tv_template,
1885 .count = BF_CBC_ENC_TEST_VECTORS
1887 .dec = {
1888 .vecs = bf_cbc_dec_tv_template,
1889 .count = BF_CBC_DEC_TEST_VECTORS
1893 }, {
1894 .alg = "cbc(camellia)",
1895 .test = alg_test_skcipher,
1896 .suite = {
1897 .cipher = {
1898 .enc = {
1899 .vecs = camellia_cbc_enc_tv_template,
1900 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1902 .dec = {
1903 .vecs = camellia_cbc_dec_tv_template,
1904 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1908 }, {
1909 .alg = "cbc(cast5)",
1910 .test = alg_test_skcipher,
1911 .suite = {
1912 .cipher = {
1913 .enc = {
1914 .vecs = cast5_cbc_enc_tv_template,
1915 .count = CAST5_CBC_ENC_TEST_VECTORS
1917 .dec = {
1918 .vecs = cast5_cbc_dec_tv_template,
1919 .count = CAST5_CBC_DEC_TEST_VECTORS
1923 }, {
1924 .alg = "cbc(cast6)",
1925 .test = alg_test_skcipher,
1926 .suite = {
1927 .cipher = {
1928 .enc = {
1929 .vecs = cast6_cbc_enc_tv_template,
1930 .count = CAST6_CBC_ENC_TEST_VECTORS
1932 .dec = {
1933 .vecs = cast6_cbc_dec_tv_template,
1934 .count = CAST6_CBC_DEC_TEST_VECTORS
1938 }, {
1939 .alg = "cbc(des)",
1940 .test = alg_test_skcipher,
1941 .suite = {
1942 .cipher = {
1943 .enc = {
1944 .vecs = des_cbc_enc_tv_template,
1945 .count = DES_CBC_ENC_TEST_VECTORS
1947 .dec = {
1948 .vecs = des_cbc_dec_tv_template,
1949 .count = DES_CBC_DEC_TEST_VECTORS
1953 }, {
1954 .alg = "cbc(des3_ede)",
1955 .test = alg_test_skcipher,
1956 .fips_allowed = 1,
1957 .suite = {
1958 .cipher = {
1959 .enc = {
1960 .vecs = des3_ede_cbc_enc_tv_template,
1961 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1963 .dec = {
1964 .vecs = des3_ede_cbc_dec_tv_template,
1965 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1969 }, {
1970 .alg = "cbc(serpent)",
1971 .test = alg_test_skcipher,
1972 .suite = {
1973 .cipher = {
1974 .enc = {
1975 .vecs = serpent_cbc_enc_tv_template,
1976 .count = SERPENT_CBC_ENC_TEST_VECTORS
1978 .dec = {
1979 .vecs = serpent_cbc_dec_tv_template,
1980 .count = SERPENT_CBC_DEC_TEST_VECTORS
1984 }, {
1985 .alg = "cbc(twofish)",
1986 .test = alg_test_skcipher,
1987 .suite = {
1988 .cipher = {
1989 .enc = {
1990 .vecs = tf_cbc_enc_tv_template,
1991 .count = TF_CBC_ENC_TEST_VECTORS
1993 .dec = {
1994 .vecs = tf_cbc_dec_tv_template,
1995 .count = TF_CBC_DEC_TEST_VECTORS
1999 }, {
2000 .alg = "ccm(aes)",
2001 .test = alg_test_aead,
2002 .fips_allowed = 1,
2003 .suite = {
2004 .aead = {
2005 .enc = {
2006 .vecs = aes_ccm_enc_tv_template,
2007 .count = AES_CCM_ENC_TEST_VECTORS
2009 .dec = {
2010 .vecs = aes_ccm_dec_tv_template,
2011 .count = AES_CCM_DEC_TEST_VECTORS
2015 }, {
2016 .alg = "cmac(aes)",
2017 .test = alg_test_hash,
2018 .suite = {
2019 .hash = {
2020 .vecs = aes_cmac128_tv_template,
2021 .count = CMAC_AES_TEST_VECTORS
2024 }, {
2025 .alg = "cmac(des3_ede)",
2026 .test = alg_test_hash,
2027 .suite = {
2028 .hash = {
2029 .vecs = des3_ede_cmac64_tv_template,
2030 .count = CMAC_DES3_EDE_TEST_VECTORS
2033 }, {
2034 .alg = "compress_null",
2035 .test = alg_test_null,
2036 }, {
2037 .alg = "crc32c",
2038 .test = alg_test_crc32c,
2039 .fips_allowed = 1,
2040 .suite = {
2041 .hash = {
2042 .vecs = crc32c_tv_template,
2043 .count = CRC32C_TEST_VECTORS
2046 }, {
2047 .alg = "crct10dif",
2048 .test = alg_test_hash,
2049 .fips_allowed = 1,
2050 .suite = {
2051 .hash = {
2052 .vecs = crct10dif_tv_template,
2053 .count = CRCT10DIF_TEST_VECTORS
2056 }, {
2057 .alg = "cryptd(__driver-cbc-aes-aesni)",
2058 .test = alg_test_null,
2059 .fips_allowed = 1,
2060 }, {
2061 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2062 .test = alg_test_null,
2063 }, {
2064 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2065 .test = alg_test_null,
2066 }, {
2067 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2068 .test = alg_test_null,
2069 }, {
2070 .alg = "cryptd(__driver-ecb-aes-aesni)",
2071 .test = alg_test_null,
2072 .fips_allowed = 1,
2073 }, {
2074 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2075 .test = alg_test_null,
2076 }, {
2077 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2078 .test = alg_test_null,
2079 }, {
2080 .alg = "cryptd(__driver-ecb-cast5-avx)",
2081 .test = alg_test_null,
2082 }, {
2083 .alg = "cryptd(__driver-ecb-cast6-avx)",
2084 .test = alg_test_null,
2085 }, {
2086 .alg = "cryptd(__driver-ecb-serpent-avx)",
2087 .test = alg_test_null,
2088 }, {
2089 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2090 .test = alg_test_null,
2091 }, {
2092 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2093 .test = alg_test_null,
2094 }, {
2095 .alg = "cryptd(__driver-ecb-twofish-avx)",
2096 .test = alg_test_null,
2097 }, {
2098 .alg = "cryptd(__driver-gcm-aes-aesni)",
2099 .test = alg_test_null,
2100 .fips_allowed = 1,
2101 }, {
2102 .alg = "cryptd(__ghash-pclmulqdqni)",
2103 .test = alg_test_null,
2104 .fips_allowed = 1,
2105 }, {
2106 .alg = "ctr(aes)",
2107 .test = alg_test_skcipher,
2108 .fips_allowed = 1,
2109 .suite = {
2110 .cipher = {
2111 .enc = {
2112 .vecs = aes_ctr_enc_tv_template,
2113 .count = AES_CTR_ENC_TEST_VECTORS
2115 .dec = {
2116 .vecs = aes_ctr_dec_tv_template,
2117 .count = AES_CTR_DEC_TEST_VECTORS
2121 }, {
2122 .alg = "ctr(blowfish)",
2123 .test = alg_test_skcipher,
2124 .suite = {
2125 .cipher = {
2126 .enc = {
2127 .vecs = bf_ctr_enc_tv_template,
2128 .count = BF_CTR_ENC_TEST_VECTORS
2130 .dec = {
2131 .vecs = bf_ctr_dec_tv_template,
2132 .count = BF_CTR_DEC_TEST_VECTORS
2136 }, {
2137 .alg = "ctr(camellia)",
2138 .test = alg_test_skcipher,
2139 .suite = {
2140 .cipher = {
2141 .enc = {
2142 .vecs = camellia_ctr_enc_tv_template,
2143 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2145 .dec = {
2146 .vecs = camellia_ctr_dec_tv_template,
2147 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2151 }, {
2152 .alg = "ctr(cast5)",
2153 .test = alg_test_skcipher,
2154 .suite = {
2155 .cipher = {
2156 .enc = {
2157 .vecs = cast5_ctr_enc_tv_template,
2158 .count = CAST5_CTR_ENC_TEST_VECTORS
2160 .dec = {
2161 .vecs = cast5_ctr_dec_tv_template,
2162 .count = CAST5_CTR_DEC_TEST_VECTORS
2166 }, {
2167 .alg = "ctr(cast6)",
2168 .test = alg_test_skcipher,
2169 .suite = {
2170 .cipher = {
2171 .enc = {
2172 .vecs = cast6_ctr_enc_tv_template,
2173 .count = CAST6_CTR_ENC_TEST_VECTORS
2175 .dec = {
2176 .vecs = cast6_ctr_dec_tv_template,
2177 .count = CAST6_CTR_DEC_TEST_VECTORS
2181 }, {
2182 .alg = "ctr(des)",
2183 .test = alg_test_skcipher,
2184 .suite = {
2185 .cipher = {
2186 .enc = {
2187 .vecs = des_ctr_enc_tv_template,
2188 .count = DES_CTR_ENC_TEST_VECTORS
2190 .dec = {
2191 .vecs = des_ctr_dec_tv_template,
2192 .count = DES_CTR_DEC_TEST_VECTORS
2196 }, {
2197 .alg = "ctr(des3_ede)",
2198 .test = alg_test_skcipher,
2199 .suite = {
2200 .cipher = {
2201 .enc = {
2202 .vecs = des3_ede_ctr_enc_tv_template,
2203 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2205 .dec = {
2206 .vecs = des3_ede_ctr_dec_tv_template,
2207 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2211 }, {
2212 .alg = "ctr(serpent)",
2213 .test = alg_test_skcipher,
2214 .suite = {
2215 .cipher = {
2216 .enc = {
2217 .vecs = serpent_ctr_enc_tv_template,
2218 .count = SERPENT_CTR_ENC_TEST_VECTORS
2220 .dec = {
2221 .vecs = serpent_ctr_dec_tv_template,
2222 .count = SERPENT_CTR_DEC_TEST_VECTORS
2226 }, {
2227 .alg = "ctr(twofish)",
2228 .test = alg_test_skcipher,
2229 .suite = {
2230 .cipher = {
2231 .enc = {
2232 .vecs = tf_ctr_enc_tv_template,
2233 .count = TF_CTR_ENC_TEST_VECTORS
2235 .dec = {
2236 .vecs = tf_ctr_dec_tv_template,
2237 .count = TF_CTR_DEC_TEST_VECTORS
2241 }, {
2242 .alg = "cts(cbc(aes))",
2243 .test = alg_test_skcipher,
2244 .suite = {
2245 .cipher = {
2246 .enc = {
2247 .vecs = cts_mode_enc_tv_template,
2248 .count = CTS_MODE_ENC_TEST_VECTORS
2250 .dec = {
2251 .vecs = cts_mode_dec_tv_template,
2252 .count = CTS_MODE_DEC_TEST_VECTORS
2256 }, {
2257 .alg = "deflate",
2258 .test = alg_test_comp,
2259 .fips_allowed = 1,
2260 .suite = {
2261 .comp = {
2262 .comp = {
2263 .vecs = deflate_comp_tv_template,
2264 .count = DEFLATE_COMP_TEST_VECTORS
2266 .decomp = {
2267 .vecs = deflate_decomp_tv_template,
2268 .count = DEFLATE_DECOMP_TEST_VECTORS
2272 }, {
2273 .alg = "digest_null",
2274 .test = alg_test_null,
2275 }, {
2276 .alg = "ecb(__aes-aesni)",
2277 .test = alg_test_null,
2278 .fips_allowed = 1,
2279 }, {
2280 .alg = "ecb(aes)",
2281 .test = alg_test_skcipher,
2282 .fips_allowed = 1,
2283 .suite = {
2284 .cipher = {
2285 .enc = {
2286 .vecs = aes_enc_tv_template,
2287 .count = AES_ENC_TEST_VECTORS
2289 .dec = {
2290 .vecs = aes_dec_tv_template,
2291 .count = AES_DEC_TEST_VECTORS
2295 }, {
2296 .alg = "ecb(anubis)",
2297 .test = alg_test_skcipher,
2298 .suite = {
2299 .cipher = {
2300 .enc = {
2301 .vecs = anubis_enc_tv_template,
2302 .count = ANUBIS_ENC_TEST_VECTORS
2304 .dec = {
2305 .vecs = anubis_dec_tv_template,
2306 .count = ANUBIS_DEC_TEST_VECTORS
2310 }, {
2311 .alg = "ecb(arc4)",
2312 .test = alg_test_skcipher,
2313 .suite = {
2314 .cipher = {
2315 .enc = {
2316 .vecs = arc4_enc_tv_template,
2317 .count = ARC4_ENC_TEST_VECTORS
2319 .dec = {
2320 .vecs = arc4_dec_tv_template,
2321 .count = ARC4_DEC_TEST_VECTORS
2325 }, {
2326 .alg = "ecb(blowfish)",
2327 .test = alg_test_skcipher,
2328 .suite = {
2329 .cipher = {
2330 .enc = {
2331 .vecs = bf_enc_tv_template,
2332 .count = BF_ENC_TEST_VECTORS
2334 .dec = {
2335 .vecs = bf_dec_tv_template,
2336 .count = BF_DEC_TEST_VECTORS
2340 }, {
2341 .alg = "ecb(camellia)",
2342 .test = alg_test_skcipher,
2343 .suite = {
2344 .cipher = {
2345 .enc = {
2346 .vecs = camellia_enc_tv_template,
2347 .count = CAMELLIA_ENC_TEST_VECTORS
2349 .dec = {
2350 .vecs = camellia_dec_tv_template,
2351 .count = CAMELLIA_DEC_TEST_VECTORS
2355 }, {
2356 .alg = "ecb(cast5)",
2357 .test = alg_test_skcipher,
2358 .suite = {
2359 .cipher = {
2360 .enc = {
2361 .vecs = cast5_enc_tv_template,
2362 .count = CAST5_ENC_TEST_VECTORS
2364 .dec = {
2365 .vecs = cast5_dec_tv_template,
2366 .count = CAST5_DEC_TEST_VECTORS
2370 }, {
2371 .alg = "ecb(cast6)",
2372 .test = alg_test_skcipher,
2373 .suite = {
2374 .cipher = {
2375 .enc = {
2376 .vecs = cast6_enc_tv_template,
2377 .count = CAST6_ENC_TEST_VECTORS
2379 .dec = {
2380 .vecs = cast6_dec_tv_template,
2381 .count = CAST6_DEC_TEST_VECTORS
2385 }, {
2386 .alg = "ecb(cipher_null)",
2387 .test = alg_test_null,
2388 }, {
2389 .alg = "ecb(des)",
2390 .test = alg_test_skcipher,
2391 .fips_allowed = 1,
2392 .suite = {
2393 .cipher = {
2394 .enc = {
2395 .vecs = des_enc_tv_template,
2396 .count = DES_ENC_TEST_VECTORS
2398 .dec = {
2399 .vecs = des_dec_tv_template,
2400 .count = DES_DEC_TEST_VECTORS
2404 }, {
2405 .alg = "ecb(des3_ede)",
2406 .test = alg_test_skcipher,
2407 .fips_allowed = 1,
2408 .suite = {
2409 .cipher = {
2410 .enc = {
2411 .vecs = des3_ede_enc_tv_template,
2412 .count = DES3_EDE_ENC_TEST_VECTORS
2414 .dec = {
2415 .vecs = des3_ede_dec_tv_template,
2416 .count = DES3_EDE_DEC_TEST_VECTORS
2420 }, {
2421 .alg = "ecb(fcrypt)",
2422 .test = alg_test_skcipher,
2423 .suite = {
2424 .cipher = {
2425 .enc = {
2426 .vecs = fcrypt_pcbc_enc_tv_template,
2427 .count = 1
2429 .dec = {
2430 .vecs = fcrypt_pcbc_dec_tv_template,
2431 .count = 1
2435 }, {
2436 .alg = "ecb(khazad)",
2437 .test = alg_test_skcipher,
2438 .suite = {
2439 .cipher = {
2440 .enc = {
2441 .vecs = khazad_enc_tv_template,
2442 .count = KHAZAD_ENC_TEST_VECTORS
2444 .dec = {
2445 .vecs = khazad_dec_tv_template,
2446 .count = KHAZAD_DEC_TEST_VECTORS
2450 }, {
2451 .alg = "ecb(seed)",
2452 .test = alg_test_skcipher,
2453 .suite = {
2454 .cipher = {
2455 .enc = {
2456 .vecs = seed_enc_tv_template,
2457 .count = SEED_ENC_TEST_VECTORS
2459 .dec = {
2460 .vecs = seed_dec_tv_template,
2461 .count = SEED_DEC_TEST_VECTORS
2465 }, {
2466 .alg = "ecb(serpent)",
2467 .test = alg_test_skcipher,
2468 .suite = {
2469 .cipher = {
2470 .enc = {
2471 .vecs = serpent_enc_tv_template,
2472 .count = SERPENT_ENC_TEST_VECTORS
2474 .dec = {
2475 .vecs = serpent_dec_tv_template,
2476 .count = SERPENT_DEC_TEST_VECTORS
2480 }, {
2481 .alg = "ecb(tea)",
2482 .test = alg_test_skcipher,
2483 .suite = {
2484 .cipher = {
2485 .enc = {
2486 .vecs = tea_enc_tv_template,
2487 .count = TEA_ENC_TEST_VECTORS
2489 .dec = {
2490 .vecs = tea_dec_tv_template,
2491 .count = TEA_DEC_TEST_VECTORS
2495 }, {
2496 .alg = "ecb(tnepres)",
2497 .test = alg_test_skcipher,
2498 .suite = {
2499 .cipher = {
2500 .enc = {
2501 .vecs = tnepres_enc_tv_template,
2502 .count = TNEPRES_ENC_TEST_VECTORS
2504 .dec = {
2505 .vecs = tnepres_dec_tv_template,
2506 .count = TNEPRES_DEC_TEST_VECTORS
2510 }, {
2511 .alg = "ecb(twofish)",
2512 .test = alg_test_skcipher,
2513 .suite = {
2514 .cipher = {
2515 .enc = {
2516 .vecs = tf_enc_tv_template,
2517 .count = TF_ENC_TEST_VECTORS
2519 .dec = {
2520 .vecs = tf_dec_tv_template,
2521 .count = TF_DEC_TEST_VECTORS
2525 }, {
2526 .alg = "ecb(xeta)",
2527 .test = alg_test_skcipher,
2528 .suite = {
2529 .cipher = {
2530 .enc = {
2531 .vecs = xeta_enc_tv_template,
2532 .count = XETA_ENC_TEST_VECTORS
2534 .dec = {
2535 .vecs = xeta_dec_tv_template,
2536 .count = XETA_DEC_TEST_VECTORS
2540 }, {
2541 .alg = "ecb(xtea)",
2542 .test = alg_test_skcipher,
2543 .suite = {
2544 .cipher = {
2545 .enc = {
2546 .vecs = xtea_enc_tv_template,
2547 .count = XTEA_ENC_TEST_VECTORS
2549 .dec = {
2550 .vecs = xtea_dec_tv_template,
2551 .count = XTEA_DEC_TEST_VECTORS
2555 }, {
2556 .alg = "gcm(aes)",
2557 .test = alg_test_aead,
2558 .fips_allowed = 1,
2559 .suite = {
2560 .aead = {
2561 .enc = {
2562 .vecs = aes_gcm_enc_tv_template,
2563 .count = AES_GCM_ENC_TEST_VECTORS
2565 .dec = {
2566 .vecs = aes_gcm_dec_tv_template,
2567 .count = AES_GCM_DEC_TEST_VECTORS
2571 }, {
2572 .alg = "ghash",
2573 .test = alg_test_hash,
2574 .fips_allowed = 1,
2575 .suite = {
2576 .hash = {
2577 .vecs = ghash_tv_template,
2578 .count = GHASH_TEST_VECTORS
2581 }, {
2582 .alg = "hmac(crc32)",
2583 .test = alg_test_hash,
2584 .suite = {
2585 .hash = {
2586 .vecs = bfin_crc_tv_template,
2587 .count = BFIN_CRC_TEST_VECTORS
2590 }, {
2591 .alg = "hmac(md5)",
2592 .test = alg_test_hash,
2593 .suite = {
2594 .hash = {
2595 .vecs = hmac_md5_tv_template,
2596 .count = HMAC_MD5_TEST_VECTORS
2599 }, {
2600 .alg = "hmac(rmd128)",
2601 .test = alg_test_hash,
2602 .suite = {
2603 .hash = {
2604 .vecs = hmac_rmd128_tv_template,
2605 .count = HMAC_RMD128_TEST_VECTORS
2608 }, {
2609 .alg = "hmac(rmd160)",
2610 .test = alg_test_hash,
2611 .suite = {
2612 .hash = {
2613 .vecs = hmac_rmd160_tv_template,
2614 .count = HMAC_RMD160_TEST_VECTORS
2617 }, {
2618 .alg = "hmac(sha1)",
2619 .test = alg_test_hash,
2620 .fips_allowed = 1,
2621 .suite = {
2622 .hash = {
2623 .vecs = hmac_sha1_tv_template,
2624 .count = HMAC_SHA1_TEST_VECTORS
2627 }, {
2628 .alg = "hmac(sha224)",
2629 .test = alg_test_hash,
2630 .fips_allowed = 1,
2631 .suite = {
2632 .hash = {
2633 .vecs = hmac_sha224_tv_template,
2634 .count = HMAC_SHA224_TEST_VECTORS
2637 }, {
2638 .alg = "hmac(sha256)",
2639 .test = alg_test_hash,
2640 .fips_allowed = 1,
2641 .suite = {
2642 .hash = {
2643 .vecs = hmac_sha256_tv_template,
2644 .count = HMAC_SHA256_TEST_VECTORS
2647 }, {
2648 .alg = "hmac(sha384)",
2649 .test = alg_test_hash,
2650 .fips_allowed = 1,
2651 .suite = {
2652 .hash = {
2653 .vecs = hmac_sha384_tv_template,
2654 .count = HMAC_SHA384_TEST_VECTORS
2657 }, {
2658 .alg = "hmac(sha512)",
2659 .test = alg_test_hash,
2660 .fips_allowed = 1,
2661 .suite = {
2662 .hash = {
2663 .vecs = hmac_sha512_tv_template,
2664 .count = HMAC_SHA512_TEST_VECTORS
2667 }, {
2668 .alg = "lrw(aes)",
2669 .test = alg_test_skcipher,
2670 .suite = {
2671 .cipher = {
2672 .enc = {
2673 .vecs = aes_lrw_enc_tv_template,
2674 .count = AES_LRW_ENC_TEST_VECTORS
2676 .dec = {
2677 .vecs = aes_lrw_dec_tv_template,
2678 .count = AES_LRW_DEC_TEST_VECTORS
2682 }, {
2683 .alg = "lrw(camellia)",
2684 .test = alg_test_skcipher,
2685 .suite = {
2686 .cipher = {
2687 .enc = {
2688 .vecs = camellia_lrw_enc_tv_template,
2689 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
2691 .dec = {
2692 .vecs = camellia_lrw_dec_tv_template,
2693 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
2697 }, {
2698 .alg = "lrw(cast6)",
2699 .test = alg_test_skcipher,
2700 .suite = {
2701 .cipher = {
2702 .enc = {
2703 .vecs = cast6_lrw_enc_tv_template,
2704 .count = CAST6_LRW_ENC_TEST_VECTORS
2706 .dec = {
2707 .vecs = cast6_lrw_dec_tv_template,
2708 .count = CAST6_LRW_DEC_TEST_VECTORS
2712 }, {
2713 .alg = "lrw(serpent)",
2714 .test = alg_test_skcipher,
2715 .suite = {
2716 .cipher = {
2717 .enc = {
2718 .vecs = serpent_lrw_enc_tv_template,
2719 .count = SERPENT_LRW_ENC_TEST_VECTORS
2721 .dec = {
2722 .vecs = serpent_lrw_dec_tv_template,
2723 .count = SERPENT_LRW_DEC_TEST_VECTORS
2727 }, {
2728 .alg = "lrw(twofish)",
2729 .test = alg_test_skcipher,
2730 .suite = {
2731 .cipher = {
2732 .enc = {
2733 .vecs = tf_lrw_enc_tv_template,
2734 .count = TF_LRW_ENC_TEST_VECTORS
2736 .dec = {
2737 .vecs = tf_lrw_dec_tv_template,
2738 .count = TF_LRW_DEC_TEST_VECTORS
2742 }, {
2743 .alg = "lzo",
2744 .test = alg_test_comp,
2745 .fips_allowed = 1,
2746 .suite = {
2747 .comp = {
2748 .comp = {
2749 .vecs = lzo_comp_tv_template,
2750 .count = LZO_COMP_TEST_VECTORS
2752 .decomp = {
2753 .vecs = lzo_decomp_tv_template,
2754 .count = LZO_DECOMP_TEST_VECTORS
2758 }, {
2759 .alg = "md4",
2760 .test = alg_test_hash,
2761 .suite = {
2762 .hash = {
2763 .vecs = md4_tv_template,
2764 .count = MD4_TEST_VECTORS
2767 }, {
2768 .alg = "md5",
2769 .test = alg_test_hash,
2770 .suite = {
2771 .hash = {
2772 .vecs = md5_tv_template,
2773 .count = MD5_TEST_VECTORS
2776 }, {
2777 .alg = "michael_mic",
2778 .test = alg_test_hash,
2779 .suite = {
2780 .hash = {
2781 .vecs = michael_mic_tv_template,
2782 .count = MICHAEL_MIC_TEST_VECTORS
2785 }, {
2786 .alg = "ofb(aes)",
2787 .test = alg_test_skcipher,
2788 .fips_allowed = 1,
2789 .suite = {
2790 .cipher = {
2791 .enc = {
2792 .vecs = aes_ofb_enc_tv_template,
2793 .count = AES_OFB_ENC_TEST_VECTORS
2795 .dec = {
2796 .vecs = aes_ofb_dec_tv_template,
2797 .count = AES_OFB_DEC_TEST_VECTORS
2801 }, {
2802 .alg = "pcbc(fcrypt)",
2803 .test = alg_test_skcipher,
2804 .suite = {
2805 .cipher = {
2806 .enc = {
2807 .vecs = fcrypt_pcbc_enc_tv_template,
2808 .count = FCRYPT_ENC_TEST_VECTORS
2810 .dec = {
2811 .vecs = fcrypt_pcbc_dec_tv_template,
2812 .count = FCRYPT_DEC_TEST_VECTORS
2816 }, {
2817 .alg = "rfc3686(ctr(aes))",
2818 .test = alg_test_skcipher,
2819 .fips_allowed = 1,
2820 .suite = {
2821 .cipher = {
2822 .enc = {
2823 .vecs = aes_ctr_rfc3686_enc_tv_template,
2824 .count = AES_CTR_3686_ENC_TEST_VECTORS
2826 .dec = {
2827 .vecs = aes_ctr_rfc3686_dec_tv_template,
2828 .count = AES_CTR_3686_DEC_TEST_VECTORS
2832 }, {
2833 .alg = "rfc4106(gcm(aes))",
2834 .test = alg_test_aead,
2835 .suite = {
2836 .aead = {
2837 .enc = {
2838 .vecs = aes_gcm_rfc4106_enc_tv_template,
2839 .count = AES_GCM_4106_ENC_TEST_VECTORS
2841 .dec = {
2842 .vecs = aes_gcm_rfc4106_dec_tv_template,
2843 .count = AES_GCM_4106_DEC_TEST_VECTORS
2847 }, {
2848 .alg = "rfc4309(ccm(aes))",
2849 .test = alg_test_aead,
2850 .fips_allowed = 1,
2851 .suite = {
2852 .aead = {
2853 .enc = {
2854 .vecs = aes_ccm_rfc4309_enc_tv_template,
2855 .count = AES_CCM_4309_ENC_TEST_VECTORS
2857 .dec = {
2858 .vecs = aes_ccm_rfc4309_dec_tv_template,
2859 .count = AES_CCM_4309_DEC_TEST_VECTORS
2863 }, {
2864 .alg = "rfc4543(gcm(aes))",
2865 .test = alg_test_aead,
2866 .suite = {
2867 .aead = {
2868 .enc = {
2869 .vecs = aes_gcm_rfc4543_enc_tv_template,
2870 .count = AES_GCM_4543_ENC_TEST_VECTORS
2872 .dec = {
2873 .vecs = aes_gcm_rfc4543_dec_tv_template,
2874 .count = AES_GCM_4543_DEC_TEST_VECTORS
2878 }, {
2879 .alg = "rmd128",
2880 .test = alg_test_hash,
2881 .suite = {
2882 .hash = {
2883 .vecs = rmd128_tv_template,
2884 .count = RMD128_TEST_VECTORS
2887 }, {
2888 .alg = "rmd160",
2889 .test = alg_test_hash,
2890 .suite = {
2891 .hash = {
2892 .vecs = rmd160_tv_template,
2893 .count = RMD160_TEST_VECTORS
2896 }, {
2897 .alg = "rmd256",
2898 .test = alg_test_hash,
2899 .suite = {
2900 .hash = {
2901 .vecs = rmd256_tv_template,
2902 .count = RMD256_TEST_VECTORS
2905 }, {
2906 .alg = "rmd320",
2907 .test = alg_test_hash,
2908 .suite = {
2909 .hash = {
2910 .vecs = rmd320_tv_template,
2911 .count = RMD320_TEST_VECTORS
2914 }, {
2915 .alg = "salsa20",
2916 .test = alg_test_skcipher,
2917 .suite = {
2918 .cipher = {
2919 .enc = {
2920 .vecs = salsa20_stream_enc_tv_template,
2921 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2925 }, {
2926 .alg = "sha1",
2927 .test = alg_test_hash,
2928 .fips_allowed = 1,
2929 .suite = {
2930 .hash = {
2931 .vecs = sha1_tv_template,
2932 .count = SHA1_TEST_VECTORS
2935 }, {
2936 .alg = "sha224",
2937 .test = alg_test_hash,
2938 .fips_allowed = 1,
2939 .suite = {
2940 .hash = {
2941 .vecs = sha224_tv_template,
2942 .count = SHA224_TEST_VECTORS
2945 }, {
2946 .alg = "sha256",
2947 .test = alg_test_hash,
2948 .fips_allowed = 1,
2949 .suite = {
2950 .hash = {
2951 .vecs = sha256_tv_template,
2952 .count = SHA256_TEST_VECTORS
2955 }, {
2956 .alg = "sha384",
2957 .test = alg_test_hash,
2958 .fips_allowed = 1,
2959 .suite = {
2960 .hash = {
2961 .vecs = sha384_tv_template,
2962 .count = SHA384_TEST_VECTORS
2965 }, {
2966 .alg = "sha512",
2967 .test = alg_test_hash,
2968 .fips_allowed = 1,
2969 .suite = {
2970 .hash = {
2971 .vecs = sha512_tv_template,
2972 .count = SHA512_TEST_VECTORS
2975 }, {
2976 .alg = "tgr128",
2977 .test = alg_test_hash,
2978 .suite = {
2979 .hash = {
2980 .vecs = tgr128_tv_template,
2981 .count = TGR128_TEST_VECTORS
2984 }, {
2985 .alg = "tgr160",
2986 .test = alg_test_hash,
2987 .suite = {
2988 .hash = {
2989 .vecs = tgr160_tv_template,
2990 .count = TGR160_TEST_VECTORS
2993 }, {
2994 .alg = "tgr192",
2995 .test = alg_test_hash,
2996 .suite = {
2997 .hash = {
2998 .vecs = tgr192_tv_template,
2999 .count = TGR192_TEST_VECTORS
3002 }, {
3003 .alg = "vmac(aes)",
3004 .test = alg_test_hash,
3005 .suite = {
3006 .hash = {
3007 .vecs = aes_vmac128_tv_template,
3008 .count = VMAC_AES_TEST_VECTORS
3011 }, {
3012 .alg = "wp256",
3013 .test = alg_test_hash,
3014 .suite = {
3015 .hash = {
3016 .vecs = wp256_tv_template,
3017 .count = WP256_TEST_VECTORS
3020 }, {
3021 .alg = "wp384",
3022 .test = alg_test_hash,
3023 .suite = {
3024 .hash = {
3025 .vecs = wp384_tv_template,
3026 .count = WP384_TEST_VECTORS
3029 }, {
3030 .alg = "wp512",
3031 .test = alg_test_hash,
3032 .suite = {
3033 .hash = {
3034 .vecs = wp512_tv_template,
3035 .count = WP512_TEST_VECTORS
3038 }, {
3039 .alg = "xcbc(aes)",
3040 .test = alg_test_hash,
3041 .suite = {
3042 .hash = {
3043 .vecs = aes_xcbc128_tv_template,
3044 .count = XCBC_AES_TEST_VECTORS
3047 }, {
3048 .alg = "xts(aes)",
3049 .test = alg_test_skcipher,
3050 .fips_allowed = 1,
3051 .suite = {
3052 .cipher = {
3053 .enc = {
3054 .vecs = aes_xts_enc_tv_template,
3055 .count = AES_XTS_ENC_TEST_VECTORS
3057 .dec = {
3058 .vecs = aes_xts_dec_tv_template,
3059 .count = AES_XTS_DEC_TEST_VECTORS
3063 }, {
3064 .alg = "xts(camellia)",
3065 .test = alg_test_skcipher,
3066 .suite = {
3067 .cipher = {
3068 .enc = {
3069 .vecs = camellia_xts_enc_tv_template,
3070 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3072 .dec = {
3073 .vecs = camellia_xts_dec_tv_template,
3074 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3078 }, {
3079 .alg = "xts(cast6)",
3080 .test = alg_test_skcipher,
3081 .suite = {
3082 .cipher = {
3083 .enc = {
3084 .vecs = cast6_xts_enc_tv_template,
3085 .count = CAST6_XTS_ENC_TEST_VECTORS
3087 .dec = {
3088 .vecs = cast6_xts_dec_tv_template,
3089 .count = CAST6_XTS_DEC_TEST_VECTORS
3093 }, {
3094 .alg = "xts(serpent)",
3095 .test = alg_test_skcipher,
3096 .suite = {
3097 .cipher = {
3098 .enc = {
3099 .vecs = serpent_xts_enc_tv_template,
3100 .count = SERPENT_XTS_ENC_TEST_VECTORS
3102 .dec = {
3103 .vecs = serpent_xts_dec_tv_template,
3104 .count = SERPENT_XTS_DEC_TEST_VECTORS
3108 }, {
3109 .alg = "xts(twofish)",
3110 .test = alg_test_skcipher,
3111 .suite = {
3112 .cipher = {
3113 .enc = {
3114 .vecs = tf_xts_enc_tv_template,
3115 .count = TF_XTS_ENC_TEST_VECTORS
3117 .dec = {
3118 .vecs = tf_xts_dec_tv_template,
3119 .count = TF_XTS_DEC_TEST_VECTORS
3123 }, {
3124 .alg = "zlib",
3125 .test = alg_test_pcomp,
3126 .fips_allowed = 1,
3127 .suite = {
3128 .pcomp = {
3129 .comp = {
3130 .vecs = zlib_comp_tv_template,
3131 .count = ZLIB_COMP_TEST_VECTORS
3133 .decomp = {
3134 .vecs = zlib_decomp_tv_template,
3135 .count = ZLIB_DECOMP_TEST_VECTORS
3142 static bool alg_test_descs_checked;
3144 static void alg_test_descs_check_order(void)
3146 int i;
3148 /* only check once */
3149 if (alg_test_descs_checked)
3150 return;
3152 alg_test_descs_checked = true;
3154 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3155 int diff = strcmp(alg_test_descs[i - 1].alg,
3156 alg_test_descs[i].alg);
3158 if (WARN_ON(diff > 0)) {
3159 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3160 alg_test_descs[i - 1].alg,
3161 alg_test_descs[i].alg);
3164 if (WARN_ON(diff == 0)) {
3165 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3166 alg_test_descs[i].alg);
3171 static int alg_find_test(const char *alg)
3173 int start = 0;
3174 int end = ARRAY_SIZE(alg_test_descs);
3176 while (start < end) {
3177 int i = (start + end) / 2;
3178 int diff = strcmp(alg_test_descs[i].alg, alg);
3180 if (diff > 0) {
3181 end = i;
3182 continue;
3185 if (diff < 0) {
3186 start = i + 1;
3187 continue;
3190 return i;
3193 return -1;
/*
 * Run the self-tests registered for an algorithm instance.
 *
 * @driver: driver-specific implementation name (e.g. "aes-aesni")
 * @alg:    generic algorithm name (e.g. "aes")
 * @type:   crypto algorithm type flags
 * @mask:   crypto algorithm type mask
 *
 * Returns 0 when all applicable tests pass (or none exist), non-zero on
 * failure.  In FIPS mode a test failure panics the kernel, and a
 * non-FIPS-allowed algorithm is rejected with -EINVAL.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* one-time verification that the test table is sorted/unique */
	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/*
		 * Bare (single-block) ciphers are tested through their
		 * "ecb(...)" template entry; build that name here.
		 */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* look up test entries for both the generic and the driver name */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	/* run whichever entries exist; OR results so any failure sticks */
	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS requires a hard stop on self-test failure */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	/* no test vectors known: not an error, the algorithm is allowed */
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
3256 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3258 EXPORT_SYMBOL_GPL(alg_test);