Linux 3.12.46
[linux/fpc-iii.git] / crypto / testmgr.c
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
23 #include <crypto/hash.h>
24 #include <linux/err.h>
25 #include <linux/module.h>
26 #include <linux/scatterlist.h>
27 #include <linux/slab.h>
28 #include <linux/string.h>
29 #include <crypto/rng.h>
31 #include "internal.h"
33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
35 /* a perfect nop */
36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
38 return 0;
41 #else
43 #include "testmgr.h"
46 * Need slab memory for testing (size in number of pages).
48 #define XBUFSIZE 8
51 * Indexes into the xbuf to simulate cross-page access.
53 #define IDX1 32
54 #define IDX2 32400
55 #define IDX3 1
56 #define IDX4 8193
57 #define IDX5 22222
58 #define IDX6 17101
59 #define IDX7 27333
60 #define IDX8 3000
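/*
 * How the IDX values are used (illustrative note): the chunked tests below
 * place each scatterlist segment at xbuf[IDX[k] >> PAGE_SHIFT] +
 * offset_in_page(IDX[k]), so every index selects one of the XBUFSIZE pages
 * and an offset within it.  Assuming 4 KiB pages (PAGE_SHIFT == 12), e.g.:
 *
 *	IDX1 ==    32  ->  xbuf[0] + 32
 *	IDX4 ==  8193  ->  xbuf[2] + 1
 *	IDX2 == 32400  ->  xbuf[7] + 3728
 *
 * i.e. the segments are deliberately scattered across different pages and
 * odd offsets to exercise cross-page access.
 */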
63 * Used by test_cipher()
65 #define ENCRYPT 1
66 #define DECRYPT 0
68 struct tcrypt_result {
69 struct completion completion;
70 int err;
73 struct aead_test_suite {
74 struct {
75 struct aead_testvec *vecs;
76 unsigned int count;
77 } enc, dec;
80 struct cipher_test_suite {
81 struct {
82 struct cipher_testvec *vecs;
83 unsigned int count;
84 } enc, dec;
87 struct comp_test_suite {
88 struct {
89 struct comp_testvec *vecs;
90 unsigned int count;
91 } comp, decomp;
94 struct pcomp_test_suite {
95 struct {
96 struct pcomp_testvec *vecs;
97 unsigned int count;
98 } comp, decomp;
101 struct hash_test_suite {
102 struct hash_testvec *vecs;
103 unsigned int count;
106 struct cprng_test_suite {
107 struct cprng_testvec *vecs;
108 unsigned int count;
111 struct alg_test_desc {
112 const char *alg;
113 int (*test)(const struct alg_test_desc *desc, const char *driver,
114 u32 type, u32 mask);
115 int fips_allowed; /* set if alg is allowed in fips mode */
117 union {
118 struct aead_test_suite aead;
119 struct cipher_test_suite cipher;
120 struct comp_test_suite comp;
121 struct pcomp_test_suite pcomp;
122 struct hash_test_suite hash;
123 struct cprng_test_suite cprng;
124 } suite;
127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
129 static void hexdump(unsigned char *buf, unsigned int len)
131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
132 16, 1,
133 buf, len, false);
136 static void tcrypt_complete(struct crypto_async_request *req, int err)
138 struct tcrypt_result *res = req->data;
140 if (err == -EINPROGRESS)
141 return;
143 res->err = err;
144 complete(&res->completion);
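/*
 * Note on the completion callback (sketch of the expected flow, based on
 * how the callers below use it): a request submitted with
 * CRYPTO_TFM_REQ_MAY_BACKLOG may invoke this callback first with
 * -EINPROGRESS, merely to signal that a backlogged request has been
 * started; only a later invocation carries the final status.  That is why
 * the -EINPROGRESS notification is ignored here and the waiter reads
 * res->err only after complete() fires.
 */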
147 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
149 int i;
151 for (i = 0; i < XBUFSIZE; i++) {
152 buf[i] = (void *)__get_free_page(GFP_KERNEL);
153 if (!buf[i])
154 goto err_free_buf;
157 return 0;
159 err_free_buf:
160 while (i-- > 0)
161 free_page((unsigned long)buf[i]);
163 return -ENOMEM;
166 static void testmgr_free_buf(char *buf[XBUFSIZE])
168 int i;
170 for (i = 0; i < XBUFSIZE; i++)
171 free_page((unsigned long)buf[i]);
174 static int do_one_async_hash_op(struct ahash_request *req,
175 struct tcrypt_result *tr,
176 int ret)
178 if (ret == -EINPROGRESS || ret == -EBUSY) {
179 wait_for_completion(&tr->completion);
180 INIT_COMPLETION(tr->completion);
181 ret = tr->err;
183 return ret;
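/*
 * Typical invocation (sketch, mirroring the calls later in this file):
 *
 *	ret = do_one_async_hash_op(req, &tresult,
 *				   crypto_ahash_digest(req));
 *
 * The operation is started first and its return value is passed in; the
 * helper only waits (and re-arms the completion) when the backend reported
 * -EINPROGRESS or -EBUSY, otherwise it returns the synchronous result
 * unchanged.
 */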
186 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
187 unsigned int tcount, bool use_digest,
188 const int align_offset)
190 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
191 unsigned int i, j, k, temp;
192 struct scatterlist sg[8];
193 char result[64];
194 struct ahash_request *req;
195 struct tcrypt_result tresult;
196 void *hash_buff;
197 char *xbuf[XBUFSIZE];
198 int ret = -ENOMEM;
200 if (testmgr_alloc_buf(xbuf))
201 goto out_nobuf;
203 init_completion(&tresult.completion);
205 req = ahash_request_alloc(tfm, GFP_KERNEL);
206 if (!req) {
207 printk(KERN_ERR "alg: hash: Failed to allocate request for "
208 "%s\n", algo);
209 goto out_noreq;
211 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
212 tcrypt_complete, &tresult);
214 j = 0;
215 for (i = 0; i < tcount; i++) {
216 if (template[i].np)
217 continue;
219 ret = -EINVAL;
220 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
221 goto out;
223 j++;
224 memset(result, 0, 64);
226 hash_buff = xbuf[0];
227 hash_buff += align_offset;
229 memcpy(hash_buff, template[i].plaintext, template[i].psize);
230 sg_init_one(&sg[0], hash_buff, template[i].psize);
232 if (template[i].ksize) {
233 crypto_ahash_clear_flags(tfm, ~0);
234 ret = crypto_ahash_setkey(tfm, template[i].key,
235 template[i].ksize);
236 if (ret) {
237 printk(KERN_ERR "alg: hash: setkey failed on "
238 "test %d for %s: ret=%d\n", j, algo,
239 -ret);
240 goto out;
244 ahash_request_set_crypt(req, sg, result, template[i].psize);
245 if (use_digest) {
246 ret = do_one_async_hash_op(req, &tresult,
247 crypto_ahash_digest(req));
248 if (ret) {
249 pr_err("alg: hash: digest failed on test %d "
250 "for %s: ret=%d\n", j, algo, -ret);
251 goto out;
253 } else {
254 ret = do_one_async_hash_op(req, &tresult,
255 crypto_ahash_init(req));
256 if (ret) {
257 pr_err("alt: hash: init failed on test %d "
258 "for %s: ret=%d\n", j, algo, -ret);
259 goto out;
261 ret = do_one_async_hash_op(req, &tresult,
262 crypto_ahash_update(req));
263 if (ret) {
264 pr_err("alt: hash: update failed on test %d "
265 "for %s: ret=%d\n", j, algo, -ret);
266 goto out;
268 ret = do_one_async_hash_op(req, &tresult,
269 crypto_ahash_final(req));
270 if (ret) {
271 pr_err("alt: hash: final failed on test %d "
272 "for %s: ret=%d\n", j, algo, -ret);
273 goto out;
277 if (memcmp(result, template[i].digest,
278 crypto_ahash_digestsize(tfm))) {
279 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
280 j, algo);
281 hexdump(result, crypto_ahash_digestsize(tfm));
282 ret = -EINVAL;
283 goto out;
287 j = 0;
288 for (i = 0; i < tcount; i++) {
289 /* alignment tests are only done with contiguous buffers */
290 if (align_offset != 0)
291 break;
293 if (template[i].np) {
294 j++;
295 memset(result, 0, 64);
297 temp = 0;
298 sg_init_table(sg, template[i].np);
299 ret = -EINVAL;
300 for (k = 0; k < template[i].np; k++) {
301 if (WARN_ON(offset_in_page(IDX[k]) +
302 template[i].tap[k] > PAGE_SIZE))
303 goto out;
304 sg_set_buf(&sg[k],
305 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
306 offset_in_page(IDX[k]),
307 template[i].plaintext + temp,
308 template[i].tap[k]),
309 template[i].tap[k]);
310 temp += template[i].tap[k];
313 if (template[i].ksize) {
314 crypto_ahash_clear_flags(tfm, ~0);
315 ret = crypto_ahash_setkey(tfm, template[i].key,
316 template[i].ksize);
318 if (ret) {
319 printk(KERN_ERR "alg: hash: setkey "
320 "failed on chunking test %d "
321 "for %s: ret=%d\n", j, algo,
322 -ret);
323 goto out;
327 ahash_request_set_crypt(req, sg, result,
328 template[i].psize);
329 ret = crypto_ahash_digest(req);
330 switch (ret) {
331 case 0:
332 break;
333 case -EINPROGRESS:
334 case -EBUSY:
335 wait_for_completion(&tresult.completion);
336 INIT_COMPLETION(tresult.completion);
337 ret = tresult.err;
338 if (!ret)
339 break;
340 /* fall through */
341 default:
342 printk(KERN_ERR "alg: hash: digest failed "
343 "on chunking test %d for %s: "
344 "ret=%d\n", j, algo, -ret);
345 goto out;
348 if (memcmp(result, template[i].digest,
349 crypto_ahash_digestsize(tfm))) {
350 printk(KERN_ERR "alg: hash: Chunking test %d "
351 "failed for %s\n", j, algo);
352 hexdump(result, crypto_ahash_digestsize(tfm));
353 ret = -EINVAL;
354 goto out;
359 ret = 0;
361 out:
362 ahash_request_free(req);
363 out_noreq:
364 testmgr_free_buf(xbuf);
365 out_nobuf:
366 return ret;
369 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
370 unsigned int tcount, bool use_digest)
372 unsigned int alignmask;
373 int ret;
375 ret = __test_hash(tfm, template, tcount, use_digest, 0);
376 if (ret)
377 return ret;
379 /* test unaligned buffers, check with one byte offset */
380 ret = __test_hash(tfm, template, tcount, use_digest, 1);
381 if (ret)
382 return ret;
384 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
385 if (alignmask) {
386 /* Check if alignment mask for tfm is correctly set. */
387 ret = __test_hash(tfm, template, tcount, use_digest,
388 alignmask + 1);
389 if (ret)
390 return ret;
393 return 0;
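/*
 * Why these particular offsets (sketch of the intent): offset 0 is the
 * fully aligned case, offset 1 forces the unaligned-buffer path for any
 * non-trivial alignment mask, and offset alignmask + 1 yields a buffer
 * that satisfies exactly the alignment the driver advertises and nothing
 * stronger, so an implementation that silently assumes stricter alignment
 * than its declared mask should fail there.
 */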
396 static int __test_aead(struct crypto_aead *tfm, int enc,
397 struct aead_testvec *template, unsigned int tcount,
398 const bool diff_dst, const int align_offset)
400 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
401 unsigned int i, j, k, n, temp;
402 int ret = -ENOMEM;
403 char *q;
404 char *key;
405 struct aead_request *req;
406 struct scatterlist *sg;
407 struct scatterlist *asg;
408 struct scatterlist *sgout;
409 const char *e, *d;
410 struct tcrypt_result result;
411 unsigned int authsize;
412 void *input;
413 void *output;
414 void *assoc;
415 char iv[MAX_IVLEN];
416 char *xbuf[XBUFSIZE];
417 char *xoutbuf[XBUFSIZE];
418 char *axbuf[XBUFSIZE];
420 if (testmgr_alloc_buf(xbuf))
421 goto out_noxbuf;
422 if (testmgr_alloc_buf(axbuf))
423 goto out_noaxbuf;
425 if (diff_dst && testmgr_alloc_buf(xoutbuf))
426 goto out_nooutbuf;
428 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
429 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
430 if (!sg)
431 goto out_nosg;
432 asg = &sg[8];
433 sgout = &asg[8];
435 if (diff_dst)
436 d = "-ddst";
437 else
438 d = "";
440 if (enc == ENCRYPT)
441 e = "encryption";
442 else
443 e = "decryption";
445 init_completion(&result.completion);
447 req = aead_request_alloc(tfm, GFP_KERNEL);
448 if (!req) {
449 pr_err("alg: aead%s: Failed to allocate request for %s\n",
450 d, algo);
451 goto out;
454 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
455 tcrypt_complete, &result);
457 for (i = 0, j = 0; i < tcount; i++) {
458 if (!template[i].np) {
459 j++;
461 /* some templates have no input data but they will
462 * touch input */
464 input = xbuf[0];
465 input += align_offset;
466 assoc = axbuf[0];
468 ret = -EINVAL;
469 if (WARN_ON(align_offset + template[i].ilen >
470 PAGE_SIZE || template[i].alen > PAGE_SIZE))
471 goto out;
473 memcpy(input, template[i].input, template[i].ilen);
474 memcpy(assoc, template[i].assoc, template[i].alen);
475 if (template[i].iv)
476 memcpy(iv, template[i].iv, MAX_IVLEN);
477 else
478 memset(iv, 0, MAX_IVLEN);
480 crypto_aead_clear_flags(tfm, ~0);
481 if (template[i].wk)
482 crypto_aead_set_flags(
483 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
485 key = template[i].key;
487 ret = crypto_aead_setkey(tfm, key,
488 template[i].klen);
489 if (!ret == template[i].fail) {
490 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
491 d, j, algo, crypto_aead_get_flags(tfm));
492 goto out;
493 } else if (ret)
494 continue;
496 authsize = abs(template[i].rlen - template[i].ilen);
497 ret = crypto_aead_setauthsize(tfm, authsize);
498 if (ret) {
499 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
500 d, authsize, j, algo);
501 goto out;
504 sg_init_one(&sg[0], input,
505 template[i].ilen + (enc ? authsize : 0));
507 if (diff_dst) {
508 output = xoutbuf[0];
509 output += align_offset;
510 sg_init_one(&sgout[0], output,
511 template[i].ilen +
512 (enc ? authsize : 0));
513 } else {
514 output = input;
517 sg_init_one(&asg[0], assoc, template[i].alen);
519 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
520 template[i].ilen, iv);
522 aead_request_set_assoc(req, asg, template[i].alen);
524 ret = enc ?
525 crypto_aead_encrypt(req) :
526 crypto_aead_decrypt(req);
528 switch (ret) {
529 case 0:
530 if (template[i].novrfy) {
531 /* verification was supposed to fail */
532 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
533 d, e, j, algo);
534 /* so really, we got a bad message */
535 ret = -EBADMSG;
536 goto out;
538 break;
539 case -EINPROGRESS:
540 case -EBUSY:
541 wait_for_completion(&result.completion);
542 INIT_COMPLETION(result.completion);
543 ret = result.err;
544 if (!ret)
545 break;
546 case -EBADMSG:
547 if (template[i].novrfy)
548 /* verification failure was expected */
549 continue;
550 /* fall through */
551 default:
552 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
553 d, e, j, algo, -ret);
554 goto out;
557 q = output;
558 if (memcmp(q, template[i].result, template[i].rlen)) {
559 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
560 d, j, e, algo);
561 hexdump(q, template[i].rlen);
562 ret = -EINVAL;
563 goto out;
568 for (i = 0, j = 0; i < tcount; i++) {
569 /* alignment tests are only done with contiguous buffers */
570 if (align_offset != 0)
571 break;
573 if (template[i].np) {
574 j++;
576 if (template[i].iv)
577 memcpy(iv, template[i].iv, MAX_IVLEN);
578 else
579 memset(iv, 0, MAX_IVLEN);
581 crypto_aead_clear_flags(tfm, ~0);
582 if (template[i].wk)
583 crypto_aead_set_flags(
584 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
585 key = template[i].key;
587 ret = crypto_aead_setkey(tfm, key, template[i].klen);
588 if (!ret == template[i].fail) {
589 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
590 d, j, algo, crypto_aead_get_flags(tfm));
591 goto out;
592 } else if (ret)
593 continue;
595 authsize = abs(template[i].rlen - template[i].ilen);
597 ret = -EINVAL;
598 sg_init_table(sg, template[i].np);
599 if (diff_dst)
600 sg_init_table(sgout, template[i].np);
601 for (k = 0, temp = 0; k < template[i].np; k++) {
602 if (WARN_ON(offset_in_page(IDX[k]) +
603 template[i].tap[k] > PAGE_SIZE))
604 goto out;
606 q = xbuf[IDX[k] >> PAGE_SHIFT] +
607 offset_in_page(IDX[k]);
609 memcpy(q, template[i].input + temp,
610 template[i].tap[k]);
612 n = template[i].tap[k];
613 if (k == template[i].np - 1 && enc)
614 n += authsize;
615 if (offset_in_page(q) + n < PAGE_SIZE)
616 q[n] = 0;
618 sg_set_buf(&sg[k], q, template[i].tap[k]);
620 if (diff_dst) {
621 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
622 offset_in_page(IDX[k]);
624 memset(q, 0, template[i].tap[k]);
625 if (offset_in_page(q) + n < PAGE_SIZE)
626 q[n] = 0;
628 sg_set_buf(&sgout[k], q,
629 template[i].tap[k]);
632 temp += template[i].tap[k];
635 ret = crypto_aead_setauthsize(tfm, authsize);
636 if (ret) {
637 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
638 d, authsize, j, algo);
639 goto out;
642 if (enc) {
643 if (WARN_ON(sg[k - 1].offset +
644 sg[k - 1].length + authsize >
645 PAGE_SIZE)) {
646 ret = -EINVAL;
647 goto out;
650 sg[k - 1].length += authsize;
652 if (diff_dst)
653 sgout[k - 1].length += authsize;
656 sg_init_table(asg, template[i].anp);
657 ret = -EINVAL;
658 for (k = 0, temp = 0; k < template[i].anp; k++) {
659 if (WARN_ON(offset_in_page(IDX[k]) +
660 template[i].atap[k] > PAGE_SIZE))
661 goto out;
662 sg_set_buf(&asg[k],
663 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
664 offset_in_page(IDX[k]),
665 template[i].assoc + temp,
666 template[i].atap[k]),
667 template[i].atap[k]);
668 temp += template[i].atap[k];
671 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
672 template[i].ilen,
673 iv);
675 aead_request_set_assoc(req, asg, template[i].alen);
677 ret = enc ?
678 crypto_aead_encrypt(req) :
679 crypto_aead_decrypt(req);
681 switch (ret) {
682 case 0:
683 if (template[i].novrfy) {
684 /* verification was supposed to fail */
685 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
686 d, e, j, algo);
687 /* so really, we got a bad message */
688 ret = -EBADMSG;
689 goto out;
691 break;
692 case -EINPROGRESS:
693 case -EBUSY:
694 wait_for_completion(&result.completion);
695 INIT_COMPLETION(result.completion);
696 ret = result.err;
697 if (!ret)
698 break;
699 case -EBADMSG:
700 if (template[i].novrfy)
701 /* verification failure was expected */
702 continue;
703 /* fall through */
704 default:
705 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
706 d, e, j, algo, -ret);
707 goto out;
710 ret = -EINVAL;
711 for (k = 0, temp = 0; k < template[i].np; k++) {
712 if (diff_dst)
713 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
714 offset_in_page(IDX[k]);
715 else
716 q = xbuf[IDX[k] >> PAGE_SHIFT] +
717 offset_in_page(IDX[k]);
719 n = template[i].tap[k];
720 if (k == template[i].np - 1)
721 n += enc ? authsize : -authsize;
723 if (memcmp(q, template[i].result + temp, n)) {
724 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
725 d, j, e, k, algo);
726 hexdump(q, n);
727 goto out;
730 q += n;
731 if (k == template[i].np - 1 && !enc) {
732 if (!diff_dst &&
733 memcmp(q, template[i].input +
734 temp + n, authsize))
735 n = authsize;
736 else
737 n = 0;
738 } else {
739 for (n = 0; offset_in_page(q + n) &&
740 q[n]; n++)
743 if (n) {
744 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
745 d, j, e, k, algo, n);
746 hexdump(q, n);
747 goto out;
750 temp += template[i].tap[k];
755 ret = 0;
757 out:
758 aead_request_free(req);
759 kfree(sg);
760 out_nosg:
761 if (diff_dst)
762 testmgr_free_buf(xoutbuf);
763 out_nooutbuf:
764 testmgr_free_buf(axbuf);
765 out_noaxbuf:
766 testmgr_free_buf(xbuf);
767 out_noxbuf:
768 return ret;
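/*
 * Two conventions used above, noted for clarity: the expected
 * authentication tag length is recovered as abs(rlen - ilen), because for
 * encryption vectors the result is the ciphertext plus the tag while for
 * decryption vectors the input carries the tag; and vectors marked
 * template[i].novrfy are negative tests whose decryption is expected to
 * fail verification with -EBADMSG rather than produce output.
 */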
771 static int test_aead(struct crypto_aead *tfm, int enc,
772 struct aead_testvec *template, unsigned int tcount)
774 unsigned int alignmask;
775 int ret;
777 /* test 'dst == src' case */
778 ret = __test_aead(tfm, enc, template, tcount, false, 0);
779 if (ret)
780 return ret;
782 /* test 'dst != src' case */
783 ret = __test_aead(tfm, enc, template, tcount, true, 0);
784 if (ret)
785 return ret;
787 /* test unaligned buffers, check with one byte offset */
788 ret = __test_aead(tfm, enc, template, tcount, true, 1);
789 if (ret)
790 return ret;
792 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
793 if (alignmask) {
794 /* Check if alignment mask for tfm is correctly set. */
795 ret = __test_aead(tfm, enc, template, tcount, true,
796 alignmask + 1);
797 if (ret)
798 return ret;
801 return 0;
804 static int test_cipher(struct crypto_cipher *tfm, int enc,
805 struct cipher_testvec *template, unsigned int tcount)
807 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
808 unsigned int i, j, k;
809 char *q;
810 const char *e;
811 void *data;
812 char *xbuf[XBUFSIZE];
813 int ret = -ENOMEM;
815 if (testmgr_alloc_buf(xbuf))
816 goto out_nobuf;
818 if (enc == ENCRYPT)
819 e = "encryption";
820 else
821 e = "decryption";
823 j = 0;
824 for (i = 0; i < tcount; i++) {
825 if (template[i].np)
826 continue;
828 j++;
830 ret = -EINVAL;
831 if (WARN_ON(template[i].ilen > PAGE_SIZE))
832 goto out;
834 data = xbuf[0];
835 memcpy(data, template[i].input, template[i].ilen);
837 crypto_cipher_clear_flags(tfm, ~0);
838 if (template[i].wk)
839 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
841 ret = crypto_cipher_setkey(tfm, template[i].key,
842 template[i].klen);
843 if (!ret == template[i].fail) {
844 printk(KERN_ERR "alg: cipher: setkey failed "
845 "on test %d for %s: flags=%x\n", j,
846 algo, crypto_cipher_get_flags(tfm));
847 goto out;
848 } else if (ret)
849 continue;
851 for (k = 0; k < template[i].ilen;
852 k += crypto_cipher_blocksize(tfm)) {
853 if (enc)
854 crypto_cipher_encrypt_one(tfm, data + k,
855 data + k);
856 else
857 crypto_cipher_decrypt_one(tfm, data + k,
858 data + k);
861 q = data;
862 if (memcmp(q, template[i].result, template[i].rlen)) {
863 printk(KERN_ERR "alg: cipher: Test %d failed "
864 "on %s for %s\n", j, e, algo);
865 hexdump(q, template[i].rlen);
866 ret = -EINVAL;
867 goto out;
871 ret = 0;
873 out:
874 testmgr_free_buf(xbuf);
875 out_nobuf:
876 return ret;
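/*
 * test_cipher() exercises the synchronous single-block interface, so each
 * vector is processed one cipher block at a time.  A minimal usage sketch
 * of that interface (the "aes" name here is only an example):
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_cipher_setkey(tfm, key, keylen);
 *		crypto_cipher_encrypt_one(tfm, dst, src); // one block only
 *		crypto_free_cipher(tfm);
 *	}
 */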
879 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
880 struct cipher_testvec *template, unsigned int tcount,
881 const bool diff_dst, const int align_offset)
883 const char *algo =
884 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
885 unsigned int i, j, k, n, temp;
886 char *q;
887 struct ablkcipher_request *req;
888 struct scatterlist sg[8];
889 struct scatterlist sgout[8];
890 const char *e, *d;
891 struct tcrypt_result result;
892 void *data;
893 char iv[MAX_IVLEN];
894 char *xbuf[XBUFSIZE];
895 char *xoutbuf[XBUFSIZE];
896 int ret = -ENOMEM;
898 if (testmgr_alloc_buf(xbuf))
899 goto out_nobuf;
901 if (diff_dst && testmgr_alloc_buf(xoutbuf))
902 goto out_nooutbuf;
904 if (diff_dst)
905 d = "-ddst";
906 else
907 d = "";
909 if (enc == ENCRYPT)
910 e = "encryption";
911 else
912 e = "decryption";
914 init_completion(&result.completion);
916 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
917 if (!req) {
918 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
919 d, algo);
920 goto out;
923 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
924 tcrypt_complete, &result);
926 j = 0;
927 for (i = 0; i < tcount; i++) {
928 if (template[i].iv)
929 memcpy(iv, template[i].iv, MAX_IVLEN);
930 else
931 memset(iv, 0, MAX_IVLEN);
933 if (!(template[i].np) || (template[i].also_non_np)) {
934 j++;
936 ret = -EINVAL;
937 if (WARN_ON(align_offset + template[i].ilen >
938 PAGE_SIZE))
939 goto out;
941 data = xbuf[0];
942 data += align_offset;
943 memcpy(data, template[i].input, template[i].ilen);
945 crypto_ablkcipher_clear_flags(tfm, ~0);
946 if (template[i].wk)
947 crypto_ablkcipher_set_flags(
948 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
950 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
951 template[i].klen);
952 if (!ret == template[i].fail) {
953 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
954 d, j, algo,
955 crypto_ablkcipher_get_flags(tfm));
956 goto out;
957 } else if (ret)
958 continue;
960 sg_init_one(&sg[0], data, template[i].ilen);
961 if (diff_dst) {
962 data = xoutbuf[0];
963 data += align_offset;
964 sg_init_one(&sgout[0], data, template[i].ilen);
967 ablkcipher_request_set_crypt(req, sg,
968 (diff_dst) ? sgout : sg,
969 template[i].ilen, iv);
970 ret = enc ?
971 crypto_ablkcipher_encrypt(req) :
972 crypto_ablkcipher_decrypt(req);
974 switch (ret) {
975 case 0:
976 break;
977 case -EINPROGRESS:
978 case -EBUSY:
979 wait_for_completion(&result.completion);
980 INIT_COMPLETION(result.completion);
981 ret = result.err;
982 if (!ret)
983 break;
984 /* fall through */
985 default:
986 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
987 d, e, j, algo, -ret);
988 goto out;
991 q = data;
992 if (memcmp(q, template[i].result, template[i].rlen)) {
993 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
994 d, j, e, algo);
995 hexdump(q, template[i].rlen);
996 ret = -EINVAL;
997 goto out;
1002 j = 0;
1003 for (i = 0; i < tcount; i++) {
1004 /* alignment tests are only done with contiguous buffers */
1005 if (align_offset != 0)
1006 break;
1008 if (template[i].iv)
1009 memcpy(iv, template[i].iv, MAX_IVLEN);
1010 else
1011 memset(iv, 0, MAX_IVLEN);
1013 if (template[i].np) {
1014 j++;
1016 crypto_ablkcipher_clear_flags(tfm, ~0);
1017 if (template[i].wk)
1018 crypto_ablkcipher_set_flags(
1019 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1021 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
1022 template[i].klen);
1023 if (!ret == template[i].fail) {
1024 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1025 d, j, algo,
1026 crypto_ablkcipher_get_flags(tfm));
1027 goto out;
1028 } else if (ret)
1029 continue;
1031 temp = 0;
1032 ret = -EINVAL;
1033 sg_init_table(sg, template[i].np);
1034 if (diff_dst)
1035 sg_init_table(sgout, template[i].np);
1036 for (k = 0; k < template[i].np; k++) {
1037 if (WARN_ON(offset_in_page(IDX[k]) +
1038 template[i].tap[k] > PAGE_SIZE))
1039 goto out;
1041 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1042 offset_in_page(IDX[k]);
1044 memcpy(q, template[i].input + temp,
1045 template[i].tap[k]);
1047 if (offset_in_page(q) + template[i].tap[k] <
1048 PAGE_SIZE)
1049 q[template[i].tap[k]] = 0;
1051 sg_set_buf(&sg[k], q, template[i].tap[k]);
1052 if (diff_dst) {
1053 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1054 offset_in_page(IDX[k]);
1056 sg_set_buf(&sgout[k], q,
1057 template[i].tap[k]);
1059 memset(q, 0, template[i].tap[k]);
1060 if (offset_in_page(q) +
1061 template[i].tap[k] < PAGE_SIZE)
1062 q[template[i].tap[k]] = 0;
1065 temp += template[i].tap[k];
1068 ablkcipher_request_set_crypt(req, sg,
1069 (diff_dst) ? sgout : sg,
1070 template[i].ilen, iv);
1072 ret = enc ?
1073 crypto_ablkcipher_encrypt(req) :
1074 crypto_ablkcipher_decrypt(req);
1076 switch (ret) {
1077 case 0:
1078 break;
1079 case -EINPROGRESS:
1080 case -EBUSY:
1081 wait_for_completion(&result.completion);
1082 INIT_COMPLETION(result.completion);
1083 ret = result.err;
1084 if (!ret)
1085 break;
1086 /* fall through */
1087 default:
1088 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1089 d, e, j, algo, -ret);
1090 goto out;
1093 temp = 0;
1094 ret = -EINVAL;
1095 for (k = 0; k < template[i].np; k++) {
1096 if (diff_dst)
1097 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1098 offset_in_page(IDX[k]);
1099 else
1100 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1101 offset_in_page(IDX[k]);
1103 if (memcmp(q, template[i].result + temp,
1104 template[i].tap[k])) {
1105 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1106 d, j, e, k, algo);
1107 hexdump(q, template[i].tap[k]);
1108 goto out;
1111 q += template[i].tap[k];
1112 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1114 if (n) {
1115 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1116 d, j, e, k, algo, n);
1117 hexdump(q, n);
1118 goto out;
1120 temp += template[i].tap[k];
1125 ret = 0;
1127 out:
1128 ablkcipher_request_free(req);
1129 if (diff_dst)
1130 testmgr_free_buf(xoutbuf);
1131 out_nooutbuf:
1132 testmgr_free_buf(xbuf);
1133 out_nobuf:
1134 return ret;
1137 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1138 struct cipher_testvec *template, unsigned int tcount)
1140 unsigned int alignmask;
1141 int ret;
1143 /* test 'dst == src' case */
1144 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1145 if (ret)
1146 return ret;
1148 /* test 'dst != src' case */
1149 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1150 if (ret)
1151 return ret;
1153 /* test unaligned buffers, check with one byte offset */
1154 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1155 if (ret)
1156 return ret;
1158 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1159 if (alignmask) {
1160 /* Check if alignment mask for tfm is correctly set. */
1161 ret = __test_skcipher(tfm, enc, template, tcount, true,
1162 alignmask + 1);
1163 if (ret)
1164 return ret;
1167 return 0;
1170 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1171 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1173 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1174 unsigned int i;
1175 char result[COMP_BUF_SIZE];
1176 int ret;
1178 for (i = 0; i < ctcount; i++) {
1179 int ilen;
1180 unsigned int dlen = COMP_BUF_SIZE;
1182 memset(result, 0, sizeof (result));
1184 ilen = ctemplate[i].inlen;
1185 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1186 ilen, result, &dlen);
1187 if (ret) {
1188 printk(KERN_ERR "alg: comp: compression failed "
1189 "on test %d for %s: ret=%d\n", i + 1, algo,
1190 -ret);
1191 goto out;
1194 if (dlen != ctemplate[i].outlen) {
1195 printk(KERN_ERR "alg: comp: Compression test %d "
1196 "failed for %s: output len = %d\n", i + 1, algo,
1197 dlen);
1198 ret = -EINVAL;
1199 goto out;
1202 if (memcmp(result, ctemplate[i].output, dlen)) {
1203 printk(KERN_ERR "alg: comp: Compression test %d "
1204 "failed for %s\n", i + 1, algo);
1205 hexdump(result, dlen);
1206 ret = -EINVAL;
1207 goto out;
1211 for (i = 0; i < dtcount; i++) {
1212 int ilen;
1213 unsigned int dlen = COMP_BUF_SIZE;
1215 memset(result, 0, sizeof (result));
1217 ilen = dtemplate[i].inlen;
1218 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1219 ilen, result, &dlen);
1220 if (ret) {
1221 printk(KERN_ERR "alg: comp: decompression failed "
1222 "on test %d for %s: ret=%d\n", i + 1, algo,
1223 -ret);
1224 goto out;
1227 if (dlen != dtemplate[i].outlen) {
1228 printk(KERN_ERR "alg: comp: Decompression test %d "
1229 "failed for %s: output len = %d\n", i + 1, algo,
1230 dlen);
1231 ret = -EINVAL;
1232 goto out;
1235 if (memcmp(result, dtemplate[i].output, dlen)) {
1236 printk(KERN_ERR "alg: comp: Decompression test %d "
1237 "failed for %s\n", i + 1, algo);
1238 hexdump(result, dlen);
1239 ret = -EINVAL;
1240 goto out;
1244 ret = 0;
1246 out:
1247 return ret;
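/*
 * test_comp() drives the simple one-shot compression interface.  A minimal
 * sketch of a caller (the "deflate" name is only an example; dlen must be
 * primed with the output buffer size and is updated with the number of
 * bytes actually produced):
 *
 *	struct crypto_comp *tfm = crypto_alloc_comp("deflate", 0, 0);
 *	unsigned int dlen = sizeof(dst);
 *
 *	if (!IS_ERR(tfm)) {
 *		crypto_comp_compress(tfm, src, slen, dst, &dlen);
 *		crypto_free_comp(tfm);
 *	}
 */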
1250 static int test_pcomp(struct crypto_pcomp *tfm,
1251 struct pcomp_testvec *ctemplate,
1252 struct pcomp_testvec *dtemplate, int ctcount,
1253 int dtcount)
1255 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1256 unsigned int i;
1257 char result[COMP_BUF_SIZE];
1258 int res;
1260 for (i = 0; i < ctcount; i++) {
1261 struct comp_request req;
1262 unsigned int produced = 0;
1264 res = crypto_compress_setup(tfm, ctemplate[i].params,
1265 ctemplate[i].paramsize);
1266 if (res) {
1267 pr_err("alg: pcomp: compression setup failed on test "
1268 "%d for %s: error=%d\n", i + 1, algo, res);
1269 return res;
1272 res = crypto_compress_init(tfm);
1273 if (res) {
1274 pr_err("alg: pcomp: compression init failed on test "
1275 "%d for %s: error=%d\n", i + 1, algo, res);
1276 return res;
1279 memset(result, 0, sizeof(result));
1281 req.next_in = ctemplate[i].input;
1282 req.avail_in = ctemplate[i].inlen / 2;
1283 req.next_out = result;
1284 req.avail_out = ctemplate[i].outlen / 2;
1286 res = crypto_compress_update(tfm, &req);
1287 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1288 pr_err("alg: pcomp: compression update failed on test "
1289 "%d for %s: error=%d\n", i + 1, algo, res);
1290 return res;
1292 if (res > 0)
1293 produced += res;
1295 /* Add remaining input data */
1296 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1298 res = crypto_compress_update(tfm, &req);
1299 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1300 pr_err("alg: pcomp: compression update failed on test "
1301 "%d for %s: error=%d\n", i + 1, algo, res);
1302 return res;
1304 if (res > 0)
1305 produced += res;
1307 /* Provide remaining output space */
1308 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1310 res = crypto_compress_final(tfm, &req);
1311 if (res < 0) {
1312 pr_err("alg: pcomp: compression final failed on test "
1313 "%d for %s: error=%d\n", i + 1, algo, res);
1314 return res;
1316 produced += res;
1318 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1319 pr_err("alg: comp: Compression test %d failed for %s: "
1320 "output len = %d (expected %d)\n", i + 1, algo,
1321 COMP_BUF_SIZE - req.avail_out,
1322 ctemplate[i].outlen);
1323 return -EINVAL;
1326 if (produced != ctemplate[i].outlen) {
1327 pr_err("alg: comp: Compression test %d failed for %s: "
1328 "returned len = %u (expected %d)\n", i + 1,
1329 algo, produced, ctemplate[i].outlen);
1330 return -EINVAL;
1333 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1334 pr_err("alg: pcomp: Compression test %d failed for "
1335 "%s\n", i + 1, algo);
1336 hexdump(result, ctemplate[i].outlen);
1337 return -EINVAL;
1341 for (i = 0; i < dtcount; i++) {
1342 struct comp_request req;
1343 unsigned int produced = 0;
1345 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1346 dtemplate[i].paramsize);
1347 if (res) {
1348 pr_err("alg: pcomp: decompression setup failed on "
1349 "test %d for %s: error=%d\n", i + 1, algo, res);
1350 return res;
1353 res = crypto_decompress_init(tfm);
1354 if (res) {
1355 pr_err("alg: pcomp: decompression init failed on test "
1356 "%d for %s: error=%d\n", i + 1, algo, res);
1357 return res;
1360 memset(result, 0, sizeof(result));
1362 req.next_in = dtemplate[i].input;
1363 req.avail_in = dtemplate[i].inlen / 2;
1364 req.next_out = result;
1365 req.avail_out = dtemplate[i].outlen / 2;
1367 res = crypto_decompress_update(tfm, &req);
1368 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1369 pr_err("alg: pcomp: decompression update failed on "
1370 "test %d for %s: error=%d\n", i + 1, algo, res);
1371 return res;
1373 if (res > 0)
1374 produced += res;
1376 /* Add remaining input data */
1377 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1379 res = crypto_decompress_update(tfm, &req);
1380 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1381 pr_err("alg: pcomp: decompression update failed on "
1382 "test %d for %s: error=%d\n", i + 1, algo, res);
1383 return res;
1385 if (res > 0)
1386 produced += res;
1388 /* Provide remaining output space */
1389 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1391 res = crypto_decompress_final(tfm, &req);
1392 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1393 pr_err("alg: pcomp: decompression final failed on "
1394 "test %d for %s: error=%d\n", i + 1, algo, res);
1395 return res;
1397 if (res > 0)
1398 produced += res;
1400 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1401 pr_err("alg: comp: Decompression test %d failed for "
1402 "%s: output len = %d (expected %d)\n", i + 1,
1403 algo, COMP_BUF_SIZE - req.avail_out,
1404 dtemplate[i].outlen);
1405 return -EINVAL;
1408 if (produced != dtemplate[i].outlen) {
1409 pr_err("alg: comp: Decompression test %d failed for "
1410 "%s: returned len = %u (expected %d)\n", i + 1,
1411 algo, produced, dtemplate[i].outlen);
1412 return -EINVAL;
1415 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1416 pr_err("alg: pcomp: Decompression test %d failed for "
1417 "%s\n", i + 1, algo);
1418 hexdump(result, dtemplate[i].outlen);
1419 return -EINVAL;
1423 return 0;
1427 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1428 unsigned int tcount)
1430 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1431 int err = 0, i, j, seedsize;
1432 u8 *seed;
1433 char result[32];
1435 seedsize = crypto_rng_seedsize(tfm);
1437 seed = kmalloc(seedsize, GFP_KERNEL);
1438 if (!seed) {
1439 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1440 "for %s\n", algo);
1441 return -ENOMEM;
1444 for (i = 0; i < tcount; i++) {
1445 memset(result, 0, 32);
1447 memcpy(seed, template[i].v, template[i].vlen);
1448 memcpy(seed + template[i].vlen, template[i].key,
1449 template[i].klen);
1450 memcpy(seed + template[i].vlen + template[i].klen,
1451 template[i].dt, template[i].dtlen);
1453 err = crypto_rng_reset(tfm, seed, seedsize);
1454 if (err) {
1455 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1456 "for %s\n", algo);
1457 goto out;
1460 for (j = 0; j < template[i].loops; j++) {
1461 err = crypto_rng_get_bytes(tfm, result,
1462 template[i].rlen);
1463 if (err < 0) {
1464 printk(KERN_ERR "alg: cprng: Failed to obtain "
1465 "the correct amount of random data for "
1466 "%s (requested %d)\n", algo,
1467 template[i].rlen);
1468 goto out;
1472 err = memcmp(result, template[i].result,
1473 template[i].rlen);
1474 if (err) {
1475 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1476 i, algo);
1477 hexdump(result, template[i].rlen);
1478 err = -EINVAL;
1479 goto out;
1483 out:
1484 kfree(seed);
1485 return err;
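/*
 * Seed layout used above (as assembled by the memcpy sequence): the buffer
 * handed to crypto_rng_reset() is the concatenation V || key || DT taken
 * from the test vector, which matches the seed material the ansi_cprng
 * implementation expects; each vector is then read template[i].loops times
 * and compared against the expected output.
 */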
1488 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1489 u32 type, u32 mask)
1491 struct crypto_aead *tfm;
1492 int err = 0;
1494 tfm = crypto_alloc_aead(driver, type, mask);
1495 if (IS_ERR(tfm)) {
1496 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1497 "%ld\n", driver, PTR_ERR(tfm));
1498 return PTR_ERR(tfm);
1501 if (desc->suite.aead.enc.vecs) {
1502 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1503 desc->suite.aead.enc.count);
1504 if (err)
1505 goto out;
1508 if (!err && desc->suite.aead.dec.vecs)
1509 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1510 desc->suite.aead.dec.count);
1512 out:
1513 crypto_free_aead(tfm);
1514 return err;
1517 static int alg_test_cipher(const struct alg_test_desc *desc,
1518 const char *driver, u32 type, u32 mask)
1520 struct crypto_cipher *tfm;
1521 int err = 0;
1523 tfm = crypto_alloc_cipher(driver, type, mask);
1524 if (IS_ERR(tfm)) {
1525 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1526 "%s: %ld\n", driver, PTR_ERR(tfm));
1527 return PTR_ERR(tfm);
1530 if (desc->suite.cipher.enc.vecs) {
1531 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1532 desc->suite.cipher.enc.count);
1533 if (err)
1534 goto out;
1537 if (desc->suite.cipher.dec.vecs)
1538 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1539 desc->suite.cipher.dec.count);
1541 out:
1542 crypto_free_cipher(tfm);
1543 return err;
1546 static int alg_test_skcipher(const struct alg_test_desc *desc,
1547 const char *driver, u32 type, u32 mask)
1549 struct crypto_ablkcipher *tfm;
1550 int err = 0;
1552 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1553 if (IS_ERR(tfm)) {
1554 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1555 "%s: %ld\n", driver, PTR_ERR(tfm));
1556 return PTR_ERR(tfm);
1559 if (desc->suite.cipher.enc.vecs) {
1560 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1561 desc->suite.cipher.enc.count);
1562 if (err)
1563 goto out;
1566 if (desc->suite.cipher.dec.vecs)
1567 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1568 desc->suite.cipher.dec.count);
1570 out:
1571 crypto_free_ablkcipher(tfm);
1572 return err;
1575 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1576 u32 type, u32 mask)
1578 struct crypto_comp *tfm;
1579 int err;
1581 tfm = crypto_alloc_comp(driver, type, mask);
1582 if (IS_ERR(tfm)) {
1583 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1584 "%ld\n", driver, PTR_ERR(tfm));
1585 return PTR_ERR(tfm);
1588 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1589 desc->suite.comp.decomp.vecs,
1590 desc->suite.comp.comp.count,
1591 desc->suite.comp.decomp.count);
1593 crypto_free_comp(tfm);
1594 return err;
1597 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1598 u32 type, u32 mask)
1600 struct crypto_pcomp *tfm;
1601 int err;
1603 tfm = crypto_alloc_pcomp(driver, type, mask);
1604 if (IS_ERR(tfm)) {
1605 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1606 driver, PTR_ERR(tfm));
1607 return PTR_ERR(tfm);
1610 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1611 desc->suite.pcomp.decomp.vecs,
1612 desc->suite.pcomp.comp.count,
1613 desc->suite.pcomp.decomp.count);
1615 crypto_free_pcomp(tfm);
1616 return err;
1619 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1620 u32 type, u32 mask)
1622 struct crypto_ahash *tfm;
1623 int err;
1625 tfm = crypto_alloc_ahash(driver, type, mask);
1626 if (IS_ERR(tfm)) {
1627 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1628 "%ld\n", driver, PTR_ERR(tfm));
1629 return PTR_ERR(tfm);
1632 err = test_hash(tfm, desc->suite.hash.vecs,
1633 desc->suite.hash.count, true);
1634 if (!err)
1635 err = test_hash(tfm, desc->suite.hash.vecs,
1636 desc->suite.hash.count, false);
1638 crypto_free_ahash(tfm);
1639 return err;
1642 static int alg_test_crc32c(const struct alg_test_desc *desc,
1643 const char *driver, u32 type, u32 mask)
1645 struct crypto_shash *tfm;
1646 u32 val;
1647 int err;
1649 err = alg_test_hash(desc, driver, type, mask);
1650 if (err)
1651 goto out;
1653 tfm = crypto_alloc_shash(driver, type, mask);
1654 if (IS_ERR(tfm)) {
1655 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1656 "%ld\n", driver, PTR_ERR(tfm));
1657 err = PTR_ERR(tfm);
1658 goto out;
1661 do {
1662 struct {
1663 struct shash_desc shash;
1664 char ctx[crypto_shash_descsize(tfm)];
1665 } sdesc;
1667 sdesc.shash.tfm = tfm;
1668 sdesc.shash.flags = 0;
1670 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1671 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1672 if (err) {
1673 printk(KERN_ERR "alg: crc32c: Operation failed for "
1674 "%s: %d\n", driver, err);
1675 break;
1678 if (val != ~420553207) {
1679 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1680 "%d\n", driver, val);
1681 err = -EINVAL;
1683 } while (0);
1685 crypto_free_shash(tfm);
1687 out:
1688 return err;
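/*
 * What the extra crc32c check above does (sketch of the idea): beyond the
 * normal hash vectors, it loads the shash partial state directly with a
 * fixed seed and calls final() over no data.  crc32c's final step is a
 * bitwise inversion of the running state, so the test expects to read back
 * the complement of the seed; anything else means the driver's handling of
 * imported partial state or its final() step is broken.
 */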
1691 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1692 u32 type, u32 mask)
1694 struct crypto_rng *rng;
1695 int err;
1697 rng = crypto_alloc_rng(driver, type, mask);
1698 if (IS_ERR(rng)) {
1699 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1700 "%ld\n", driver, PTR_ERR(rng));
1701 return PTR_ERR(rng);
1704 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1706 crypto_free_rng(rng);
1708 return err;
1711 static int alg_test_null(const struct alg_test_desc *desc,
1712 const char *driver, u32 type, u32 mask)
1714 return 0;
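/*
 * The table below maps algorithm names to their test routine and test
 * vectors.  It must stay sorted because the lookup code later in this file
 * binary-searches it by the .alg name, so an out-of-order entry would
 * simply never be found.
 */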
1717 /* Please keep this list sorted by algorithm name. */
1718 static const struct alg_test_desc alg_test_descs[] = {
1720 .alg = "__cbc-cast5-avx",
1721 .test = alg_test_null,
1722 }, {
1723 .alg = "__cbc-cast6-avx",
1724 .test = alg_test_null,
1725 }, {
1726 .alg = "__cbc-serpent-avx",
1727 .test = alg_test_null,
1728 }, {
1729 .alg = "__cbc-serpent-avx2",
1730 .test = alg_test_null,
1731 }, {
1732 .alg = "__cbc-serpent-sse2",
1733 .test = alg_test_null,
1734 }, {
1735 .alg = "__cbc-twofish-avx",
1736 .test = alg_test_null,
1737 }, {
1738 .alg = "__driver-cbc-aes-aesni",
1739 .test = alg_test_null,
1740 .fips_allowed = 1,
1741 }, {
1742 .alg = "__driver-cbc-camellia-aesni",
1743 .test = alg_test_null,
1744 }, {
1745 .alg = "__driver-cbc-camellia-aesni-avx2",
1746 .test = alg_test_null,
1747 }, {
1748 .alg = "__driver-cbc-cast5-avx",
1749 .test = alg_test_null,
1750 }, {
1751 .alg = "__driver-cbc-cast6-avx",
1752 .test = alg_test_null,
1753 }, {
1754 .alg = "__driver-cbc-serpent-avx",
1755 .test = alg_test_null,
1756 }, {
1757 .alg = "__driver-cbc-serpent-avx2",
1758 .test = alg_test_null,
1759 }, {
1760 .alg = "__driver-cbc-serpent-sse2",
1761 .test = alg_test_null,
1762 }, {
1763 .alg = "__driver-cbc-twofish-avx",
1764 .test = alg_test_null,
1765 }, {
1766 .alg = "__driver-ecb-aes-aesni",
1767 .test = alg_test_null,
1768 .fips_allowed = 1,
1769 }, {
1770 .alg = "__driver-ecb-camellia-aesni",
1771 .test = alg_test_null,
1772 }, {
1773 .alg = "__driver-ecb-camellia-aesni-avx2",
1774 .test = alg_test_null,
1775 }, {
1776 .alg = "__driver-ecb-cast5-avx",
1777 .test = alg_test_null,
1778 }, {
1779 .alg = "__driver-ecb-cast6-avx",
1780 .test = alg_test_null,
1781 }, {
1782 .alg = "__driver-ecb-serpent-avx",
1783 .test = alg_test_null,
1784 }, {
1785 .alg = "__driver-ecb-serpent-avx2",
1786 .test = alg_test_null,
1787 }, {
1788 .alg = "__driver-ecb-serpent-sse2",
1789 .test = alg_test_null,
1790 }, {
1791 .alg = "__driver-ecb-twofish-avx",
1792 .test = alg_test_null,
1793 }, {
1794 .alg = "__ghash-pclmulqdqni",
1795 .test = alg_test_null,
1796 .fips_allowed = 1,
1797 }, {
1798 .alg = "ansi_cprng",
1799 .test = alg_test_cprng,
1800 .fips_allowed = 1,
1801 .suite = {
1802 .cprng = {
1803 .vecs = ansi_cprng_aes_tv_template,
1804 .count = ANSI_CPRNG_AES_TEST_VECTORS
1807 }, {
1808 .alg = "authenc(hmac(sha1),cbc(aes))",
1809 .test = alg_test_aead,
1810 .fips_allowed = 1,
1811 .suite = {
1812 .aead = {
1813 .enc = {
1814 .vecs = hmac_sha1_aes_cbc_enc_tv_template,
1815 .count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1819 }, {
1820 .alg = "authenc(hmac(sha256),cbc(aes))",
1821 .test = alg_test_aead,
1822 .fips_allowed = 1,
1823 .suite = {
1824 .aead = {
1825 .enc = {
1826 .vecs = hmac_sha256_aes_cbc_enc_tv_template,
1827 .count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1831 }, {
1832 .alg = "authenc(hmac(sha512),cbc(aes))",
1833 .test = alg_test_aead,
1834 .fips_allowed = 1,
1835 .suite = {
1836 .aead = {
1837 .enc = {
1838 .vecs = hmac_sha512_aes_cbc_enc_tv_template,
1839 .count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
1843 }, {
1844 .alg = "cbc(aes)",
1845 .test = alg_test_skcipher,
1846 .fips_allowed = 1,
1847 .suite = {
1848 .cipher = {
1849 .enc = {
1850 .vecs = aes_cbc_enc_tv_template,
1851 .count = AES_CBC_ENC_TEST_VECTORS
1853 .dec = {
1854 .vecs = aes_cbc_dec_tv_template,
1855 .count = AES_CBC_DEC_TEST_VECTORS
1859 }, {
1860 .alg = "cbc(anubis)",
1861 .test = alg_test_skcipher,
1862 .suite = {
1863 .cipher = {
1864 .enc = {
1865 .vecs = anubis_cbc_enc_tv_template,
1866 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1868 .dec = {
1869 .vecs = anubis_cbc_dec_tv_template,
1870 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1874 }, {
1875 .alg = "cbc(blowfish)",
1876 .test = alg_test_skcipher,
1877 .suite = {
1878 .cipher = {
1879 .enc = {
1880 .vecs = bf_cbc_enc_tv_template,
1881 .count = BF_CBC_ENC_TEST_VECTORS
1883 .dec = {
1884 .vecs = bf_cbc_dec_tv_template,
1885 .count = BF_CBC_DEC_TEST_VECTORS
1889 }, {
1890 .alg = "cbc(camellia)",
1891 .test = alg_test_skcipher,
1892 .suite = {
1893 .cipher = {
1894 .enc = {
1895 .vecs = camellia_cbc_enc_tv_template,
1896 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1898 .dec = {
1899 .vecs = camellia_cbc_dec_tv_template,
1900 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1904 }, {
1905 .alg = "cbc(cast5)",
1906 .test = alg_test_skcipher,
1907 .suite = {
1908 .cipher = {
1909 .enc = {
1910 .vecs = cast5_cbc_enc_tv_template,
1911 .count = CAST5_CBC_ENC_TEST_VECTORS
1913 .dec = {
1914 .vecs = cast5_cbc_dec_tv_template,
1915 .count = CAST5_CBC_DEC_TEST_VECTORS
1919 }, {
1920 .alg = "cbc(cast6)",
1921 .test = alg_test_skcipher,
1922 .suite = {
1923 .cipher = {
1924 .enc = {
1925 .vecs = cast6_cbc_enc_tv_template,
1926 .count = CAST6_CBC_ENC_TEST_VECTORS
1928 .dec = {
1929 .vecs = cast6_cbc_dec_tv_template,
1930 .count = CAST6_CBC_DEC_TEST_VECTORS
1934 }, {
1935 .alg = "cbc(des)",
1936 .test = alg_test_skcipher,
1937 .suite = {
1938 .cipher = {
1939 .enc = {
1940 .vecs = des_cbc_enc_tv_template,
1941 .count = DES_CBC_ENC_TEST_VECTORS
1943 .dec = {
1944 .vecs = des_cbc_dec_tv_template,
1945 .count = DES_CBC_DEC_TEST_VECTORS
1949 }, {
1950 .alg = "cbc(des3_ede)",
1951 .test = alg_test_skcipher,
1952 .fips_allowed = 1,
1953 .suite = {
1954 .cipher = {
1955 .enc = {
1956 .vecs = des3_ede_cbc_enc_tv_template,
1957 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1959 .dec = {
1960 .vecs = des3_ede_cbc_dec_tv_template,
1961 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1965 }, {
1966 .alg = "cbc(serpent)",
1967 .test = alg_test_skcipher,
1968 .suite = {
1969 .cipher = {
1970 .enc = {
1971 .vecs = serpent_cbc_enc_tv_template,
1972 .count = SERPENT_CBC_ENC_TEST_VECTORS
1974 .dec = {
1975 .vecs = serpent_cbc_dec_tv_template,
1976 .count = SERPENT_CBC_DEC_TEST_VECTORS
1980 }, {
1981 .alg = "cbc(twofish)",
1982 .test = alg_test_skcipher,
1983 .suite = {
1984 .cipher = {
1985 .enc = {
1986 .vecs = tf_cbc_enc_tv_template,
1987 .count = TF_CBC_ENC_TEST_VECTORS
1989 .dec = {
1990 .vecs = tf_cbc_dec_tv_template,
1991 .count = TF_CBC_DEC_TEST_VECTORS
1995 }, {
1996 .alg = "ccm(aes)",
1997 .test = alg_test_aead,
1998 .fips_allowed = 1,
1999 .suite = {
2000 .aead = {
2001 .enc = {
2002 .vecs = aes_ccm_enc_tv_template,
2003 .count = AES_CCM_ENC_TEST_VECTORS
2005 .dec = {
2006 .vecs = aes_ccm_dec_tv_template,
2007 .count = AES_CCM_DEC_TEST_VECTORS
2011 }, {
2012 .alg = "cmac(aes)",
2013 .test = alg_test_hash,
2014 .suite = {
2015 .hash = {
2016 .vecs = aes_cmac128_tv_template,
2017 .count = CMAC_AES_TEST_VECTORS
2020 }, {
2021 .alg = "cmac(des3_ede)",
2022 .test = alg_test_hash,
2023 .suite = {
2024 .hash = {
2025 .vecs = des3_ede_cmac64_tv_template,
2026 .count = CMAC_DES3_EDE_TEST_VECTORS
2029 }, {
2030 .alg = "compress_null",
2031 .test = alg_test_null,
2032 }, {
2033 .alg = "crc32c",
2034 .test = alg_test_crc32c,
2035 .fips_allowed = 1,
2036 .suite = {
2037 .hash = {
2038 .vecs = crc32c_tv_template,
2039 .count = CRC32C_TEST_VECTORS
2042 }, {
2043 .alg = "crct10dif",
2044 .test = alg_test_hash,
2045 .fips_allowed = 1,
2046 .suite = {
2047 .hash = {
2048 .vecs = crct10dif_tv_template,
2049 .count = CRCT10DIF_TEST_VECTORS
2052 }, {
2053 .alg = "cryptd(__driver-cbc-aes-aesni)",
2054 .test = alg_test_null,
2055 .fips_allowed = 1,
2056 }, {
2057 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2058 .test = alg_test_null,
2059 }, {
2060 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2061 .test = alg_test_null,
2062 }, {
2063 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2064 .test = alg_test_null,
2065 }, {
2066 .alg = "cryptd(__driver-ecb-aes-aesni)",
2067 .test = alg_test_null,
2068 .fips_allowed = 1,
2069 }, {
2070 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2071 .test = alg_test_null,
2072 }, {
2073 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2074 .test = alg_test_null,
2075 }, {
2076 .alg = "cryptd(__driver-ecb-cast5-avx)",
2077 .test = alg_test_null,
2078 }, {
2079 .alg = "cryptd(__driver-ecb-cast6-avx)",
2080 .test = alg_test_null,
2081 }, {
2082 .alg = "cryptd(__driver-ecb-serpent-avx)",
2083 .test = alg_test_null,
2084 }, {
2085 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2086 .test = alg_test_null,
2087 }, {
2088 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2089 .test = alg_test_null,
2090 }, {
2091 .alg = "cryptd(__driver-ecb-twofish-avx)",
2092 .test = alg_test_null,
2093 }, {
2094 .alg = "cryptd(__driver-gcm-aes-aesni)",
2095 .test = alg_test_null,
2096 .fips_allowed = 1,
2097 }, {
2098 .alg = "cryptd(__ghash-pclmulqdqni)",
2099 .test = alg_test_null,
2100 .fips_allowed = 1,
2101 }, {
2102 .alg = "ctr(aes)",
2103 .test = alg_test_skcipher,
2104 .fips_allowed = 1,
2105 .suite = {
2106 .cipher = {
2107 .enc = {
2108 .vecs = aes_ctr_enc_tv_template,
2109 .count = AES_CTR_ENC_TEST_VECTORS
2111 .dec = {
2112 .vecs = aes_ctr_dec_tv_template,
2113 .count = AES_CTR_DEC_TEST_VECTORS
2117 }, {
2118 .alg = "ctr(blowfish)",
2119 .test = alg_test_skcipher,
2120 .suite = {
2121 .cipher = {
2122 .enc = {
2123 .vecs = bf_ctr_enc_tv_template,
2124 .count = BF_CTR_ENC_TEST_VECTORS
2126 .dec = {
2127 .vecs = bf_ctr_dec_tv_template,
2128 .count = BF_CTR_DEC_TEST_VECTORS
2132 }, {
2133 .alg = "ctr(camellia)",
2134 .test = alg_test_skcipher,
2135 .suite = {
2136 .cipher = {
2137 .enc = {
2138 .vecs = camellia_ctr_enc_tv_template,
2139 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2141 .dec = {
2142 .vecs = camellia_ctr_dec_tv_template,
2143 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2147 }, {
2148 .alg = "ctr(cast5)",
2149 .test = alg_test_skcipher,
2150 .suite = {
2151 .cipher = {
2152 .enc = {
2153 .vecs = cast5_ctr_enc_tv_template,
2154 .count = CAST5_CTR_ENC_TEST_VECTORS
2156 .dec = {
2157 .vecs = cast5_ctr_dec_tv_template,
2158 .count = CAST5_CTR_DEC_TEST_VECTORS
2162 }, {
2163 .alg = "ctr(cast6)",
2164 .test = alg_test_skcipher,
2165 .suite = {
2166 .cipher = {
2167 .enc = {
2168 .vecs = cast6_ctr_enc_tv_template,
2169 .count = CAST6_CTR_ENC_TEST_VECTORS
2171 .dec = {
2172 .vecs = cast6_ctr_dec_tv_template,
2173 .count = CAST6_CTR_DEC_TEST_VECTORS
2177 }, {
2178 .alg = "ctr(des)",
2179 .test = alg_test_skcipher,
2180 .suite = {
2181 .cipher = {
2182 .enc = {
2183 .vecs = des_ctr_enc_tv_template,
2184 .count = DES_CTR_ENC_TEST_VECTORS
2186 .dec = {
2187 .vecs = des_ctr_dec_tv_template,
2188 .count = DES_CTR_DEC_TEST_VECTORS
2192 }, {
2193 .alg = "ctr(des3_ede)",
2194 .test = alg_test_skcipher,
2195 .suite = {
2196 .cipher = {
2197 .enc = {
2198 .vecs = des3_ede_ctr_enc_tv_template,
2199 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2201 .dec = {
2202 .vecs = des3_ede_ctr_dec_tv_template,
2203 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2207 }, {
2208 .alg = "ctr(serpent)",
2209 .test = alg_test_skcipher,
2210 .suite = {
2211 .cipher = {
2212 .enc = {
2213 .vecs = serpent_ctr_enc_tv_template,
2214 .count = SERPENT_CTR_ENC_TEST_VECTORS
2216 .dec = {
2217 .vecs = serpent_ctr_dec_tv_template,
2218 .count = SERPENT_CTR_DEC_TEST_VECTORS
2222 }, {
2223 .alg = "ctr(twofish)",
2224 .test = alg_test_skcipher,
2225 .suite = {
2226 .cipher = {
2227 .enc = {
2228 .vecs = tf_ctr_enc_tv_template,
2229 .count = TF_CTR_ENC_TEST_VECTORS
2231 .dec = {
2232 .vecs = tf_ctr_dec_tv_template,
2233 .count = TF_CTR_DEC_TEST_VECTORS
2237 }, {
2238 .alg = "cts(cbc(aes))",
2239 .test = alg_test_skcipher,
2240 .suite = {
2241 .cipher = {
2242 .enc = {
2243 .vecs = cts_mode_enc_tv_template,
2244 .count = CTS_MODE_ENC_TEST_VECTORS
2246 .dec = {
2247 .vecs = cts_mode_dec_tv_template,
2248 .count = CTS_MODE_DEC_TEST_VECTORS
2252 }, {
2253 .alg = "deflate",
2254 .test = alg_test_comp,
2255 .fips_allowed = 1,
2256 .suite = {
2257 .comp = {
2258 .comp = {
2259 .vecs = deflate_comp_tv_template,
2260 .count = DEFLATE_COMP_TEST_VECTORS
2262 .decomp = {
2263 .vecs = deflate_decomp_tv_template,
2264 .count = DEFLATE_DECOMP_TEST_VECTORS
2268 }, {
2269 .alg = "digest_null",
2270 .test = alg_test_null,
2271 }, {
2272 .alg = "ecb(__aes-aesni)",
2273 .test = alg_test_null,
2274 .fips_allowed = 1,
2275 }, {
2276 .alg = "ecb(aes)",
2277 .test = alg_test_skcipher,
2278 .fips_allowed = 1,
2279 .suite = {
2280 .cipher = {
2281 .enc = {
2282 .vecs = aes_enc_tv_template,
2283 .count = AES_ENC_TEST_VECTORS
2285 .dec = {
2286 .vecs = aes_dec_tv_template,
2287 .count = AES_DEC_TEST_VECTORS
2291 }, {
2292 .alg = "ecb(anubis)",
2293 .test = alg_test_skcipher,
2294 .suite = {
2295 .cipher = {
2296 .enc = {
2297 .vecs = anubis_enc_tv_template,
2298 .count = ANUBIS_ENC_TEST_VECTORS
2300 .dec = {
2301 .vecs = anubis_dec_tv_template,
2302 .count = ANUBIS_DEC_TEST_VECTORS
2306 }, {
2307 .alg = "ecb(arc4)",
2308 .test = alg_test_skcipher,
2309 .suite = {
2310 .cipher = {
2311 .enc = {
2312 .vecs = arc4_enc_tv_template,
2313 .count = ARC4_ENC_TEST_VECTORS
2315 .dec = {
2316 .vecs = arc4_dec_tv_template,
2317 .count = ARC4_DEC_TEST_VECTORS
2321 }, {
2322 .alg = "ecb(blowfish)",
2323 .test = alg_test_skcipher,
2324 .suite = {
2325 .cipher = {
2326 .enc = {
2327 .vecs = bf_enc_tv_template,
2328 .count = BF_ENC_TEST_VECTORS
2330 .dec = {
2331 .vecs = bf_dec_tv_template,
2332 .count = BF_DEC_TEST_VECTORS
2336 }, {
2337 .alg = "ecb(camellia)",
2338 .test = alg_test_skcipher,
2339 .suite = {
2340 .cipher = {
2341 .enc = {
2342 .vecs = camellia_enc_tv_template,
2343 .count = CAMELLIA_ENC_TEST_VECTORS
2345 .dec = {
2346 .vecs = camellia_dec_tv_template,
2347 .count = CAMELLIA_DEC_TEST_VECTORS
2351 }, {
2352 .alg = "ecb(cast5)",
2353 .test = alg_test_skcipher,
2354 .suite = {
2355 .cipher = {
2356 .enc = {
2357 .vecs = cast5_enc_tv_template,
2358 .count = CAST5_ENC_TEST_VECTORS
2360 .dec = {
2361 .vecs = cast5_dec_tv_template,
2362 .count = CAST5_DEC_TEST_VECTORS
2366 }, {
2367 .alg = "ecb(cast6)",
2368 .test = alg_test_skcipher,
2369 .suite = {
2370 .cipher = {
2371 .enc = {
2372 .vecs = cast6_enc_tv_template,
2373 .count = CAST6_ENC_TEST_VECTORS
2375 .dec = {
2376 .vecs = cast6_dec_tv_template,
2377 .count = CAST6_DEC_TEST_VECTORS
2381 }, {
2382 .alg = "ecb(cipher_null)",
2383 .test = alg_test_null,
2384 }, {
2385 .alg = "ecb(des)",
2386 .test = alg_test_skcipher,
2387 .fips_allowed = 1,
2388 .suite = {
2389 .cipher = {
2390 .enc = {
2391 .vecs = des_enc_tv_template,
2392 .count = DES_ENC_TEST_VECTORS
2394 .dec = {
2395 .vecs = des_dec_tv_template,
2396 .count = DES_DEC_TEST_VECTORS
2400 }, {
2401 .alg = "ecb(des3_ede)",
2402 .test = alg_test_skcipher,
2403 .fips_allowed = 1,
2404 .suite = {
2405 .cipher = {
2406 .enc = {
2407 .vecs = des3_ede_enc_tv_template,
2408 .count = DES3_EDE_ENC_TEST_VECTORS
2410 .dec = {
2411 .vecs = des3_ede_dec_tv_template,
2412 .count = DES3_EDE_DEC_TEST_VECTORS
2416 }, {
2417 .alg = "ecb(fcrypt)",
2418 .test = alg_test_skcipher,
2419 .suite = {
2420 .cipher = {
2421 .enc = {
2422 .vecs = fcrypt_pcbc_enc_tv_template,
2423 .count = 1
2425 .dec = {
2426 .vecs = fcrypt_pcbc_dec_tv_template,
2427 .count = 1
2431 }, {
2432 .alg = "ecb(khazad)",
2433 .test = alg_test_skcipher,
2434 .suite = {
2435 .cipher = {
2436 .enc = {
2437 .vecs = khazad_enc_tv_template,
2438 .count = KHAZAD_ENC_TEST_VECTORS
2440 .dec = {
2441 .vecs = khazad_dec_tv_template,
2442 .count = KHAZAD_DEC_TEST_VECTORS
2446 }, {
2447 .alg = "ecb(seed)",
2448 .test = alg_test_skcipher,
2449 .suite = {
2450 .cipher = {
2451 .enc = {
2452 .vecs = seed_enc_tv_template,
2453 .count = SEED_ENC_TEST_VECTORS
2455 .dec = {
2456 .vecs = seed_dec_tv_template,
2457 .count = SEED_DEC_TEST_VECTORS
2461 }, {
2462 .alg = "ecb(serpent)",
2463 .test = alg_test_skcipher,
2464 .suite = {
2465 .cipher = {
2466 .enc = {
2467 .vecs = serpent_enc_tv_template,
2468 .count = SERPENT_ENC_TEST_VECTORS
2469 },
2470 .dec = {
2471 .vecs = serpent_dec_tv_template,
2472 .count = SERPENT_DEC_TEST_VECTORS
2473 }
2474 }
2475 }
2476 }, {
2477 .alg = "ecb(tea)",
2478 .test = alg_test_skcipher,
2479 .suite = {
2480 .cipher = {
2481 .enc = {
2482 .vecs = tea_enc_tv_template,
2483 .count = TEA_ENC_TEST_VECTORS
2484 },
2485 .dec = {
2486 .vecs = tea_dec_tv_template,
2487 .count = TEA_DEC_TEST_VECTORS
2488 }
2489 }
2490 }
2491 }, {
2492 .alg = "ecb(tnepres)",
2493 .test = alg_test_skcipher,
2494 .suite = {
2495 .cipher = {
2496 .enc = {
2497 .vecs = tnepres_enc_tv_template,
2498 .count = TNEPRES_ENC_TEST_VECTORS
2499 },
2500 .dec = {
2501 .vecs = tnepres_dec_tv_template,
2502 .count = TNEPRES_DEC_TEST_VECTORS
2503 }
2504 }
2505 }
2506 }, {
2507 .alg = "ecb(twofish)",
2508 .test = alg_test_skcipher,
2509 .suite = {
2510 .cipher = {
2511 .enc = {
2512 .vecs = tf_enc_tv_template,
2513 .count = TF_ENC_TEST_VECTORS
2514 },
2515 .dec = {
2516 .vecs = tf_dec_tv_template,
2517 .count = TF_DEC_TEST_VECTORS
2518 }
2519 }
2520 }
2521 }, {
2522 .alg = "ecb(xeta)",
2523 .test = alg_test_skcipher,
2524 .suite = {
2525 .cipher = {
2526 .enc = {
2527 .vecs = xeta_enc_tv_template,
2528 .count = XETA_ENC_TEST_VECTORS
2529 },
2530 .dec = {
2531 .vecs = xeta_dec_tv_template,
2532 .count = XETA_DEC_TEST_VECTORS
2533 }
2534 }
2535 }
2536 }, {
2537 .alg = "ecb(xtea)",
2538 .test = alg_test_skcipher,
2539 .suite = {
2540 .cipher = {
2541 .enc = {
2542 .vecs = xtea_enc_tv_template,
2543 .count = XTEA_ENC_TEST_VECTORS
2544 },
2545 .dec = {
2546 .vecs = xtea_dec_tv_template,
2547 .count = XTEA_DEC_TEST_VECTORS
2548 }
2549 }
2550 }
2551 }, {
2552 .alg = "gcm(aes)",
2553 .test = alg_test_aead,
2554 .fips_allowed = 1,
2555 .suite = {
2556 .aead = {
2557 .enc = {
2558 .vecs = aes_gcm_enc_tv_template,
2559 .count = AES_GCM_ENC_TEST_VECTORS
2560 },
2561 .dec = {
2562 .vecs = aes_gcm_dec_tv_template,
2563 .count = AES_GCM_DEC_TEST_VECTORS
2564 }
2565 }
2566 }
2567 }, {
2568 .alg = "ghash",
2569 .test = alg_test_hash,
2570 .fips_allowed = 1,
2571 .suite = {
2572 .hash = {
2573 .vecs = ghash_tv_template,
2574 .count = GHASH_TEST_VECTORS
2575 }
2576 }
2577 }, {
2578 .alg = "hmac(crc32)",
2579 .test = alg_test_hash,
2580 .suite = {
2581 .hash = {
2582 .vecs = bfin_crc_tv_template,
2583 .count = BFIN_CRC_TEST_VECTORS
2584 }
2585 }
2586 }, {
2587 .alg = "hmac(md5)",
2588 .test = alg_test_hash,
2589 .suite = {
2590 .hash = {
2591 .vecs = hmac_md5_tv_template,
2592 .count = HMAC_MD5_TEST_VECTORS
2593 }
2594 }
2595 }, {
2596 .alg = "hmac(rmd128)",
2597 .test = alg_test_hash,
2598 .suite = {
2599 .hash = {
2600 .vecs = hmac_rmd128_tv_template,
2601 .count = HMAC_RMD128_TEST_VECTORS
2602 }
2603 }
2604 }, {
2605 .alg = "hmac(rmd160)",
2606 .test = alg_test_hash,
2607 .suite = {
2608 .hash = {
2609 .vecs = hmac_rmd160_tv_template,
2610 .count = HMAC_RMD160_TEST_VECTORS
2611 }
2612 }
2613 }, {
2614 .alg = "hmac(sha1)",
2615 .test = alg_test_hash,
2616 .fips_allowed = 1,
2617 .suite = {
2618 .hash = {
2619 .vecs = hmac_sha1_tv_template,
2620 .count = HMAC_SHA1_TEST_VECTORS
2621 }
2622 }
2623 }, {
2624 .alg = "hmac(sha224)",
2625 .test = alg_test_hash,
2626 .fips_allowed = 1,
2627 .suite = {
2628 .hash = {
2629 .vecs = hmac_sha224_tv_template,
2630 .count = HMAC_SHA224_TEST_VECTORS
2631 }
2632 }
2633 }, {
2634 .alg = "hmac(sha256)",
2635 .test = alg_test_hash,
2636 .fips_allowed = 1,
2637 .suite = {
2638 .hash = {
2639 .vecs = hmac_sha256_tv_template,
2640 .count = HMAC_SHA256_TEST_VECTORS
2641 }
2642 }
2643 }, {
2644 .alg = "hmac(sha384)",
2645 .test = alg_test_hash,
2646 .fips_allowed = 1,
2647 .suite = {
2648 .hash = {
2649 .vecs = hmac_sha384_tv_template,
2650 .count = HMAC_SHA384_TEST_VECTORS
2651 }
2652 }
2653 }, {
2654 .alg = "hmac(sha512)",
2655 .test = alg_test_hash,
2656 .fips_allowed = 1,
2657 .suite = {
2658 .hash = {
2659 .vecs = hmac_sha512_tv_template,
2660 .count = HMAC_SHA512_TEST_VECTORS
2661 }
2662 }
2663 }, {
2664 .alg = "lrw(aes)",
2665 .test = alg_test_skcipher,
2666 .suite = {
2667 .cipher = {
2668 .enc = {
2669 .vecs = aes_lrw_enc_tv_template,
2670 .count = AES_LRW_ENC_TEST_VECTORS
2671 },
2672 .dec = {
2673 .vecs = aes_lrw_dec_tv_template,
2674 .count = AES_LRW_DEC_TEST_VECTORS
2675 }
2676 }
2677 }
2678 }, {
2679 .alg = "lrw(camellia)",
2680 .test = alg_test_skcipher,
2681 .suite = {
2682 .cipher = {
2683 .enc = {
2684 .vecs = camellia_lrw_enc_tv_template,
2685 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
2686 },
2687 .dec = {
2688 .vecs = camellia_lrw_dec_tv_template,
2689 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
2690 }
2691 }
2692 }
2693 }, {
2694 .alg = "lrw(cast6)",
2695 .test = alg_test_skcipher,
2696 .suite = {
2697 .cipher = {
2698 .enc = {
2699 .vecs = cast6_lrw_enc_tv_template,
2700 .count = CAST6_LRW_ENC_TEST_VECTORS
2701 },
2702 .dec = {
2703 .vecs = cast6_lrw_dec_tv_template,
2704 .count = CAST6_LRW_DEC_TEST_VECTORS
2705 }
2706 }
2707 }
2708 }, {
2709 .alg = "lrw(serpent)",
2710 .test = alg_test_skcipher,
2711 .suite = {
2712 .cipher = {
2713 .enc = {
2714 .vecs = serpent_lrw_enc_tv_template,
2715 .count = SERPENT_LRW_ENC_TEST_VECTORS
2716 },
2717 .dec = {
2718 .vecs = serpent_lrw_dec_tv_template,
2719 .count = SERPENT_LRW_DEC_TEST_VECTORS
2720 }
2721 }
2722 }
2723 }, {
2724 .alg = "lrw(twofish)",
2725 .test = alg_test_skcipher,
2726 .suite = {
2727 .cipher = {
2728 .enc = {
2729 .vecs = tf_lrw_enc_tv_template,
2730 .count = TF_LRW_ENC_TEST_VECTORS
2731 },
2732 .dec = {
2733 .vecs = tf_lrw_dec_tv_template,
2734 .count = TF_LRW_DEC_TEST_VECTORS
2735 }
2736 }
2737 }
2738 }, {
2739 .alg = "lzo",
2740 .test = alg_test_comp,
2741 .fips_allowed = 1,
2742 .suite = {
2743 .comp = {
2744 .comp = {
2745 .vecs = lzo_comp_tv_template,
2746 .count = LZO_COMP_TEST_VECTORS
2747 },
2748 .decomp = {
2749 .vecs = lzo_decomp_tv_template,
2750 .count = LZO_DECOMP_TEST_VECTORS
2751 }
2752 }
2753 }
2754 }, {
2755 .alg = "md4",
2756 .test = alg_test_hash,
2757 .suite = {
2758 .hash = {
2759 .vecs = md4_tv_template,
2760 .count = MD4_TEST_VECTORS
2761 }
2762 }
2763 }, {
2764 .alg = "md5",
2765 .test = alg_test_hash,
2766 .suite = {
2767 .hash = {
2768 .vecs = md5_tv_template,
2769 .count = MD5_TEST_VECTORS
2770 }
2771 }
2772 }, {
2773 .alg = "michael_mic",
2774 .test = alg_test_hash,
2775 .suite = {
2776 .hash = {
2777 .vecs = michael_mic_tv_template,
2778 .count = MICHAEL_MIC_TEST_VECTORS
2779 }
2780 }
2781 }, {
2782 .alg = "ofb(aes)",
2783 .test = alg_test_skcipher,
2784 .fips_allowed = 1,
2785 .suite = {
2786 .cipher = {
2787 .enc = {
2788 .vecs = aes_ofb_enc_tv_template,
2789 .count = AES_OFB_ENC_TEST_VECTORS
2790 },
2791 .dec = {
2792 .vecs = aes_ofb_dec_tv_template,
2793 .count = AES_OFB_DEC_TEST_VECTORS
2794 }
2795 }
2796 }
2797 }, {
2798 .alg = "pcbc(fcrypt)",
2799 .test = alg_test_skcipher,
2800 .suite = {
2801 .cipher = {
2802 .enc = {
2803 .vecs = fcrypt_pcbc_enc_tv_template,
2804 .count = FCRYPT_ENC_TEST_VECTORS
2805 },
2806 .dec = {
2807 .vecs = fcrypt_pcbc_dec_tv_template,
2808 .count = FCRYPT_DEC_TEST_VECTORS
2809 }
2810 }
2811 }
2812 }, {
2813 .alg = "rfc3686(ctr(aes))",
2814 .test = alg_test_skcipher,
2815 .fips_allowed = 1,
2816 .suite = {
2817 .cipher = {
2818 .enc = {
2819 .vecs = aes_ctr_rfc3686_enc_tv_template,
2820 .count = AES_CTR_3686_ENC_TEST_VECTORS
2821 },
2822 .dec = {
2823 .vecs = aes_ctr_rfc3686_dec_tv_template,
2824 .count = AES_CTR_3686_DEC_TEST_VECTORS
2825 }
2826 }
2827 }
2828 }, {
2829 .alg = "rfc4106(gcm(aes))",
2830 .test = alg_test_aead,
2831 .suite = {
2832 .aead = {
2833 .enc = {
2834 .vecs = aes_gcm_rfc4106_enc_tv_template,
2835 .count = AES_GCM_4106_ENC_TEST_VECTORS
2836 },
2837 .dec = {
2838 .vecs = aes_gcm_rfc4106_dec_tv_template,
2839 .count = AES_GCM_4106_DEC_TEST_VECTORS
2840 }
2841 }
2842 }
2843 }, {
2844 .alg = "rfc4309(ccm(aes))",
2845 .test = alg_test_aead,
2846 .fips_allowed = 1,
2847 .suite = {
2848 .aead = {
2849 .enc = {
2850 .vecs = aes_ccm_rfc4309_enc_tv_template,
2851 .count = AES_CCM_4309_ENC_TEST_VECTORS
2852 },
2853 .dec = {
2854 .vecs = aes_ccm_rfc4309_dec_tv_template,
2855 .count = AES_CCM_4309_DEC_TEST_VECTORS
2856 }
2857 }
2858 }
2859 }, {
2860 .alg = "rfc4543(gcm(aes))",
2861 .test = alg_test_aead,
2862 .suite = {
2863 .aead = {
2864 .enc = {
2865 .vecs = aes_gcm_rfc4543_enc_tv_template,
2866 .count = AES_GCM_4543_ENC_TEST_VECTORS
2867 },
2868 .dec = {
2869 .vecs = aes_gcm_rfc4543_dec_tv_template,
2870 .count = AES_GCM_4543_DEC_TEST_VECTORS
2871 }
2872 }
2873 }
2874 }, {
2875 .alg = "rmd128",
2876 .test = alg_test_hash,
2877 .suite = {
2878 .hash = {
2879 .vecs = rmd128_tv_template,
2880 .count = RMD128_TEST_VECTORS
2881 }
2882 }
2883 }, {
2884 .alg = "rmd160",
2885 .test = alg_test_hash,
2886 .suite = {
2887 .hash = {
2888 .vecs = rmd160_tv_template,
2889 .count = RMD160_TEST_VECTORS
2890 }
2891 }
2892 }, {
2893 .alg = "rmd256",
2894 .test = alg_test_hash,
2895 .suite = {
2896 .hash = {
2897 .vecs = rmd256_tv_template,
2898 .count = RMD256_TEST_VECTORS
2899 }
2900 }
2901 }, {
2902 .alg = "rmd320",
2903 .test = alg_test_hash,
2904 .suite = {
2905 .hash = {
2906 .vecs = rmd320_tv_template,
2907 .count = RMD320_TEST_VECTORS
2908 }
2909 }
2910 }, {
2911 .alg = "salsa20",
2912 .test = alg_test_skcipher,
2913 .suite = {
2914 .cipher = {
2915 .enc = {
2916 .vecs = salsa20_stream_enc_tv_template,
2917 .count = SALSA20_STREAM_ENC_TEST_VECTORS
2918 }
2919 }
2920 }
2921 }, {
2922 .alg = "sha1",
2923 .test = alg_test_hash,
2924 .fips_allowed = 1,
2925 .suite = {
2926 .hash = {
2927 .vecs = sha1_tv_template,
2928 .count = SHA1_TEST_VECTORS
2929 }
2930 }
2931 }, {
2932 .alg = "sha224",
2933 .test = alg_test_hash,
2934 .fips_allowed = 1,
2935 .suite = {
2936 .hash = {
2937 .vecs = sha224_tv_template,
2938 .count = SHA224_TEST_VECTORS
2939 }
2940 }
2941 }, {
2942 .alg = "sha256",
2943 .test = alg_test_hash,
2944 .fips_allowed = 1,
2945 .suite = {
2946 .hash = {
2947 .vecs = sha256_tv_template,
2948 .count = SHA256_TEST_VECTORS
2949 }
2950 }
2951 }, {
2952 .alg = "sha384",
2953 .test = alg_test_hash,
2954 .fips_allowed = 1,
2955 .suite = {
2956 .hash = {
2957 .vecs = sha384_tv_template,
2958 .count = SHA384_TEST_VECTORS
2959 }
2960 }
2961 }, {
2962 .alg = "sha512",
2963 .test = alg_test_hash,
2964 .fips_allowed = 1,
2965 .suite = {
2966 .hash = {
2967 .vecs = sha512_tv_template,
2968 .count = SHA512_TEST_VECTORS
2969 }
2970 }
2971 }, {
2972 .alg = "tgr128",
2973 .test = alg_test_hash,
2974 .suite = {
2975 .hash = {
2976 .vecs = tgr128_tv_template,
2977 .count = TGR128_TEST_VECTORS
2978 }
2979 }
2980 }, {
2981 .alg = "tgr160",
2982 .test = alg_test_hash,
2983 .suite = {
2984 .hash = {
2985 .vecs = tgr160_tv_template,
2986 .count = TGR160_TEST_VECTORS
2987 }
2988 }
2989 }, {
2990 .alg = "tgr192",
2991 .test = alg_test_hash,
2992 .suite = {
2993 .hash = {
2994 .vecs = tgr192_tv_template,
2995 .count = TGR192_TEST_VECTORS
2996 }
2997 }
2998 }, {
2999 .alg = "vmac(aes)",
3000 .test = alg_test_hash,
3001 .suite = {
3002 .hash = {
3003 .vecs = aes_vmac128_tv_template,
3004 .count = VMAC_AES_TEST_VECTORS
3005 }
3006 }
3007 }, {
3008 .alg = "wp256",
3009 .test = alg_test_hash,
3010 .suite = {
3011 .hash = {
3012 .vecs = wp256_tv_template,
3013 .count = WP256_TEST_VECTORS
3014 }
3015 }
3016 }, {
3017 .alg = "wp384",
3018 .test = alg_test_hash,
3019 .suite = {
3020 .hash = {
3021 .vecs = wp384_tv_template,
3022 .count = WP384_TEST_VECTORS
3023 }
3024 }
3025 }, {
3026 .alg = "wp512",
3027 .test = alg_test_hash,
3028 .suite = {
3029 .hash = {
3030 .vecs = wp512_tv_template,
3031 .count = WP512_TEST_VECTORS
3032 }
3033 }
3034 }, {
3035 .alg = "xcbc(aes)",
3036 .test = alg_test_hash,
3037 .suite = {
3038 .hash = {
3039 .vecs = aes_xcbc128_tv_template,
3040 .count = XCBC_AES_TEST_VECTORS
3041 }
3042 }
3043 }, {
3044 .alg = "xts(aes)",
3045 .test = alg_test_skcipher,
3046 .fips_allowed = 1,
3047 .suite = {
3048 .cipher = {
3049 .enc = {
3050 .vecs = aes_xts_enc_tv_template,
3051 .count = AES_XTS_ENC_TEST_VECTORS
3052 },
3053 .dec = {
3054 .vecs = aes_xts_dec_tv_template,
3055 .count = AES_XTS_DEC_TEST_VECTORS
3056 }
3057 }
3058 }
3059 }, {
3060 .alg = "xts(camellia)",
3061 .test = alg_test_skcipher,
3062 .suite = {
3063 .cipher = {
3064 .enc = {
3065 .vecs = camellia_xts_enc_tv_template,
3066 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
3067 },
3068 .dec = {
3069 .vecs = camellia_xts_dec_tv_template,
3070 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
3071 }
3072 }
3073 }
3074 }, {
3075 .alg = "xts(cast6)",
3076 .test = alg_test_skcipher,
3077 .suite = {
3078 .cipher = {
3079 .enc = {
3080 .vecs = cast6_xts_enc_tv_template,
3081 .count = CAST6_XTS_ENC_TEST_VECTORS
3082 },
3083 .dec = {
3084 .vecs = cast6_xts_dec_tv_template,
3085 .count = CAST6_XTS_DEC_TEST_VECTORS
3086 }
3087 }
3088 }
3089 }, {
3090 .alg = "xts(serpent)",
3091 .test = alg_test_skcipher,
3092 .suite = {
3093 .cipher = {
3094 .enc = {
3095 .vecs = serpent_xts_enc_tv_template,
3096 .count = SERPENT_XTS_ENC_TEST_VECTORS
3097 },
3098 .dec = {
3099 .vecs = serpent_xts_dec_tv_template,
3100 .count = SERPENT_XTS_DEC_TEST_VECTORS
3101 }
3102 }
3103 }
3104 }, {
3105 .alg = "xts(twofish)",
3106 .test = alg_test_skcipher,
3107 .suite = {
3108 .cipher = {
3109 .enc = {
3110 .vecs = tf_xts_enc_tv_template,
3111 .count = TF_XTS_ENC_TEST_VECTORS
3112 },
3113 .dec = {
3114 .vecs = tf_xts_dec_tv_template,
3115 .count = TF_XTS_DEC_TEST_VECTORS
3116 }
3117 }
3118 }
3119 }, {
3120 .alg = "zlib",
3121 .test = alg_test_pcomp,
3122 .fips_allowed = 1,
3123 .suite = {
3124 .pcomp = {
3125 .comp = {
3126 .vecs = zlib_comp_tv_template,
3127 .count = ZLIB_COMP_TEST_VECTORS
3128 },
3129 .decomp = {
3130 .vecs = zlib_decomp_tv_template,
3131 .count = ZLIB_DECOMP_TEST_VECTORS
3132 }
3133 }
3134 }
3135 }
3136 };
3138 static bool alg_test_descs_checked;
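/*
 * One-time sanity check of the table above: walk adjacent entries and
 * WARN if any pair is out of alphabetical order or duplicated. Runs only
 * on the first call into alg_test().
 */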
3140 static void alg_test_descs_check_order(void)
3141 {
3142 int i;
3144 /* only check once */
3145 if (alg_test_descs_checked)
3146 return;
3148 alg_test_descs_checked = true;
3150 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3151 int diff = strcmp(alg_test_descs[i - 1].alg,
3152 alg_test_descs[i].alg);
3154 if (WARN_ON(diff > 0)) {
3155 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3156 alg_test_descs[i - 1].alg,
3157 alg_test_descs[i].alg);
3158 }
3160 if (WARN_ON(diff == 0)) {
3161 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3162 alg_test_descs[i].alg);
3163 }
3164 }
3165 }
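/*
 * Binary search for @alg in the (sorted) alg_test_descs table. Returns
 * the index of the matching entry, or -1 if the algorithm has no test
 * description.
 */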
3167 static int alg_find_test(const char *alg)
3168 {
3169 int start = 0;
3170 int end = ARRAY_SIZE(alg_test_descs);
3172 while (start < end) {
3173 int i = (start + end) / 2;
3174 int diff = strcmp(alg_test_descs[i].alg, alg);
3176 if (diff > 0) {
3177 end = i;
3178 continue;
3179 }
3181 if (diff < 0) {
3182 start = i + 1;
3183 continue;
3184 }
3186 return i;
3187 }
3189 return -1;
3190 }
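/*
 * Self-test entry point, invoked by the crypto manager when an algorithm
 * instance is registered. Test descriptions are looked up for both the
 * algorithm name and the driver name and whichever exist are run; in FIPS
 * mode an algorithm without fips_allowed is rejected and any test failure
 * panics the kernel.
 */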
3192 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
3193 {
3194 int i;
3195 int j;
3196 int rc;
3198 alg_test_descs_check_order();
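/*
 * Plain single-block ciphers are not listed in the table under their own
 * name; they are exercised through the corresponding "ecb(<alg>)" entry.
 */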
3200 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
3201 char nalg[CRYPTO_MAX_ALG_NAME];
3203 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
3204 sizeof(nalg))
3205 return -ENAMETOOLONG;
3207 i = alg_find_test(nalg);
3208 if (i < 0)
3209 goto notest;
3211 if (fips_enabled && !alg_test_descs[i].fips_allowed)
3212 goto non_fips_alg;
3214 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
3215 goto test_done;
3216 }
3218 i = alg_find_test(alg);
3219 j = alg_find_test(driver);
3220 if (i < 0 && j < 0)
3221 goto notest;
3223 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
3224 (j >= 0 && !alg_test_descs[j].fips_allowed)))
3225 goto non_fips_alg;
3227 rc = 0;
3228 if (i >= 0)
3229 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
3230 type, mask);
3231 if (j >= 0 && j != i)
3232 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
3233 type, mask);
3235 test_done:
3236 if (fips_enabled && rc)
3237 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
3239 if (fips_enabled && !rc)
3240 printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
3241 driver, alg);
3243 return rc;
3245 notest:
3246 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
3247 return 0;
3248 non_fips_alg:
3249 return -EINVAL;
3250 }
3252 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3254 EXPORT_SYMBOL_GPL(alg_test);