crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
23 #include <crypto/hash.h>
24 #include <linux/err.h>
25 #include <linux/module.h>
26 #include <linux/scatterlist.h>
27 #include <linux/slab.h>
28 #include <linux/string.h>
29 #include <crypto/rng.h>
31 #include "internal.h"
33 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
35 /* a perfect nop */
36 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
38 return 0;
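/*
 * With CONFIG_CRYPTO_MANAGER_DISABLE_TESTS set, alg_test() is reduced to the
 * stub above: every algorithm is treated as passing and none of the test
 * machinery below is compiled in.
 */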
41 #else
43 #include "testmgr.h"
46 * Need slab memory for testing (size in number of pages).
48 #define XBUFSIZE 8
51 * Indexes into the xbuf to simulate cross-page access.
53 #define IDX1 32
54 #define IDX2 32400
55 #define IDX3 1
56 #define IDX4 8193
57 #define IDX5 22222
58 #define IDX6 17101
59 #define IDX7 27333
60 #define IDX8 3000
63 * Used by test_cipher()
65 #define ENCRYPT 1
66 #define DECRYPT 0
68 struct tcrypt_result {
69 struct completion completion;
70 int err;
73 struct aead_test_suite {
74 struct {
75 struct aead_testvec *vecs;
76 unsigned int count;
77 } enc, dec;
80 struct cipher_test_suite {
81 struct {
82 struct cipher_testvec *vecs;
83 unsigned int count;
84 } enc, dec;
87 struct comp_test_suite {
88 struct {
89 struct comp_testvec *vecs;
90 unsigned int count;
91 } comp, decomp;
94 struct pcomp_test_suite {
95 struct {
96 struct pcomp_testvec *vecs;
97 unsigned int count;
98 } comp, decomp;
101 struct hash_test_suite {
102 struct hash_testvec *vecs;
103 unsigned int count;
106 struct cprng_test_suite {
107 struct cprng_testvec *vecs;
108 unsigned int count;
111 struct alg_test_desc {
112 const char *alg;
113 int (*test)(const struct alg_test_desc *desc, const char *driver,
114 u32 type, u32 mask);
115 int fips_allowed; /* set if alg is allowed in fips mode */
117 union {
118 struct aead_test_suite aead;
119 struct cipher_test_suite cipher;
120 struct comp_test_suite comp;
121 struct pcomp_test_suite pcomp;
122 struct hash_test_suite hash;
123 struct cprng_test_suite cprng;
124 } suite;
127 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
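/*
 * Each IDX value encodes both a page and an offset into the xbuf[] pages:
 * the page is IDX[k] >> PAGE_SHIFT and the offset is offset_in_page(IDX[k]).
 * As a worked example (assuming PAGE_SIZE == 4096): IDX2 == 32400 selects
 * xbuf[7] at offset 3728 and IDX4 == 8193 selects xbuf[2] at offset 1, so
 * chunked test vectors land in buffers that sit close to, and straddle,
 * page boundaries.
 */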
129 static void hexdump(unsigned char *buf, unsigned int len)
131 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
132 16, 1,
133 buf, len, false);
136 static void tcrypt_complete(struct crypto_async_request *req, int err)
138 struct tcrypt_result *res = req->data;
140 if (err == -EINPROGRESS)
141 return;
143 res->err = err;
144 complete(&res->completion);
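/*
 * Completion callback shared by the async tests below: -EINPROGRESS is only
 * a notification that the request was accepted, so it is ignored; any other
 * value is the final status, which is stashed in tcrypt_result and signalled
 * through the completion so the submitting thread can wake up.
 */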
147 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
149 int i;
151 for (i = 0; i < XBUFSIZE; i++) {
152 buf[i] = (void *)__get_free_page(GFP_KERNEL);
153 if (!buf[i])
154 goto err_free_buf;
157 return 0;
159 err_free_buf:
160 while (i-- > 0)
161 free_page((unsigned long)buf[i]);
163 return -ENOMEM;
166 static void testmgr_free_buf(char *buf[XBUFSIZE])
168 int i;
170 for (i = 0; i < XBUFSIZE; i++)
171 free_page((unsigned long)buf[i]);
174 static int do_one_async_hash_op(struct ahash_request *req,
175 struct tcrypt_result *tr,
176 int ret)
178 if (ret == -EINPROGRESS || ret == -EBUSY) {
179 ret = wait_for_completion_interruptible(&tr->completion);
180 if (!ret)
181 ret = tr->err;
182 INIT_COMPLETION(tr->completion);
184 return ret;
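/*
 * Helper that turns one async hash call into a synchronous one: if the
 * operation was queued (-EINPROGRESS or -EBUSY), wait for tcrypt_complete()
 * to fire, pick up the final status from tcrypt_result and re-arm the
 * completion for the next call.  A typical call site, as used below:
 *
 *	ret = do_one_async_hash_op(req, &tresult, crypto_ahash_digest(req));
 *
 * (In later kernels INIT_COMPLETION() was renamed reinit_completion().)
 */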
187 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
188 unsigned int tcount, bool use_digest)
190 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
191 unsigned int i, j, k, temp;
192 struct scatterlist sg[8];
193 char result[64];
194 struct ahash_request *req;
195 struct tcrypt_result tresult;
196 void *hash_buff;
197 char *xbuf[XBUFSIZE];
198 int ret = -ENOMEM;
200 if (testmgr_alloc_buf(xbuf))
201 goto out_nobuf;
203 init_completion(&tresult.completion);
205 req = ahash_request_alloc(tfm, GFP_KERNEL);
206 if (!req) {
207 printk(KERN_ERR "alg: hash: Failed to allocate request for "
208 "%s\n", algo);
209 goto out_noreq;
211 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
212 tcrypt_complete, &tresult);
214 j = 0;
215 for (i = 0; i < tcount; i++) {
216 if (template[i].np)
217 continue;
219 j++;
220 memset(result, 0, 64);
222 hash_buff = xbuf[0];
224 memcpy(hash_buff, template[i].plaintext, template[i].psize);
225 sg_init_one(&sg[0], hash_buff, template[i].psize);
227 if (template[i].ksize) {
228 crypto_ahash_clear_flags(tfm, ~0);
229 ret = crypto_ahash_setkey(tfm, template[i].key,
230 template[i].ksize);
231 if (ret) {
232 printk(KERN_ERR "alg: hash: setkey failed on "
233 "test %d for %s: ret=%d\n", j, algo,
234 -ret);
235 goto out;
239 ahash_request_set_crypt(req, sg, result, template[i].psize);
240 if (use_digest) {
241 ret = do_one_async_hash_op(req, &tresult,
242 crypto_ahash_digest(req));
243 if (ret) {
244 pr_err("alg: hash: digest failed on test %d "
245 "for %s: ret=%d\n", j, algo, -ret);
246 goto out;
248 } else {
249 ret = do_one_async_hash_op(req, &tresult,
250 crypto_ahash_init(req));
251 if (ret) {
252 pr_err("alt: hash: init failed on test %d "
253 "for %s: ret=%d\n", j, algo, -ret);
254 goto out;
256 ret = do_one_async_hash_op(req, &tresult,
257 crypto_ahash_update(req));
258 if (ret) {
259 pr_err("alt: hash: update failed on test %d "
260 "for %s: ret=%d\n", j, algo, -ret);
261 goto out;
263 ret = do_one_async_hash_op(req, &tresult,
264 crypto_ahash_final(req));
265 if (ret) {
266 pr_err("alt: hash: final failed on test %d "
267 "for %s: ret=%d\n", j, algo, -ret);
268 goto out;
272 if (memcmp(result, template[i].digest,
273 crypto_ahash_digestsize(tfm))) {
274 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
275 j, algo);
276 hexdump(result, crypto_ahash_digestsize(tfm));
277 ret = -EINVAL;
278 goto out;
282 j = 0;
283 for (i = 0; i < tcount; i++) {
284 if (template[i].np) {
285 j++;
286 memset(result, 0, 64);
288 temp = 0;
289 sg_init_table(sg, template[i].np);
290 ret = -EINVAL;
291 for (k = 0; k < template[i].np; k++) {
292 if (WARN_ON(offset_in_page(IDX[k]) +
293 template[i].tap[k] > PAGE_SIZE))
294 goto out;
295 sg_set_buf(&sg[k],
296 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
297 offset_in_page(IDX[k]),
298 template[i].plaintext + temp,
299 template[i].tap[k]),
300 template[i].tap[k]);
301 temp += template[i].tap[k];
304 if (template[i].ksize) {
305 crypto_ahash_clear_flags(tfm, ~0);
306 ret = crypto_ahash_setkey(tfm, template[i].key,
307 template[i].ksize);
309 if (ret) {
310 printk(KERN_ERR "alg: hash: setkey "
311 "failed on chunking test %d "
312 "for %s: ret=%d\n", j, algo,
313 -ret);
314 goto out;
318 ahash_request_set_crypt(req, sg, result,
319 template[i].psize);
320 ret = crypto_ahash_digest(req);
321 switch (ret) {
322 case 0:
323 break;
324 case -EINPROGRESS:
325 case -EBUSY:
326 ret = wait_for_completion_interruptible(
327 &tresult.completion);
328 if (!ret && !(ret = tresult.err)) {
329 INIT_COMPLETION(tresult.completion);
330 break;
332 /* fall through */
333 default:
334 printk(KERN_ERR "alg: hash: digest failed "
335 "on chunking test %d for %s: "
336 "ret=%d\n", j, algo, -ret);
337 goto out;
340 if (memcmp(result, template[i].digest,
341 crypto_ahash_digestsize(tfm))) {
342 printk(KERN_ERR "alg: hash: Chunking test %d "
343 "failed for %s\n", j, algo);
344 hexdump(result, crypto_ahash_digestsize(tfm));
345 ret = -EINVAL;
346 goto out;
351 ret = 0;
353 out:
354 ahash_request_free(req);
355 out_noreq:
356 testmgr_free_buf(xbuf);
357 out_nobuf:
358 return ret;
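/*
 * test_hash() makes two passes over the template array: first the vectors
 * without a scatter pattern (np == 0) are hashed from a single linear buffer,
 * either via crypto_ahash_digest() or via an explicit init/update/final
 * sequence depending on use_digest; then the vectors with np/tap[] set are
 * replayed with the input scattered across the IDX[] page offsets to exercise
 * cross-page scatterlist handling.
 */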
361 static int __test_aead(struct crypto_aead *tfm, int enc,
362 struct aead_testvec *template, unsigned int tcount,
363 const bool diff_dst)
365 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
366 unsigned int i, j, k, n, temp;
367 int ret = -ENOMEM;
368 char *q;
369 char *key;
370 struct aead_request *req;
371 struct scatterlist *sg;
372 struct scatterlist *asg;
373 struct scatterlist *sgout;
374 const char *e, *d;
375 struct tcrypt_result result;
376 unsigned int authsize;
377 void *input;
378 void *output;
379 void *assoc;
380 char iv[MAX_IVLEN];
381 char *xbuf[XBUFSIZE];
382 char *xoutbuf[XBUFSIZE];
383 char *axbuf[XBUFSIZE];
385 if (testmgr_alloc_buf(xbuf))
386 goto out_noxbuf;
387 if (testmgr_alloc_buf(axbuf))
388 goto out_noaxbuf;
390 if (diff_dst && testmgr_alloc_buf(xoutbuf))
391 goto out_nooutbuf;
393 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
394 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 3 : 2), GFP_KERNEL);
395 if (!sg)
396 goto out_nosg;
397 asg = &sg[8];
398 sgout = &asg[8];
400 if (diff_dst)
401 d = "-ddst";
402 else
403 d = "";
405 if (enc == ENCRYPT)
406 e = "encryption";
407 else
408 e = "decryption";
410 init_completion(&result.completion);
412 req = aead_request_alloc(tfm, GFP_KERNEL);
413 if (!req) {
414 pr_err("alg: aead%s: Failed to allocate request for %s\n",
415 d, algo);
416 goto out;
419 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
420 tcrypt_complete, &result);
422 for (i = 0, j = 0; i < tcount; i++) {
423 if (!template[i].np) {
424 j++;
426                         /* some templates have no input data but they will
427 * touch input
429 input = xbuf[0];
430 assoc = axbuf[0];
432 ret = -EINVAL;
433 if (WARN_ON(template[i].ilen > PAGE_SIZE ||
434 template[i].alen > PAGE_SIZE))
435 goto out;
437 memcpy(input, template[i].input, template[i].ilen);
438 memcpy(assoc, template[i].assoc, template[i].alen);
439 if (template[i].iv)
440 memcpy(iv, template[i].iv, MAX_IVLEN);
441 else
442 memset(iv, 0, MAX_IVLEN);
444 crypto_aead_clear_flags(tfm, ~0);
445 if (template[i].wk)
446 crypto_aead_set_flags(
447 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
449 key = template[i].key;
451 ret = crypto_aead_setkey(tfm, key,
452 template[i].klen);
453 if (!ret == template[i].fail) {
454 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
455 d, j, algo, crypto_aead_get_flags(tfm));
456 goto out;
457 } else if (ret)
458 continue;
460 authsize = abs(template[i].rlen - template[i].ilen);
461 ret = crypto_aead_setauthsize(tfm, authsize);
462 if (ret) {
463 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
464 d, authsize, j, algo);
465 goto out;
468 sg_init_one(&sg[0], input,
469 template[i].ilen + (enc ? authsize : 0));
471 if (diff_dst) {
472 output = xoutbuf[0];
473 sg_init_one(&sgout[0], output,
474 template[i].ilen +
475 (enc ? authsize : 0));
476 } else {
477 output = input;
480 sg_init_one(&asg[0], assoc, template[i].alen);
482 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
483 template[i].ilen, iv);
485 aead_request_set_assoc(req, asg, template[i].alen);
487 ret = enc ?
488 crypto_aead_encrypt(req) :
489 crypto_aead_decrypt(req);
491 switch (ret) {
492 case 0:
493 if (template[i].novrfy) {
494 /* verification was supposed to fail */
495 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
496 d, e, j, algo);
497 /* so really, we got a bad message */
498 ret = -EBADMSG;
499 goto out;
501 break;
502 case -EINPROGRESS:
503 case -EBUSY:
504 ret = wait_for_completion_interruptible(
505 &result.completion);
506 if (!ret && !(ret = result.err)) {
507 INIT_COMPLETION(result.completion);
508 break;
510 case -EBADMSG:
511 if (template[i].novrfy)
512 /* verification failure was expected */
513 continue;
514 /* fall through */
515 default:
516 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
517 d, e, j, algo, -ret);
518 goto out;
521 q = output;
522 if (memcmp(q, template[i].result, template[i].rlen)) {
523 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
524 d, j, e, algo);
525 hexdump(q, template[i].rlen);
526 ret = -EINVAL;
527 goto out;
532 for (i = 0, j = 0; i < tcount; i++) {
533 if (template[i].np) {
534 j++;
536 if (template[i].iv)
537 memcpy(iv, template[i].iv, MAX_IVLEN);
538 else
539 memset(iv, 0, MAX_IVLEN);
541 crypto_aead_clear_flags(tfm, ~0);
542 if (template[i].wk)
543 crypto_aead_set_flags(
544 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
545 key = template[i].key;
547 ret = crypto_aead_setkey(tfm, key, template[i].klen);
548 if (!ret == template[i].fail) {
549 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
550 d, j, algo, crypto_aead_get_flags(tfm));
551 goto out;
552 } else if (ret)
553 continue;
555 authsize = abs(template[i].rlen - template[i].ilen);
557 ret = -EINVAL;
558 sg_init_table(sg, template[i].np);
559 if (diff_dst)
560 sg_init_table(sgout, template[i].np);
561 for (k = 0, temp = 0; k < template[i].np; k++) {
562 if (WARN_ON(offset_in_page(IDX[k]) +
563 template[i].tap[k] > PAGE_SIZE))
564 goto out;
566 q = xbuf[IDX[k] >> PAGE_SHIFT] +
567 offset_in_page(IDX[k]);
569 memcpy(q, template[i].input + temp,
570 template[i].tap[k]);
572 n = template[i].tap[k];
573 if (k == template[i].np - 1 && enc)
574 n += authsize;
575 if (offset_in_page(q) + n < PAGE_SIZE)
576 q[n] = 0;
578 sg_set_buf(&sg[k], q, template[i].tap[k]);
580 if (diff_dst) {
581 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
582 offset_in_page(IDX[k]);
584 memset(q, 0, template[i].tap[k]);
585 if (offset_in_page(q) + n < PAGE_SIZE)
586 q[n] = 0;
588 sg_set_buf(&sgout[k], q,
589 template[i].tap[k]);
592 temp += template[i].tap[k];
595 ret = crypto_aead_setauthsize(tfm, authsize);
596 if (ret) {
597 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
598 d, authsize, j, algo);
599 goto out;
602 if (enc) {
603 if (WARN_ON(sg[k - 1].offset +
604 sg[k - 1].length + authsize >
605 PAGE_SIZE)) {
606 ret = -EINVAL;
607 goto out;
610 sg[k - 1].length += authsize;
612 if (diff_dst)
613 sgout[k - 1].length += authsize;
616 sg_init_table(asg, template[i].anp);
617 ret = -EINVAL;
618 for (k = 0, temp = 0; k < template[i].anp; k++) {
619 if (WARN_ON(offset_in_page(IDX[k]) +
620 template[i].atap[k] > PAGE_SIZE))
621 goto out;
622 sg_set_buf(&asg[k],
623 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
624 offset_in_page(IDX[k]),
625 template[i].assoc + temp,
626 template[i].atap[k]),
627 template[i].atap[k]);
628 temp += template[i].atap[k];
631 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
632 template[i].ilen,
633 iv);
635 aead_request_set_assoc(req, asg, template[i].alen);
637 ret = enc ?
638 crypto_aead_encrypt(req) :
639 crypto_aead_decrypt(req);
641 switch (ret) {
642 case 0:
643 if (template[i].novrfy) {
644 /* verification was supposed to fail */
645 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
646 d, e, j, algo);
647 /* so really, we got a bad message */
648 ret = -EBADMSG;
649 goto out;
651 break;
652 case -EINPROGRESS:
653 case -EBUSY:
654 ret = wait_for_completion_interruptible(
655 &result.completion);
656 if (!ret && !(ret = result.err)) {
657 INIT_COMPLETION(result.completion);
658 break;
660 case -EBADMSG:
661 if (template[i].novrfy)
662 /* verification failure was expected */
663 continue;
664 /* fall through */
665 default:
666 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
667 d, e, j, algo, -ret);
668 goto out;
671 ret = -EINVAL;
672 for (k = 0, temp = 0; k < template[i].np; k++) {
673 if (diff_dst)
674 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
675 offset_in_page(IDX[k]);
676 else
677 q = xbuf[IDX[k] >> PAGE_SHIFT] +
678 offset_in_page(IDX[k]);
680 n = template[i].tap[k];
681 if (k == template[i].np - 1)
682 n += enc ? authsize : -authsize;
684 if (memcmp(q, template[i].result + temp, n)) {
685 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
686 d, j, e, k, algo);
687 hexdump(q, n);
688 goto out;
691 q += n;
692 if (k == template[i].np - 1 && !enc) {
693 if (!diff_dst &&
694 memcmp(q, template[i].input +
695 temp + n, authsize))
696 n = authsize;
697 else
698 n = 0;
699 } else {
700 for (n = 0; offset_in_page(q + n) &&
701 q[n]; n++)
704 if (n) {
705 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
706 d, j, e, k, algo, n);
707 hexdump(q, n);
708 goto out;
711 temp += template[i].tap[k];
716 ret = 0;
718 out:
719 aead_request_free(req);
720 kfree(sg);
721 out_nosg:
722 if (diff_dst)
723 testmgr_free_buf(xoutbuf);
724 out_nooutbuf:
725 testmgr_free_buf(axbuf);
726 out_noaxbuf:
727 testmgr_free_buf(xbuf);
728 out_noxbuf:
729 return ret;
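/*
 * __test_aead() follows the same two-pass structure as test_hash(): linear
 * vectors first, then chunked ones.  The authentication tag length is derived
 * from the vectors themselves (authsize = |rlen - ilen|), novrfy marks
 * vectors whose decryption must fail with -EBADMSG, and when diff_dst is set
 * the result is written to a separate xoutbuf so out-of-place operation is
 * covered as well.
 */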
732 static int test_aead(struct crypto_aead *tfm, int enc,
733 struct aead_testvec *template, unsigned int tcount)
735 int ret;
737 /* test 'dst == src' case */
738 ret = __test_aead(tfm, enc, template, tcount, false);
739 if (ret)
740 return ret;
742 /* test 'dst != src' case */
743 return __test_aead(tfm, enc, template, tcount, true);
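/*
 * Both the in-place (dst == src) and out-of-place (dst != src) cases are run
 * for every AEAD implementation, since drivers commonly take different code
 * paths for the two.
 */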
746 static int test_cipher(struct crypto_cipher *tfm, int enc,
747 struct cipher_testvec *template, unsigned int tcount)
749 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
750 unsigned int i, j, k;
751 char *q;
752 const char *e;
753 void *data;
754 char *xbuf[XBUFSIZE];
755 int ret = -ENOMEM;
757 if (testmgr_alloc_buf(xbuf))
758 goto out_nobuf;
760 if (enc == ENCRYPT)
761 e = "encryption";
762 else
763 e = "decryption";
765 j = 0;
766 for (i = 0; i < tcount; i++) {
767 if (template[i].np)
768 continue;
770 j++;
772 ret = -EINVAL;
773 if (WARN_ON(template[i].ilen > PAGE_SIZE))
774 goto out;
776 data = xbuf[0];
777 memcpy(data, template[i].input, template[i].ilen);
779 crypto_cipher_clear_flags(tfm, ~0);
780 if (template[i].wk)
781 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
783 ret = crypto_cipher_setkey(tfm, template[i].key,
784 template[i].klen);
785 if (!ret == template[i].fail) {
786 printk(KERN_ERR "alg: cipher: setkey failed "
787 "on test %d for %s: flags=%x\n", j,
788 algo, crypto_cipher_get_flags(tfm));
789 goto out;
790 } else if (ret)
791 continue;
793 for (k = 0; k < template[i].ilen;
794 k += crypto_cipher_blocksize(tfm)) {
795 if (enc)
796 crypto_cipher_encrypt_one(tfm, data + k,
797 data + k);
798 else
799 crypto_cipher_decrypt_one(tfm, data + k,
800 data + k);
803 q = data;
804 if (memcmp(q, template[i].result, template[i].rlen)) {
805 printk(KERN_ERR "alg: cipher: Test %d failed "
806 "on %s for %s\n", j, e, algo);
807 hexdump(q, template[i].rlen);
808 ret = -EINVAL;
809 goto out;
813 ret = 0;
815 out:
816 testmgr_free_buf(xbuf);
817 out_nobuf:
818 return ret;
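/*
 * test_cipher() exercises the single-block cipher interface: after setkey()
 * the input is processed in place, one crypto_cipher_blocksize() chunk at a
 * time, with crypto_cipher_encrypt_one()/crypto_cipher_decrypt_one(), and the
 * result is compared against the expected output.  Chunked (np != 0) vectors
 * are skipped here; they only apply to the scatterlist-based tests.
 */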
821 static int __test_skcipher(struct crypto_ablkcipher *tfm, int enc,
822 struct cipher_testvec *template, unsigned int tcount,
823 const bool diff_dst)
825 const char *algo =
826 crypto_tfm_alg_driver_name(crypto_ablkcipher_tfm(tfm));
827 unsigned int i, j, k, n, temp;
828 char *q;
829 struct ablkcipher_request *req;
830 struct scatterlist sg[8];
831 struct scatterlist sgout[8];
832 const char *e, *d;
833 struct tcrypt_result result;
834 void *data;
835 char iv[MAX_IVLEN];
836 char *xbuf[XBUFSIZE];
837 char *xoutbuf[XBUFSIZE];
838 int ret = -ENOMEM;
840 if (testmgr_alloc_buf(xbuf))
841 goto out_nobuf;
843 if (diff_dst && testmgr_alloc_buf(xoutbuf))
844 goto out_nooutbuf;
846 if (diff_dst)
847 d = "-ddst";
848 else
849 d = "";
851 if (enc == ENCRYPT)
852 e = "encryption";
853 else
854 e = "decryption";
856 init_completion(&result.completion);
858 req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
859 if (!req) {
860 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
861 d, algo);
862 goto out;
865 ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
866 tcrypt_complete, &result);
868 j = 0;
869 for (i = 0; i < tcount; i++) {
870 if (template[i].iv)
871 memcpy(iv, template[i].iv, MAX_IVLEN);
872 else
873 memset(iv, 0, MAX_IVLEN);
875 if (!(template[i].np) || (template[i].also_non_np)) {
876 j++;
878 ret = -EINVAL;
879 if (WARN_ON(template[i].ilen > PAGE_SIZE))
880 goto out;
882 data = xbuf[0];
883 memcpy(data, template[i].input, template[i].ilen);
885 crypto_ablkcipher_clear_flags(tfm, ~0);
886 if (template[i].wk)
887 crypto_ablkcipher_set_flags(
888 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
890 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
891 template[i].klen);
892 if (!ret == template[i].fail) {
893 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
894 d, j, algo,
895 crypto_ablkcipher_get_flags(tfm));
896 goto out;
897 } else if (ret)
898 continue;
900 sg_init_one(&sg[0], data, template[i].ilen);
901 if (diff_dst) {
902 data = xoutbuf[0];
903 sg_init_one(&sgout[0], data, template[i].ilen);
906 ablkcipher_request_set_crypt(req, sg,
907 (diff_dst) ? sgout : sg,
908 template[i].ilen, iv);
909 ret = enc ?
910 crypto_ablkcipher_encrypt(req) :
911 crypto_ablkcipher_decrypt(req);
913 switch (ret) {
914 case 0:
915 break;
916 case -EINPROGRESS:
917 case -EBUSY:
918 ret = wait_for_completion_interruptible(
919 &result.completion);
920 if (!ret && !((ret = result.err))) {
921 INIT_COMPLETION(result.completion);
922 break;
924 /* fall through */
925 default:
926 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
927 d, e, j, algo, -ret);
928 goto out;
931 q = data;
932 if (memcmp(q, template[i].result, template[i].rlen)) {
933 pr_err("alg: skcipher%s: Test %d failed on %s for %s\n",
934 d, j, e, algo);
935 hexdump(q, template[i].rlen);
936 ret = -EINVAL;
937 goto out;
942 j = 0;
943 for (i = 0; i < tcount; i++) {
945 if (template[i].iv)
946 memcpy(iv, template[i].iv, MAX_IVLEN);
947 else
948 memset(iv, 0, MAX_IVLEN);
950 if (template[i].np) {
951 j++;
953 crypto_ablkcipher_clear_flags(tfm, ~0);
954 if (template[i].wk)
955 crypto_ablkcipher_set_flags(
956 tfm, CRYPTO_TFM_REQ_WEAK_KEY);
958 ret = crypto_ablkcipher_setkey(tfm, template[i].key,
959 template[i].klen);
960 if (!ret == template[i].fail) {
961 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
962 d, j, algo,
963 crypto_ablkcipher_get_flags(tfm));
964 goto out;
965 } else if (ret)
966 continue;
968 temp = 0;
969 ret = -EINVAL;
970 sg_init_table(sg, template[i].np);
971 if (diff_dst)
972 sg_init_table(sgout, template[i].np);
973 for (k = 0; k < template[i].np; k++) {
974 if (WARN_ON(offset_in_page(IDX[k]) +
975 template[i].tap[k] > PAGE_SIZE))
976 goto out;
978 q = xbuf[IDX[k] >> PAGE_SHIFT] +
979 offset_in_page(IDX[k]);
981 memcpy(q, template[i].input + temp,
982 template[i].tap[k]);
984 if (offset_in_page(q) + template[i].tap[k] <
985 PAGE_SIZE)
986 q[template[i].tap[k]] = 0;
988 sg_set_buf(&sg[k], q, template[i].tap[k]);
989 if (diff_dst) {
990 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
991 offset_in_page(IDX[k]);
993 sg_set_buf(&sgout[k], q,
994 template[i].tap[k]);
996 memset(q, 0, template[i].tap[k]);
997 if (offset_in_page(q) +
998 template[i].tap[k] < PAGE_SIZE)
999 q[template[i].tap[k]] = 0;
1002 temp += template[i].tap[k];
1005 ablkcipher_request_set_crypt(req, sg,
1006 (diff_dst) ? sgout : sg,
1007 template[i].ilen, iv);
1009 ret = enc ?
1010 crypto_ablkcipher_encrypt(req) :
1011 crypto_ablkcipher_decrypt(req);
1013 switch (ret) {
1014 case 0:
1015 break;
1016 case -EINPROGRESS:
1017 case -EBUSY:
1018 ret = wait_for_completion_interruptible(
1019 &result.completion);
1020 if (!ret && !((ret = result.err))) {
1021 INIT_COMPLETION(result.completion);
1022 break;
1024 /* fall through */
1025 default:
1026 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1027 d, e, j, algo, -ret);
1028 goto out;
1031 temp = 0;
1032 ret = -EINVAL;
1033 for (k = 0; k < template[i].np; k++) {
1034 if (diff_dst)
1035 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1036 offset_in_page(IDX[k]);
1037 else
1038 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1039 offset_in_page(IDX[k]);
1041 if (memcmp(q, template[i].result + temp,
1042 template[i].tap[k])) {
1043 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1044 d, j, e, k, algo);
1045 hexdump(q, template[i].tap[k]);
1046 goto out;
1049 q += template[i].tap[k];
1050 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1052 if (n) {
1053 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1054 d, j, e, k, algo, n);
1055 hexdump(q, n);
1056 goto out;
1058 temp += template[i].tap[k];
1063 ret = 0;
1065 out:
1066 ablkcipher_request_free(req);
1067 if (diff_dst)
1068 testmgr_free_buf(xoutbuf);
1069 out_nooutbuf:
1070 testmgr_free_buf(xbuf);
1071 out_nobuf:
1072 return ret;
1075 static int test_skcipher(struct crypto_ablkcipher *tfm, int enc,
1076 struct cipher_testvec *template, unsigned int tcount)
1078 int ret;
1080 /* test 'dst == src' case */
1081 ret = __test_skcipher(tfm, enc, template, tcount, false);
1082 if (ret)
1083 return ret;
1085 /* test 'dst != src' case */
1086 return __test_skcipher(tfm, enc, template, tcount, true);
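/*
 * As with AEAD, every skcipher (ablkcipher) implementation is run twice:
 * once with dst == src and once with a separate destination buffer.
 * __test_skcipher() additionally honours also_non_np, which lets a chunked
 * vector be run through the linear pass as well.
 */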
1089 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1090 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1092 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1093 unsigned int i;
1094 char result[COMP_BUF_SIZE];
1095 int ret;
1097 for (i = 0; i < ctcount; i++) {
1098 int ilen;
1099 unsigned int dlen = COMP_BUF_SIZE;
1101 memset(result, 0, sizeof (result));
1103 ilen = ctemplate[i].inlen;
1104 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1105 ilen, result, &dlen);
1106 if (ret) {
1107 printk(KERN_ERR "alg: comp: compression failed "
1108 "on test %d for %s: ret=%d\n", i + 1, algo,
1109 -ret);
1110 goto out;
1113 if (dlen != ctemplate[i].outlen) {
1114 printk(KERN_ERR "alg: comp: Compression test %d "
1115 "failed for %s: output len = %d\n", i + 1, algo,
1116 dlen);
1117 ret = -EINVAL;
1118 goto out;
1121 if (memcmp(result, ctemplate[i].output, dlen)) {
1122 printk(KERN_ERR "alg: comp: Compression test %d "
1123 "failed for %s\n", i + 1, algo);
1124 hexdump(result, dlen);
1125 ret = -EINVAL;
1126 goto out;
1130 for (i = 0; i < dtcount; i++) {
1131 int ilen;
1132 unsigned int dlen = COMP_BUF_SIZE;
1134 memset(result, 0, sizeof (result));
1136 ilen = dtemplate[i].inlen;
1137 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1138 ilen, result, &dlen);
1139 if (ret) {
1140 printk(KERN_ERR "alg: comp: decompression failed "
1141 "on test %d for %s: ret=%d\n", i + 1, algo,
1142 -ret);
1143 goto out;
1146 if (dlen != dtemplate[i].outlen) {
1147 printk(KERN_ERR "alg: comp: Decompression test %d "
1148 "failed for %s: output len = %d\n", i + 1, algo,
1149 dlen);
1150 ret = -EINVAL;
1151 goto out;
1154 if (memcmp(result, dtemplate[i].output, dlen)) {
1155 printk(KERN_ERR "alg: comp: Decompression test %d "
1156 "failed for %s\n", i + 1, algo);
1157 hexdump(result, dlen);
1158 ret = -EINVAL;
1159 goto out;
1163 ret = 0;
1165 out:
1166 return ret;
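/*
 * test_comp() drives the one-shot compression interface: each vector is
 * compressed or decompressed in a single crypto_comp_compress()/
 * crypto_comp_decompress() call into a fixed COMP_BUF_SIZE buffer, and both
 * the output length and the output bytes must match the template.
 */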
1169 static int test_pcomp(struct crypto_pcomp *tfm,
1170 struct pcomp_testvec *ctemplate,
1171 struct pcomp_testvec *dtemplate, int ctcount,
1172 int dtcount)
1174 const char *algo = crypto_tfm_alg_driver_name(crypto_pcomp_tfm(tfm));
1175 unsigned int i;
1176 char result[COMP_BUF_SIZE];
1177 int res;
1179 for (i = 0; i < ctcount; i++) {
1180 struct comp_request req;
1181 unsigned int produced = 0;
1183 res = crypto_compress_setup(tfm, ctemplate[i].params,
1184 ctemplate[i].paramsize);
1185 if (res) {
1186 pr_err("alg: pcomp: compression setup failed on test "
1187 "%d for %s: error=%d\n", i + 1, algo, res);
1188 return res;
1191 res = crypto_compress_init(tfm);
1192 if (res) {
1193 pr_err("alg: pcomp: compression init failed on test "
1194 "%d for %s: error=%d\n", i + 1, algo, res);
1195 return res;
1198 memset(result, 0, sizeof(result));
1200 req.next_in = ctemplate[i].input;
1201 req.avail_in = ctemplate[i].inlen / 2;
1202 req.next_out = result;
1203 req.avail_out = ctemplate[i].outlen / 2;
1205 res = crypto_compress_update(tfm, &req);
1206 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1207 pr_err("alg: pcomp: compression update failed on test "
1208 "%d for %s: error=%d\n", i + 1, algo, res);
1209 return res;
1211 if (res > 0)
1212 produced += res;
1214 /* Add remaining input data */
1215 req.avail_in += (ctemplate[i].inlen + 1) / 2;
1217 res = crypto_compress_update(tfm, &req);
1218 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1219 pr_err("alg: pcomp: compression update failed on test "
1220 "%d for %s: error=%d\n", i + 1, algo, res);
1221 return res;
1223 if (res > 0)
1224 produced += res;
1226 /* Provide remaining output space */
1227 req.avail_out += COMP_BUF_SIZE - ctemplate[i].outlen / 2;
1229 res = crypto_compress_final(tfm, &req);
1230 if (res < 0) {
1231 pr_err("alg: pcomp: compression final failed on test "
1232 "%d for %s: error=%d\n", i + 1, algo, res);
1233 return res;
1235 produced += res;
1237 if (COMP_BUF_SIZE - req.avail_out != ctemplate[i].outlen) {
1238                         pr_err("alg: pcomp: Compression test %d failed for %s: "
1239 "output len = %d (expected %d)\n", i + 1, algo,
1240 COMP_BUF_SIZE - req.avail_out,
1241 ctemplate[i].outlen);
1242 return -EINVAL;
1245 if (produced != ctemplate[i].outlen) {
1246                         pr_err("alg: pcomp: Compression test %d failed for %s: "
1247 "returned len = %u (expected %d)\n", i + 1,
1248 algo, produced, ctemplate[i].outlen);
1249 return -EINVAL;
1252 if (memcmp(result, ctemplate[i].output, ctemplate[i].outlen)) {
1253 pr_err("alg: pcomp: Compression test %d failed for "
1254 "%s\n", i + 1, algo);
1255 hexdump(result, ctemplate[i].outlen);
1256 return -EINVAL;
1260 for (i = 0; i < dtcount; i++) {
1261 struct comp_request req;
1262 unsigned int produced = 0;
1264 res = crypto_decompress_setup(tfm, dtemplate[i].params,
1265 dtemplate[i].paramsize);
1266 if (res) {
1267 pr_err("alg: pcomp: decompression setup failed on "
1268 "test %d for %s: error=%d\n", i + 1, algo, res);
1269 return res;
1272 res = crypto_decompress_init(tfm);
1273 if (res) {
1274 pr_err("alg: pcomp: decompression init failed on test "
1275 "%d for %s: error=%d\n", i + 1, algo, res);
1276 return res;
1279 memset(result, 0, sizeof(result));
1281 req.next_in = dtemplate[i].input;
1282 req.avail_in = dtemplate[i].inlen / 2;
1283 req.next_out = result;
1284 req.avail_out = dtemplate[i].outlen / 2;
1286 res = crypto_decompress_update(tfm, &req);
1287 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1288 pr_err("alg: pcomp: decompression update failed on "
1289 "test %d for %s: error=%d\n", i + 1, algo, res);
1290 return res;
1292 if (res > 0)
1293 produced += res;
1295 /* Add remaining input data */
1296 req.avail_in += (dtemplate[i].inlen + 1) / 2;
1298 res = crypto_decompress_update(tfm, &req);
1299 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1300 pr_err("alg: pcomp: decompression update failed on "
1301 "test %d for %s: error=%d\n", i + 1, algo, res);
1302 return res;
1304 if (res > 0)
1305 produced += res;
1307 /* Provide remaining output space */
1308 req.avail_out += COMP_BUF_SIZE - dtemplate[i].outlen / 2;
1310 res = crypto_decompress_final(tfm, &req);
1311 if (res < 0 && (res != -EAGAIN || req.avail_in)) {
1312 pr_err("alg: pcomp: decompression final failed on "
1313 "test %d for %s: error=%d\n", i + 1, algo, res);
1314 return res;
1316 if (res > 0)
1317 produced += res;
1319 if (COMP_BUF_SIZE - req.avail_out != dtemplate[i].outlen) {
1320                         pr_err("alg: pcomp: Decompression test %d failed for "
1321 "%s: output len = %d (expected %d)\n", i + 1,
1322 algo, COMP_BUF_SIZE - req.avail_out,
1323 dtemplate[i].outlen);
1324 return -EINVAL;
1327 if (produced != dtemplate[i].outlen) {
1328                         pr_err("alg: pcomp: Decompression test %d failed for "
1329 "%s: returned len = %u (expected %d)\n", i + 1,
1330 algo, produced, dtemplate[i].outlen);
1331 return -EINVAL;
1334 if (memcmp(result, dtemplate[i].output, dtemplate[i].outlen)) {
1335 pr_err("alg: pcomp: Decompression test %d failed for "
1336 "%s\n", i + 1, algo);
1337 hexdump(result, dtemplate[i].outlen);
1338 return -EINVAL;
1342 return 0;
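/*
 * test_pcomp() covers the partial (streaming) compression API instead: after
 * crypto_{,de}compress_setup() and _init(), the input is fed in two halves
 * through _update() with a deliberately undersized output window, the window
 * is then widened before _final(), and the bytes produced across all calls
 * are summed and checked against the expected output length and contents.
 */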
1346 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1347 unsigned int tcount)
1349 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1350 int err = 0, i, j, seedsize;
1351 u8 *seed;
1352 char result[32];
1354 seedsize = crypto_rng_seedsize(tfm);
1356 seed = kmalloc(seedsize, GFP_KERNEL);
1357 if (!seed) {
1358 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1359 "for %s\n", algo);
1360 return -ENOMEM;
1363 for (i = 0; i < tcount; i++) {
1364 memset(result, 0, 32);
1366 memcpy(seed, template[i].v, template[i].vlen);
1367 memcpy(seed + template[i].vlen, template[i].key,
1368 template[i].klen);
1369 memcpy(seed + template[i].vlen + template[i].klen,
1370 template[i].dt, template[i].dtlen);
1372 err = crypto_rng_reset(tfm, seed, seedsize);
1373 if (err) {
1374 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1375 "for %s\n", algo);
1376 goto out;
1379 for (j = 0; j < template[i].loops; j++) {
1380 err = crypto_rng_get_bytes(tfm, result,
1381 template[i].rlen);
1382 if (err != template[i].rlen) {
1383 printk(KERN_ERR "alg: cprng: Failed to obtain "
1384 "the correct amount of random data for "
1385 "%s (requested %d, got %d)\n", algo,
1386 template[i].rlen, err);
1387 goto out;
1391 err = memcmp(result, template[i].result,
1392 template[i].rlen);
1393 if (err) {
1394 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1395 i, algo);
1396 hexdump(result, template[i].rlen);
1397 err = -EINVAL;
1398 goto out;
1402 out:
1403 kfree(seed);
1404 return err;
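/*
 * test_cprng() seeds the RNG with the concatenation V || key || DT taken from
 * the test vector, then pulls 'loops' blocks of rlen bytes each.  Note that
 * 'result' is overwritten on every iteration, so the template's expected
 * value is compared against the last generated block only.
 */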
1407 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1408 u32 type, u32 mask)
1410 struct crypto_aead *tfm;
1411 int err = 0;
1413 tfm = crypto_alloc_aead(driver, type, mask);
1414 if (IS_ERR(tfm)) {
1415 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1416 "%ld\n", driver, PTR_ERR(tfm));
1417 return PTR_ERR(tfm);
1420 if (desc->suite.aead.enc.vecs) {
1421 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1422 desc->suite.aead.enc.count);
1423 if (err)
1424 goto out;
1427 if (!err && desc->suite.aead.dec.vecs)
1428 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1429 desc->suite.aead.dec.count);
1431 out:
1432 crypto_free_aead(tfm);
1433 return err;
1436 static int alg_test_cipher(const struct alg_test_desc *desc,
1437 const char *driver, u32 type, u32 mask)
1439 struct crypto_cipher *tfm;
1440 int err = 0;
1442 tfm = crypto_alloc_cipher(driver, type, mask);
1443 if (IS_ERR(tfm)) {
1444 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1445 "%s: %ld\n", driver, PTR_ERR(tfm));
1446 return PTR_ERR(tfm);
1449 if (desc->suite.cipher.enc.vecs) {
1450 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1451 desc->suite.cipher.enc.count);
1452 if (err)
1453 goto out;
1456 if (desc->suite.cipher.dec.vecs)
1457 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1458 desc->suite.cipher.dec.count);
1460 out:
1461 crypto_free_cipher(tfm);
1462 return err;
1465 static int alg_test_skcipher(const struct alg_test_desc *desc,
1466 const char *driver, u32 type, u32 mask)
1468 struct crypto_ablkcipher *tfm;
1469 int err = 0;
1471 tfm = crypto_alloc_ablkcipher(driver, type, mask);
1472 if (IS_ERR(tfm)) {
1473 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1474 "%s: %ld\n", driver, PTR_ERR(tfm));
1475 return PTR_ERR(tfm);
1478 if (desc->suite.cipher.enc.vecs) {
1479 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1480 desc->suite.cipher.enc.count);
1481 if (err)
1482 goto out;
1485 if (desc->suite.cipher.dec.vecs)
1486 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1487 desc->suite.cipher.dec.count);
1489 out:
1490 crypto_free_ablkcipher(tfm);
1491 return err;
1494 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1495 u32 type, u32 mask)
1497 struct crypto_comp *tfm;
1498 int err;
1500 tfm = crypto_alloc_comp(driver, type, mask);
1501 if (IS_ERR(tfm)) {
1502 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1503 "%ld\n", driver, PTR_ERR(tfm));
1504 return PTR_ERR(tfm);
1507 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1508 desc->suite.comp.decomp.vecs,
1509 desc->suite.comp.comp.count,
1510 desc->suite.comp.decomp.count);
1512 crypto_free_comp(tfm);
1513 return err;
1516 static int alg_test_pcomp(const struct alg_test_desc *desc, const char *driver,
1517 u32 type, u32 mask)
1519 struct crypto_pcomp *tfm;
1520 int err;
1522 tfm = crypto_alloc_pcomp(driver, type, mask);
1523 if (IS_ERR(tfm)) {
1524 pr_err("alg: pcomp: Failed to load transform for %s: %ld\n",
1525 driver, PTR_ERR(tfm));
1526 return PTR_ERR(tfm);
1529 err = test_pcomp(tfm, desc->suite.pcomp.comp.vecs,
1530 desc->suite.pcomp.decomp.vecs,
1531 desc->suite.pcomp.comp.count,
1532 desc->suite.pcomp.decomp.count);
1534 crypto_free_pcomp(tfm);
1535 return err;
1538 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1539 u32 type, u32 mask)
1541 struct crypto_ahash *tfm;
1542 int err;
1544 tfm = crypto_alloc_ahash(driver, type, mask);
1545 if (IS_ERR(tfm)) {
1546 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1547 "%ld\n", driver, PTR_ERR(tfm));
1548 return PTR_ERR(tfm);
1551 err = test_hash(tfm, desc->suite.hash.vecs,
1552 desc->suite.hash.count, true);
1553 if (!err)
1554 err = test_hash(tfm, desc->suite.hash.vecs,
1555 desc->suite.hash.count, false);
1557 crypto_free_ahash(tfm);
1558 return err;
1561 static int alg_test_crc32c(const struct alg_test_desc *desc,
1562 const char *driver, u32 type, u32 mask)
1564 struct crypto_shash *tfm;
1565 u32 val;
1566 int err;
1568 err = alg_test_hash(desc, driver, type, mask);
1569 if (err)
1570 goto out;
1572 tfm = crypto_alloc_shash(driver, type, mask);
1573 if (IS_ERR(tfm)) {
1574 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1575 "%ld\n", driver, PTR_ERR(tfm));
1576 err = PTR_ERR(tfm);
1577 goto out;
1580 do {
1581 struct {
1582 struct shash_desc shash;
1583 char ctx[crypto_shash_descsize(tfm)];
1584 } sdesc;
1586 sdesc.shash.tfm = tfm;
1587 sdesc.shash.flags = 0;
1589 *(u32 *)sdesc.ctx = le32_to_cpu(420553207);
1590 err = crypto_shash_final(&sdesc.shash, (u8 *)&val);
1591 if (err) {
1592 printk(KERN_ERR "alg: crc32c: Operation failed for "
1593 "%s: %d\n", driver, err);
1594 break;
1597 if (val != ~420553207) {
1598 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1599 "%d\n", driver, val);
1600 err = -EINVAL;
1602 } while (0);
1604 crypto_free_shash(tfm);
1606 out:
1607 return err;
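/*
 * Beyond the generic hash vectors, crc32c gets one extra sanity check: a
 * shash descriptor is built on the stack (the struct-plus-VLA pattern that
 * later became SHASH_DESC_ON_STACK), its context is seeded with a known
 * 32-bit value, and crypto_shash_final() with no data must return the bitwise
 * complement of that seed, which verifies that the driver uses the standard
 * crc32c descriptor layout.
 */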
1610 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1611 u32 type, u32 mask)
1613 struct crypto_rng *rng;
1614 int err;
1616 rng = crypto_alloc_rng(driver, type, mask);
1617 if (IS_ERR(rng)) {
1618 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1619 "%ld\n", driver, PTR_ERR(rng));
1620 return PTR_ERR(rng);
1623 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1625 crypto_free_rng(rng);
1627 return err;
1630 static int alg_test_null(const struct alg_test_desc *desc,
1631 const char *driver, u32 type, u32 mask)
1633 return 0;
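/*
 * alg_test_null() is used for algorithm instances that have nothing of their
 * own to verify: the __driver-* and cryptd(...) helper implementations below
 * are internal building blocks that are only reachable through the templates
 * wrapping them, so they are simply marked as passing (with fips_allowed set
 * where the wrapped algorithm is permitted in FIPS mode).
 */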
1636 /* Please keep this list sorted by algorithm name. */
1637 static const struct alg_test_desc alg_test_descs[] = {
1639 .alg = "__cbc-cast5-avx",
1640 .test = alg_test_null,
1641 }, {
1642 .alg = "__cbc-cast6-avx",
1643 .test = alg_test_null,
1644 }, {
1645 .alg = "__cbc-serpent-avx",
1646 .test = alg_test_null,
1647 }, {
1648 .alg = "__cbc-serpent-sse2",
1649 .test = alg_test_null,
1650 }, {
1651 .alg = "__cbc-twofish-avx",
1652 .test = alg_test_null,
1653 }, {
1654 .alg = "__driver-cbc-aes-aesni",
1655 .test = alg_test_null,
1656 .fips_allowed = 1,
1657 }, {
1658 .alg = "__driver-cbc-camellia-aesni",
1659 .test = alg_test_null,
1660 }, {
1661 .alg = "__driver-cbc-cast5-avx",
1662 .test = alg_test_null,
1663 }, {
1664 .alg = "__driver-cbc-cast6-avx",
1665 .test = alg_test_null,
1666 }, {
1667 .alg = "__driver-cbc-serpent-avx",
1668 .test = alg_test_null,
1669 }, {
1670 .alg = "__driver-cbc-serpent-sse2",
1671 .test = alg_test_null,
1672 }, {
1673 .alg = "__driver-cbc-twofish-avx",
1674 .test = alg_test_null,
1675 }, {
1676 .alg = "__driver-ecb-aes-aesni",
1677 .test = alg_test_null,
1678 .fips_allowed = 1,
1679 }, {
1680 .alg = "__driver-ecb-camellia-aesni",
1681 .test = alg_test_null,
1682 }, {
1683 .alg = "__driver-ecb-cast5-avx",
1684 .test = alg_test_null,
1685 }, {
1686 .alg = "__driver-ecb-cast6-avx",
1687 .test = alg_test_null,
1688 }, {
1689 .alg = "__driver-ecb-serpent-avx",
1690 .test = alg_test_null,
1691 }, {
1692 .alg = "__driver-ecb-serpent-sse2",
1693 .test = alg_test_null,
1694 }, {
1695 .alg = "__driver-ecb-twofish-avx",
1696 .test = alg_test_null,
1697 }, {
1698 .alg = "__ghash-pclmulqdqni",
1699 .test = alg_test_null,
1700 .fips_allowed = 1,
1701 }, {
1702 .alg = "ansi_cprng",
1703 .test = alg_test_cprng,
1704 .fips_allowed = 1,
1705 .suite = {
1706 .cprng = {
1707 .vecs = ansi_cprng_aes_tv_template,
1708 .count = ANSI_CPRNG_AES_TEST_VECTORS
1711 }, {
1712 .alg = "authenc(hmac(sha1),cbc(aes))",
1713 .test = alg_test_aead,
1714 .fips_allowed = 1,
1715 .suite = {
1716 .aead = {
1717 .enc = {
1718 .vecs = hmac_sha1_aes_cbc_enc_tv_template,
1719 .count = HMAC_SHA1_AES_CBC_ENC_TEST_VECTORS
1723 }, {
1724 .alg = "authenc(hmac(sha256),cbc(aes))",
1725 .test = alg_test_aead,
1726 .fips_allowed = 1,
1727 .suite = {
1728 .aead = {
1729 .enc = {
1730 .vecs = hmac_sha256_aes_cbc_enc_tv_template,
1731 .count = HMAC_SHA256_AES_CBC_ENC_TEST_VECTORS
1735 }, {
1736 .alg = "authenc(hmac(sha512),cbc(aes))",
1737 .test = alg_test_aead,
1738 .fips_allowed = 1,
1739 .suite = {
1740 .aead = {
1741 .enc = {
1742 .vecs = hmac_sha512_aes_cbc_enc_tv_template,
1743 .count = HMAC_SHA512_AES_CBC_ENC_TEST_VECTORS
1747 }, {
1748 .alg = "cbc(aes)",
1749 .test = alg_test_skcipher,
1750 .fips_allowed = 1,
1751 .suite = {
1752 .cipher = {
1753 .enc = {
1754 .vecs = aes_cbc_enc_tv_template,
1755 .count = AES_CBC_ENC_TEST_VECTORS
1757 .dec = {
1758 .vecs = aes_cbc_dec_tv_template,
1759 .count = AES_CBC_DEC_TEST_VECTORS
1763 }, {
1764 .alg = "cbc(anubis)",
1765 .test = alg_test_skcipher,
1766 .suite = {
1767 .cipher = {
1768 .enc = {
1769 .vecs = anubis_cbc_enc_tv_template,
1770 .count = ANUBIS_CBC_ENC_TEST_VECTORS
1772 .dec = {
1773 .vecs = anubis_cbc_dec_tv_template,
1774 .count = ANUBIS_CBC_DEC_TEST_VECTORS
1778 }, {
1779 .alg = "cbc(blowfish)",
1780 .test = alg_test_skcipher,
1781 .suite = {
1782 .cipher = {
1783 .enc = {
1784 .vecs = bf_cbc_enc_tv_template,
1785 .count = BF_CBC_ENC_TEST_VECTORS
1787 .dec = {
1788 .vecs = bf_cbc_dec_tv_template,
1789 .count = BF_CBC_DEC_TEST_VECTORS
1793 }, {
1794 .alg = "cbc(camellia)",
1795 .test = alg_test_skcipher,
1796 .suite = {
1797 .cipher = {
1798 .enc = {
1799 .vecs = camellia_cbc_enc_tv_template,
1800 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
1802 .dec = {
1803 .vecs = camellia_cbc_dec_tv_template,
1804 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
1808 }, {
1809 .alg = "cbc(cast5)",
1810 .test = alg_test_skcipher,
1811 .suite = {
1812 .cipher = {
1813 .enc = {
1814 .vecs = cast5_cbc_enc_tv_template,
1815 .count = CAST5_CBC_ENC_TEST_VECTORS
1817 .dec = {
1818 .vecs = cast5_cbc_dec_tv_template,
1819 .count = CAST5_CBC_DEC_TEST_VECTORS
1823 }, {
1824 .alg = "cbc(cast6)",
1825 .test = alg_test_skcipher,
1826 .suite = {
1827 .cipher = {
1828 .enc = {
1829 .vecs = cast6_cbc_enc_tv_template,
1830 .count = CAST6_CBC_ENC_TEST_VECTORS
1832 .dec = {
1833 .vecs = cast6_cbc_dec_tv_template,
1834 .count = CAST6_CBC_DEC_TEST_VECTORS
1838 }, {
1839 .alg = "cbc(des)",
1840 .test = alg_test_skcipher,
1841 .suite = {
1842 .cipher = {
1843 .enc = {
1844 .vecs = des_cbc_enc_tv_template,
1845 .count = DES_CBC_ENC_TEST_VECTORS
1847 .dec = {
1848 .vecs = des_cbc_dec_tv_template,
1849 .count = DES_CBC_DEC_TEST_VECTORS
1853 }, {
1854 .alg = "cbc(des3_ede)",
1855 .test = alg_test_skcipher,
1856 .fips_allowed = 1,
1857 .suite = {
1858 .cipher = {
1859 .enc = {
1860 .vecs = des3_ede_cbc_enc_tv_template,
1861 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
1863 .dec = {
1864 .vecs = des3_ede_cbc_dec_tv_template,
1865 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
1869 }, {
1870 .alg = "cbc(serpent)",
1871 .test = alg_test_skcipher,
1872 .suite = {
1873 .cipher = {
1874 .enc = {
1875 .vecs = serpent_cbc_enc_tv_template,
1876 .count = SERPENT_CBC_ENC_TEST_VECTORS
1878 .dec = {
1879 .vecs = serpent_cbc_dec_tv_template,
1880 .count = SERPENT_CBC_DEC_TEST_VECTORS
1884 }, {
1885 .alg = "cbc(twofish)",
1886 .test = alg_test_skcipher,
1887 .suite = {
1888 .cipher = {
1889 .enc = {
1890 .vecs = tf_cbc_enc_tv_template,
1891 .count = TF_CBC_ENC_TEST_VECTORS
1893 .dec = {
1894 .vecs = tf_cbc_dec_tv_template,
1895 .count = TF_CBC_DEC_TEST_VECTORS
1899 }, {
1900 .alg = "ccm(aes)",
1901 .test = alg_test_aead,
1902 .fips_allowed = 1,
1903 .suite = {
1904 .aead = {
1905 .enc = {
1906 .vecs = aes_ccm_enc_tv_template,
1907 .count = AES_CCM_ENC_TEST_VECTORS
1909 .dec = {
1910 .vecs = aes_ccm_dec_tv_template,
1911 .count = AES_CCM_DEC_TEST_VECTORS
1915 }, {
1916 .alg = "crc32c",
1917 .test = alg_test_crc32c,
1918 .fips_allowed = 1,
1919 .suite = {
1920 .hash = {
1921 .vecs = crc32c_tv_template,
1922 .count = CRC32C_TEST_VECTORS
1925 }, {
1926 .alg = "cryptd(__driver-cbc-aes-aesni)",
1927 .test = alg_test_null,
1928 .fips_allowed = 1,
1929 }, {
1930 .alg = "cryptd(__driver-cbc-camellia-aesni)",
1931 .test = alg_test_null,
1932 }, {
1933 .alg = "cryptd(__driver-ecb-aes-aesni)",
1934 .test = alg_test_null,
1935 .fips_allowed = 1,
1936 }, {
1937 .alg = "cryptd(__driver-ecb-camellia-aesni)",
1938 .test = alg_test_null,
1939 }, {
1940 .alg = "cryptd(__driver-ecb-cast5-avx)",
1941 .test = alg_test_null,
1942 }, {
1943 .alg = "cryptd(__driver-ecb-cast6-avx)",
1944 .test = alg_test_null,
1945 }, {
1946 .alg = "cryptd(__driver-ecb-serpent-avx)",
1947 .test = alg_test_null,
1948 }, {
1949 .alg = "cryptd(__driver-ecb-serpent-sse2)",
1950 .test = alg_test_null,
1951 }, {
1952 .alg = "cryptd(__driver-ecb-twofish-avx)",
1953 .test = alg_test_null,
1954 }, {
1955 .alg = "cryptd(__driver-gcm-aes-aesni)",
1956 .test = alg_test_null,
1957 .fips_allowed = 1,
1958 }, {
1959 .alg = "cryptd(__ghash-pclmulqdqni)",
1960 .test = alg_test_null,
1961 .fips_allowed = 1,
1962 }, {
1963 .alg = "ctr(aes)",
1964 .test = alg_test_skcipher,
1965 .fips_allowed = 1,
1966 .suite = {
1967 .cipher = {
1968 .enc = {
1969 .vecs = aes_ctr_enc_tv_template,
1970 .count = AES_CTR_ENC_TEST_VECTORS
1972 .dec = {
1973 .vecs = aes_ctr_dec_tv_template,
1974 .count = AES_CTR_DEC_TEST_VECTORS
1978 }, {
1979 .alg = "ctr(blowfish)",
1980 .test = alg_test_skcipher,
1981 .suite = {
1982 .cipher = {
1983 .enc = {
1984 .vecs = bf_ctr_enc_tv_template,
1985 .count = BF_CTR_ENC_TEST_VECTORS
1987 .dec = {
1988 .vecs = bf_ctr_dec_tv_template,
1989 .count = BF_CTR_DEC_TEST_VECTORS
1993 }, {
1994 .alg = "ctr(camellia)",
1995 .test = alg_test_skcipher,
1996 .suite = {
1997 .cipher = {
1998 .enc = {
1999 .vecs = camellia_ctr_enc_tv_template,
2000 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2002 .dec = {
2003 .vecs = camellia_ctr_dec_tv_template,
2004 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2008 }, {
2009 .alg = "ctr(cast5)",
2010 .test = alg_test_skcipher,
2011 .suite = {
2012 .cipher = {
2013 .enc = {
2014 .vecs = cast5_ctr_enc_tv_template,
2015 .count = CAST5_CTR_ENC_TEST_VECTORS
2017 .dec = {
2018 .vecs = cast5_ctr_dec_tv_template,
2019 .count = CAST5_CTR_DEC_TEST_VECTORS
2023 }, {
2024 .alg = "ctr(cast6)",
2025 .test = alg_test_skcipher,
2026 .suite = {
2027 .cipher = {
2028 .enc = {
2029 .vecs = cast6_ctr_enc_tv_template,
2030 .count = CAST6_CTR_ENC_TEST_VECTORS
2032 .dec = {
2033 .vecs = cast6_ctr_dec_tv_template,
2034 .count = CAST6_CTR_DEC_TEST_VECTORS
2038 }, {
2039 .alg = "ctr(des)",
2040 .test = alg_test_skcipher,
2041 .suite = {
2042 .cipher = {
2043 .enc = {
2044 .vecs = des_ctr_enc_tv_template,
2045 .count = DES_CTR_ENC_TEST_VECTORS
2047 .dec = {
2048 .vecs = des_ctr_dec_tv_template,
2049 .count = DES_CTR_DEC_TEST_VECTORS
2053 }, {
2054 .alg = "ctr(des3_ede)",
2055 .test = alg_test_skcipher,
2056 .suite = {
2057 .cipher = {
2058 .enc = {
2059 .vecs = des3_ede_ctr_enc_tv_template,
2060 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2062 .dec = {
2063 .vecs = des3_ede_ctr_dec_tv_template,
2064 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2068 }, {
2069 .alg = "ctr(serpent)",
2070 .test = alg_test_skcipher,
2071 .suite = {
2072 .cipher = {
2073 .enc = {
2074 .vecs = serpent_ctr_enc_tv_template,
2075 .count = SERPENT_CTR_ENC_TEST_VECTORS
2077 .dec = {
2078 .vecs = serpent_ctr_dec_tv_template,
2079 .count = SERPENT_CTR_DEC_TEST_VECTORS
2083 }, {
2084 .alg = "ctr(twofish)",
2085 .test = alg_test_skcipher,
2086 .suite = {
2087 .cipher = {
2088 .enc = {
2089 .vecs = tf_ctr_enc_tv_template,
2090 .count = TF_CTR_ENC_TEST_VECTORS
2092 .dec = {
2093 .vecs = tf_ctr_dec_tv_template,
2094 .count = TF_CTR_DEC_TEST_VECTORS
2098 }, {
2099 .alg = "cts(cbc(aes))",
2100 .test = alg_test_skcipher,
2101 .suite = {
2102 .cipher = {
2103 .enc = {
2104 .vecs = cts_mode_enc_tv_template,
2105 .count = CTS_MODE_ENC_TEST_VECTORS
2107 .dec = {
2108 .vecs = cts_mode_dec_tv_template,
2109 .count = CTS_MODE_DEC_TEST_VECTORS
2113 }, {
2114 .alg = "deflate",
2115 .test = alg_test_comp,
2116 .fips_allowed = 1,
2117 .suite = {
2118 .comp = {
2119 .comp = {
2120 .vecs = deflate_comp_tv_template,
2121 .count = DEFLATE_COMP_TEST_VECTORS
2123 .decomp = {
2124 .vecs = deflate_decomp_tv_template,
2125 .count = DEFLATE_DECOMP_TEST_VECTORS
2129 }, {
2130 .alg = "ecb(__aes-aesni)",
2131 .test = alg_test_null,
2132 .fips_allowed = 1,
2133 }, {
2134 .alg = "ecb(aes)",
2135 .test = alg_test_skcipher,
2136 .fips_allowed = 1,
2137 .suite = {
2138 .cipher = {
2139 .enc = {
2140 .vecs = aes_enc_tv_template,
2141 .count = AES_ENC_TEST_VECTORS
2143 .dec = {
2144 .vecs = aes_dec_tv_template,
2145 .count = AES_DEC_TEST_VECTORS
2149 }, {
2150 .alg = "ecb(anubis)",
2151 .test = alg_test_skcipher,
2152 .suite = {
2153 .cipher = {
2154 .enc = {
2155 .vecs = anubis_enc_tv_template,
2156 .count = ANUBIS_ENC_TEST_VECTORS
2158 .dec = {
2159 .vecs = anubis_dec_tv_template,
2160 .count = ANUBIS_DEC_TEST_VECTORS
2164 }, {
2165 .alg = "ecb(arc4)",
2166 .test = alg_test_skcipher,
2167 .suite = {
2168 .cipher = {
2169 .enc = {
2170 .vecs = arc4_enc_tv_template,
2171 .count = ARC4_ENC_TEST_VECTORS
2173 .dec = {
2174 .vecs = arc4_dec_tv_template,
2175 .count = ARC4_DEC_TEST_VECTORS
2179 }, {
2180 .alg = "ecb(blowfish)",
2181 .test = alg_test_skcipher,
2182 .suite = {
2183 .cipher = {
2184 .enc = {
2185 .vecs = bf_enc_tv_template,
2186 .count = BF_ENC_TEST_VECTORS
2188 .dec = {
2189 .vecs = bf_dec_tv_template,
2190 .count = BF_DEC_TEST_VECTORS
2194 }, {
2195 .alg = "ecb(camellia)",
2196 .test = alg_test_skcipher,
2197 .suite = {
2198 .cipher = {
2199 .enc = {
2200 .vecs = camellia_enc_tv_template,
2201 .count = CAMELLIA_ENC_TEST_VECTORS
2203 .dec = {
2204 .vecs = camellia_dec_tv_template,
2205 .count = CAMELLIA_DEC_TEST_VECTORS
2209 }, {
2210 .alg = "ecb(cast5)",
2211 .test = alg_test_skcipher,
2212 .suite = {
2213 .cipher = {
2214 .enc = {
2215 .vecs = cast5_enc_tv_template,
2216 .count = CAST5_ENC_TEST_VECTORS
2218 .dec = {
2219 .vecs = cast5_dec_tv_template,
2220 .count = CAST5_DEC_TEST_VECTORS
2224 }, {
2225 .alg = "ecb(cast6)",
2226 .test = alg_test_skcipher,
2227 .suite = {
2228 .cipher = {
2229 .enc = {
2230 .vecs = cast6_enc_tv_template,
2231 .count = CAST6_ENC_TEST_VECTORS
2233 .dec = {
2234 .vecs = cast6_dec_tv_template,
2235 .count = CAST6_DEC_TEST_VECTORS
2239 }, {
2240 .alg = "ecb(des)",
2241 .test = alg_test_skcipher,
2242 .fips_allowed = 1,
2243 .suite = {
2244 .cipher = {
2245 .enc = {
2246 .vecs = des_enc_tv_template,
2247 .count = DES_ENC_TEST_VECTORS
2249 .dec = {
2250 .vecs = des_dec_tv_template,
2251 .count = DES_DEC_TEST_VECTORS
2255 }, {
2256 .alg = "ecb(des3_ede)",
2257 .test = alg_test_skcipher,
2258 .fips_allowed = 1,
2259 .suite = {
2260 .cipher = {
2261 .enc = {
2262 .vecs = des3_ede_enc_tv_template,
2263 .count = DES3_EDE_ENC_TEST_VECTORS
2265 .dec = {
2266 .vecs = des3_ede_dec_tv_template,
2267 .count = DES3_EDE_DEC_TEST_VECTORS
2271 }, {
2272 .alg = "ecb(khazad)",
2273 .test = alg_test_skcipher,
2274 .suite = {
2275 .cipher = {
2276 .enc = {
2277 .vecs = khazad_enc_tv_template,
2278 .count = KHAZAD_ENC_TEST_VECTORS
2280 .dec = {
2281 .vecs = khazad_dec_tv_template,
2282 .count = KHAZAD_DEC_TEST_VECTORS
2286 }, {
2287 .alg = "ecb(seed)",
2288 .test = alg_test_skcipher,
2289 .suite = {
2290 .cipher = {
2291 .enc = {
2292 .vecs = seed_enc_tv_template,
2293 .count = SEED_ENC_TEST_VECTORS
2295 .dec = {
2296 .vecs = seed_dec_tv_template,
2297 .count = SEED_DEC_TEST_VECTORS
2301 }, {
2302 .alg = "ecb(serpent)",
2303 .test = alg_test_skcipher,
2304 .suite = {
2305 .cipher = {
2306 .enc = {
2307 .vecs = serpent_enc_tv_template,
2308 .count = SERPENT_ENC_TEST_VECTORS
2310 .dec = {
2311 .vecs = serpent_dec_tv_template,
2312 .count = SERPENT_DEC_TEST_VECTORS
2316 }, {
2317 .alg = "ecb(tea)",
2318 .test = alg_test_skcipher,
2319 .suite = {
2320 .cipher = {
2321 .enc = {
2322 .vecs = tea_enc_tv_template,
2323 .count = TEA_ENC_TEST_VECTORS
2325 .dec = {
2326 .vecs = tea_dec_tv_template,
2327 .count = TEA_DEC_TEST_VECTORS
2331 }, {
2332 .alg = "ecb(tnepres)",
2333 .test = alg_test_skcipher,
2334 .suite = {
2335 .cipher = {
2336 .enc = {
2337 .vecs = tnepres_enc_tv_template,
2338 .count = TNEPRES_ENC_TEST_VECTORS
2340 .dec = {
2341 .vecs = tnepres_dec_tv_template,
2342 .count = TNEPRES_DEC_TEST_VECTORS
2346 }, {
2347 .alg = "ecb(twofish)",
2348 .test = alg_test_skcipher,
2349 .suite = {
2350 .cipher = {
2351 .enc = {
2352 .vecs = tf_enc_tv_template,
2353 .count = TF_ENC_TEST_VECTORS
2355 .dec = {
2356 .vecs = tf_dec_tv_template,
2357 .count = TF_DEC_TEST_VECTORS
2361 }, {
2362 .alg = "ecb(xeta)",
2363 .test = alg_test_skcipher,
2364 .suite = {
2365 .cipher = {
2366 .enc = {
2367 .vecs = xeta_enc_tv_template,
2368 .count = XETA_ENC_TEST_VECTORS
2370 .dec = {
2371 .vecs = xeta_dec_tv_template,
2372 .count = XETA_DEC_TEST_VECTORS
2376 }, {
2377 .alg = "ecb(xtea)",
2378 .test = alg_test_skcipher,
2379 .suite = {
2380 .cipher = {
2381 .enc = {
2382 .vecs = xtea_enc_tv_template,
2383 .count = XTEA_ENC_TEST_VECTORS
2385 .dec = {
2386 .vecs = xtea_dec_tv_template,
2387 .count = XTEA_DEC_TEST_VECTORS
2391 }, {
2392 .alg = "gcm(aes)",
2393 .test = alg_test_aead,
2394 .fips_allowed = 1,
2395 .suite = {
2396 .aead = {
2397 .enc = {
2398 .vecs = aes_gcm_enc_tv_template,
2399 .count = AES_GCM_ENC_TEST_VECTORS
2401 .dec = {
2402 .vecs = aes_gcm_dec_tv_template,
2403 .count = AES_GCM_DEC_TEST_VECTORS
2407 }, {
2408 .alg = "ghash",
2409 .test = alg_test_hash,
2410 .fips_allowed = 1,
2411 .suite = {
2412 .hash = {
2413 .vecs = ghash_tv_template,
2414 .count = GHASH_TEST_VECTORS
2417 }, {
2418 .alg = "hmac(crc32)",
2419 .test = alg_test_hash,
2420 .suite = {
2421 .hash = {
2422 .vecs = bfin_crc_tv_template,
2423 .count = BFIN_CRC_TEST_VECTORS
2426 }, {
2427 .alg = "hmac(md5)",
2428 .test = alg_test_hash,
2429 .suite = {
2430 .hash = {
2431 .vecs = hmac_md5_tv_template,
2432 .count = HMAC_MD5_TEST_VECTORS
2435 }, {
2436 .alg = "hmac(rmd128)",
2437 .test = alg_test_hash,
2438 .suite = {
2439 .hash = {
2440 .vecs = hmac_rmd128_tv_template,
2441 .count = HMAC_RMD128_TEST_VECTORS
2444 }, {
2445 .alg = "hmac(rmd160)",
2446 .test = alg_test_hash,
2447 .suite = {
2448 .hash = {
2449 .vecs = hmac_rmd160_tv_template,
2450 .count = HMAC_RMD160_TEST_VECTORS
2453 }, {
2454 .alg = "hmac(sha1)",
2455 .test = alg_test_hash,
2456 .fips_allowed = 1,
2457 .suite = {
2458 .hash = {
2459 .vecs = hmac_sha1_tv_template,
2460 .count = HMAC_SHA1_TEST_VECTORS
2463 }, {
2464 .alg = "hmac(sha224)",
2465 .test = alg_test_hash,
2466 .fips_allowed = 1,
2467 .suite = {
2468 .hash = {
2469 .vecs = hmac_sha224_tv_template,
2470 .count = HMAC_SHA224_TEST_VECTORS
2473 }, {
2474 .alg = "hmac(sha256)",
2475 .test = alg_test_hash,
2476 .fips_allowed = 1,
2477 .suite = {
2478 .hash = {
2479 .vecs = hmac_sha256_tv_template,
2480 .count = HMAC_SHA256_TEST_VECTORS
2483 }, {
2484 .alg = "hmac(sha384)",
2485 .test = alg_test_hash,
2486 .fips_allowed = 1,
2487 .suite = {
2488 .hash = {
2489 .vecs = hmac_sha384_tv_template,
2490 .count = HMAC_SHA384_TEST_VECTORS
2493 }, {
2494 .alg = "hmac(sha512)",
2495 .test = alg_test_hash,
2496 .fips_allowed = 1,
2497 .suite = {
2498 .hash = {
2499 .vecs = hmac_sha512_tv_template,
2500 .count = HMAC_SHA512_TEST_VECTORS
	}, {
		.alg = "lrw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_lrw_enc_tv_template,
					.count = AES_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_lrw_dec_tv_template,
					.count = AES_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_lrw_enc_tv_template,
					.count = CAMELLIA_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_lrw_dec_tv_template,
					.count = CAMELLIA_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_lrw_enc_tv_template,
					.count = CAST6_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_lrw_dec_tv_template,
					.count = CAST6_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_lrw_enc_tv_template,
					.count = SERPENT_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_lrw_dec_tv_template,
					.count = SERPENT_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lrw(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_lrw_enc_tv_template,
					.count = TF_LRW_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_lrw_dec_tv_template,
					.count = TF_LRW_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.fips_allowed = 1,
		.suite = {
			.comp = {
				.comp = {
					.vecs = lzo_comp_tv_template,
					.count = LZO_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = lzo_decomp_tv_template,
					.count = LZO_DECOMP_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md4_tv_template,
				.count = MD4_TEST_VECTORS
			}
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = md5_tv_template,
				.count = MD5_TEST_VECTORS
			}
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = michael_mic_tv_template,
				.count = MICHAEL_MIC_TEST_VECTORS
			}
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ofb_enc_tv_template,
					.count = AES_OFB_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ofb_dec_tv_template,
					.count = AES_OFB_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = fcrypt_pcbc_enc_tv_template,
					.count = FCRYPT_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = fcrypt_pcbc_dec_tv_template,
					.count = FCRYPT_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_ctr_rfc3686_enc_tv_template,
					.count = AES_CTR_3686_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ctr_rfc3686_dec_tv_template,
					.count = AES_CTR_3686_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_gcm_rfc4106_enc_tv_template,
					.count = AES_GCM_4106_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_gcm_rfc4106_dec_tv_template,
					.count = AES_GCM_4106_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.test = alg_test_aead,
		.fips_allowed = 1,
		.suite = {
			.aead = {
				.enc = {
					.vecs = aes_ccm_rfc4309_enc_tv_template,
					.count = AES_CCM_4309_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_ccm_rfc4309_dec_tv_template,
					.count = AES_CCM_4309_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd128_tv_template,
				.count = RMD128_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd160_tv_template,
				.count = RMD160_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd256_tv_template,
				.count = RMD256_TEST_VECTORS
			}
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = rmd320_tv_template,
				.count = RMD320_TEST_VECTORS
			}
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = salsa20_stream_enc_tv_template,
					.count = SALSA20_STREAM_ENC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha1_tv_template,
				.count = SHA1_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha224_tv_template,
				.count = SHA224_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha256_tv_template,
				.count = SHA256_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha384_tv_template,
				.count = SHA384_TEST_VECTORS
			}
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.fips_allowed = 1,
		.suite = {
			.hash = {
				.vecs = sha512_tv_template,
				.count = SHA512_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr128_tv_template,
				.count = TGR128_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr160_tv_template,
				.count = TGR160_TEST_VECTORS
			}
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = tgr192_tv_template,
				.count = TGR192_TEST_VECTORS
			}
		}
	}, {
		.alg = "vmac(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_vmac128_tv_template,
				.count = VMAC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp256_tv_template,
				.count = WP256_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp384_tv_template,
				.count = WP384_TEST_VECTORS
			}
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = wp512_tv_template,
				.count = WP512_TEST_VECTORS
			}
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = {
				.vecs = aes_xcbc128_tv_template,
				.count = XCBC_AES_TEST_VECTORS
			}
		}
	}, {
		.alg = "xts(aes)",
		.test = alg_test_skcipher,
		.fips_allowed = 1,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = aes_xts_enc_tv_template,
					.count = AES_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = aes_xts_dec_tv_template,
					.count = AES_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = camellia_xts_enc_tv_template,
					.count = CAMELLIA_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = camellia_xts_dec_tv_template,
					.count = CAMELLIA_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = cast6_xts_enc_tv_template,
					.count = CAST6_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = cast6_xts_dec_tv_template,
					.count = CAST6_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = serpent_xts_enc_tv_template,
					.count = SERPENT_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = serpent_xts_dec_tv_template,
					.count = SERPENT_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "xts(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.enc = {
					.vecs = tf_xts_enc_tv_template,
					.count = TF_XTS_ENC_TEST_VECTORS
				},
				.dec = {
					.vecs = tf_xts_dec_tv_template,
					.count = TF_XTS_DEC_TEST_VECTORS
				}
			}
		}
	}, {
		.alg = "zlib",
		.test = alg_test_pcomp,
		.fips_allowed = 1,
		.suite = {
			.pcomp = {
				.comp = {
					.vecs = zlib_comp_tv_template,
					.count = ZLIB_COMP_TEST_VECTORS
				},
				.decomp = {
					.vecs = zlib_decomp_tv_template,
					.count = ZLIB_DECOMP_TEST_VECTORS
				}
			}
		}
	}
};
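
/*
 * Illustrative sketch only -- not part of the original file.  The binary
 * search in alg_find_test() below relies on alg_test_descs[] staying
 * sorted by .alg, so new entries must be inserted in alphabetical order.
 * A debug-time walk along these lines could catch a misplaced entry; the
 * function name is hypothetical.
 */
#if 0
static void alg_test_descs_check_order(void)
{
	int i;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		/* Each name must sort strictly after its predecessor. */
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff >= 0))
			pr_err("testmgr: alg_test_descs entries out of order: '%s' before '%s'\n",
			       alg_test_descs[i - 1].alg,
			       alg_test_descs[i].alg);
	}
}
#endif
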
static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}
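
/*
 * Illustrative sketch, not part of the original file: a thin wrapper
 * showing how alg_find_test() is typically consumed -- mapping an
 * algorithm name to its test descriptor, or NULL when no test vectors
 * are registered for it.  The helper name is hypothetical.
 */
#if 0
static const struct alg_test_desc *alg_lookup_test(const char *alg)
{
	int i = alg_find_test(alg);

	return i < 0 ? NULL : &alg_test_descs[i];
}
#endif
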
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		printk(KERN_INFO "alg: self-tests for %s (%s) passed\n",
		       driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
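
/*
 * Illustrative caller sketch, not part of the original file: this is
 * roughly how the crypto manager ends up exercising alg_test() when a
 * newly registered algorithm needs its self-test run.  The helper name
 * and the use of alg->cra_flags as the type argument are simplifications
 * for illustration only.
 */
#if 0
static int example_run_self_test(struct crypto_alg *alg)
{
	int err;

	/* Test both the driver name and the generic algorithm name. */
	err = alg_test(alg->cra_driver_name, alg->cra_name,
		       alg->cra_flags, CRYPTO_ALG_TESTED);
	if (err)
		pr_err("alg: self-test failed for %s (%s): %d\n",
		       alg->cra_name, alg->cra_driver_name, err);

	return err;
}
#endif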