serial: 8250_lpss: Unconditionally set PCI master for Quark
[linux/fpc-iii.git] / crypto / testmgr.c
blobf9c378af39078c2ad9f8fce0c02d1d4faf432f9f
/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the Free
 * Software Foundation; either version 2 of the License, or (at your option)
 * any later version.
 */
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
36 #include <crypto/acompress.h>
38 #include "internal.h"
40 static bool notests;
41 module_param(notests, bool, 0644);
42 MODULE_PARM_DESC(notests, "disable crypto self-tests");
44 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
46 /* a perfect nop */
47 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
49 return 0;
52 #else
54 #include "testmgr.h"
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Indexes into the xbuf to simulate cross-page access.
 */
#define IDX1		32
#define IDX2		32400
#define IDX3		1511
#define IDX4		8193
#define IDX5		22222
#define IDX6		17101
#define IDX7		27333
#define IDX8		3000

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0
79 struct tcrypt_result {
80 struct completion completion;
81 int err;
/* AEAD test vectors, split into encryption and decryption sets. */
struct aead_test_suite {
	struct {
		struct aead_testvec *vecs;
		unsigned int count;
	} enc, dec;
};
/* Cipher test vectors, split into encryption and decryption sets. */
struct cipher_test_suite {
	struct {
		struct cipher_testvec *vecs;
		unsigned int count;
	} enc, dec;
};
/* Compression test vectors, split into compression and decompression sets. */
struct comp_test_suite {
	struct {
		struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};
/* Hash (digest) test vectors. */
struct hash_test_suite {
	struct hash_testvec *vecs;
	unsigned int count;
};
/* Cryptographic PRNG test vectors. */
struct cprng_test_suite {
	struct cprng_testvec *vecs;
	unsigned int count;
};
/* Deterministic random bit generator (DRBG) test vectors. */
struct drbg_test_suite {
	struct drbg_testvec *vecs;
	unsigned int count;
};
/* Asymmetric cipher (akcipher) test vectors. */
struct akcipher_test_suite {
	struct akcipher_testvec *vecs;
	unsigned int count;
};
/* Key-agreement protocol primitive (KPP) test vectors. */
struct kpp_test_suite {
	struct kpp_testvec *vecs;
	unsigned int count;
};
130 struct alg_test_desc {
131 const char *alg;
132 int (*test)(const struct alg_test_desc *desc, const char *driver,
133 u32 type, u32 mask);
134 int fips_allowed; /* set if alg is allowed in fips mode */
136 union {
137 struct aead_test_suite aead;
138 struct cipher_test_suite cipher;
139 struct comp_test_suite comp;
140 struct hash_test_suite hash;
141 struct cprng_test_suite cprng;
142 struct drbg_test_suite drbg;
143 struct akcipher_test_suite akcipher;
144 struct kpp_test_suite kpp;
145 } suite;
148 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
150 static void hexdump(unsigned char *buf, unsigned int len)
152 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
153 16, 1,
154 buf, len, false);
157 static void tcrypt_complete(struct crypto_async_request *req, int err)
159 struct tcrypt_result *res = req->data;
161 if (err == -EINPROGRESS)
162 return;
164 res->err = err;
165 complete(&res->completion);
168 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
170 int i;
172 for (i = 0; i < XBUFSIZE; i++) {
173 buf[i] = (void *)__get_free_page(GFP_KERNEL);
174 if (!buf[i])
175 goto err_free_buf;
178 return 0;
180 err_free_buf:
181 while (i-- > 0)
182 free_page((unsigned long)buf[i]);
184 return -ENOMEM;
187 static void testmgr_free_buf(char *buf[XBUFSIZE])
189 int i;
191 for (i = 0; i < XBUFSIZE; i++)
192 free_page((unsigned long)buf[i]);
195 static int wait_async_op(struct tcrypt_result *tr, int ret)
197 if (ret == -EINPROGRESS || ret == -EBUSY) {
198 wait_for_completion(&tr->completion);
199 reinit_completion(&tr->completion);
200 ret = tr->err;
202 return ret;
205 static int ahash_partial_update(struct ahash_request **preq,
206 struct crypto_ahash *tfm, struct hash_testvec *template,
207 void *hash_buff, int k, int temp, struct scatterlist *sg,
208 const char *algo, char *result, struct tcrypt_result *tresult)
210 char *state;
211 struct ahash_request *req;
212 int statesize, ret = -EINVAL;
213 const char guard[] = { 0x00, 0xba, 0xad, 0x00 };
215 req = *preq;
216 statesize = crypto_ahash_statesize(
217 crypto_ahash_reqtfm(req));
218 state = kmalloc(statesize + sizeof(guard), GFP_KERNEL);
219 if (!state) {
220 pr_err("alt: hash: Failed to alloc state for %s\n", algo);
221 goto out_nostate;
223 memcpy(state + statesize, guard, sizeof(guard));
224 ret = crypto_ahash_export(req, state);
225 WARN_ON(memcmp(state + statesize, guard, sizeof(guard)));
226 if (ret) {
227 pr_err("alt: hash: Failed to export() for %s\n", algo);
228 goto out;
230 ahash_request_free(req);
231 req = ahash_request_alloc(tfm, GFP_KERNEL);
232 if (!req) {
233 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
234 goto out_noreq;
236 ahash_request_set_callback(req,
237 CRYPTO_TFM_REQ_MAY_BACKLOG,
238 tcrypt_complete, tresult);
240 memcpy(hash_buff, template->plaintext + temp,
241 template->tap[k]);
242 sg_init_one(&sg[0], hash_buff, template->tap[k]);
243 ahash_request_set_crypt(req, sg, result, template->tap[k]);
244 ret = crypto_ahash_import(req, state);
245 if (ret) {
246 pr_err("alg: hash: Failed to import() for %s\n", algo);
247 goto out;
249 ret = wait_async_op(tresult, crypto_ahash_update(req));
250 if (ret)
251 goto out;
252 *preq = req;
253 ret = 0;
254 goto out_noreq;
255 out:
256 ahash_request_free(req);
257 out_noreq:
258 kfree(state);
259 out_nostate:
260 return ret;
263 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
264 unsigned int tcount, bool use_digest,
265 const int align_offset)
267 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
268 size_t digest_size = crypto_ahash_digestsize(tfm);
269 unsigned int i, j, k, temp;
270 struct scatterlist sg[8];
271 char *result;
272 char *key;
273 struct ahash_request *req;
274 struct tcrypt_result tresult;
275 void *hash_buff;
276 char *xbuf[XBUFSIZE];
277 int ret = -ENOMEM;
279 result = kmalloc(digest_size, GFP_KERNEL);
280 if (!result)
281 return ret;
282 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
283 if (!key)
284 goto out_nobuf;
285 if (testmgr_alloc_buf(xbuf))
286 goto out_nobuf;
288 init_completion(&tresult.completion);
290 req = ahash_request_alloc(tfm, GFP_KERNEL);
291 if (!req) {
292 printk(KERN_ERR "alg: hash: Failed to allocate request for "
293 "%s\n", algo);
294 goto out_noreq;
296 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
297 tcrypt_complete, &tresult);
299 j = 0;
300 for (i = 0; i < tcount; i++) {
301 if (template[i].np)
302 continue;
304 ret = -EINVAL;
305 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
306 goto out;
308 j++;
309 memset(result, 0, digest_size);
311 hash_buff = xbuf[0];
312 hash_buff += align_offset;
314 memcpy(hash_buff, template[i].plaintext, template[i].psize);
315 sg_init_one(&sg[0], hash_buff, template[i].psize);
317 if (template[i].ksize) {
318 crypto_ahash_clear_flags(tfm, ~0);
319 if (template[i].ksize > MAX_KEYLEN) {
320 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
321 j, algo, template[i].ksize, MAX_KEYLEN);
322 ret = -EINVAL;
323 goto out;
325 memcpy(key, template[i].key, template[i].ksize);
326 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
327 if (ret) {
328 printk(KERN_ERR "alg: hash: setkey failed on "
329 "test %d for %s: ret=%d\n", j, algo,
330 -ret);
331 goto out;
335 ahash_request_set_crypt(req, sg, result, template[i].psize);
336 if (use_digest) {
337 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
338 if (ret) {
339 pr_err("alg: hash: digest failed on test %d "
340 "for %s: ret=%d\n", j, algo, -ret);
341 goto out;
343 } else {
344 ret = wait_async_op(&tresult, crypto_ahash_init(req));
345 if (ret) {
346 pr_err("alt: hash: init failed on test %d "
347 "for %s: ret=%d\n", j, algo, -ret);
348 goto out;
350 ret = wait_async_op(&tresult, crypto_ahash_update(req));
351 if (ret) {
352 pr_err("alt: hash: update failed on test %d "
353 "for %s: ret=%d\n", j, algo, -ret);
354 goto out;
356 ret = wait_async_op(&tresult, crypto_ahash_final(req));
357 if (ret) {
358 pr_err("alt: hash: final failed on test %d "
359 "for %s: ret=%d\n", j, algo, -ret);
360 goto out;
364 if (memcmp(result, template[i].digest,
365 crypto_ahash_digestsize(tfm))) {
366 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
367 j, algo);
368 hexdump(result, crypto_ahash_digestsize(tfm));
369 ret = -EINVAL;
370 goto out;
374 j = 0;
375 for (i = 0; i < tcount; i++) {
376 /* alignment tests are only done with continuous buffers */
377 if (align_offset != 0)
378 break;
380 if (!template[i].np)
381 continue;
383 j++;
384 memset(result, 0, digest_size);
386 temp = 0;
387 sg_init_table(sg, template[i].np);
388 ret = -EINVAL;
389 for (k = 0; k < template[i].np; k++) {
390 if (WARN_ON(offset_in_page(IDX[k]) +
391 template[i].tap[k] > PAGE_SIZE))
392 goto out;
393 sg_set_buf(&sg[k],
394 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
395 offset_in_page(IDX[k]),
396 template[i].plaintext + temp,
397 template[i].tap[k]),
398 template[i].tap[k]);
399 temp += template[i].tap[k];
402 if (template[i].ksize) {
403 if (template[i].ksize > MAX_KEYLEN) {
404 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
405 j, algo, template[i].ksize, MAX_KEYLEN);
406 ret = -EINVAL;
407 goto out;
409 crypto_ahash_clear_flags(tfm, ~0);
410 memcpy(key, template[i].key, template[i].ksize);
411 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
413 if (ret) {
414 printk(KERN_ERR "alg: hash: setkey "
415 "failed on chunking test %d "
416 "for %s: ret=%d\n", j, algo, -ret);
417 goto out;
421 ahash_request_set_crypt(req, sg, result, template[i].psize);
422 ret = crypto_ahash_digest(req);
423 switch (ret) {
424 case 0:
425 break;
426 case -EINPROGRESS:
427 case -EBUSY:
428 wait_for_completion(&tresult.completion);
429 reinit_completion(&tresult.completion);
430 ret = tresult.err;
431 if (!ret)
432 break;
433 /* fall through */
434 default:
435 printk(KERN_ERR "alg: hash: digest failed "
436 "on chunking test %d for %s: "
437 "ret=%d\n", j, algo, -ret);
438 goto out;
441 if (memcmp(result, template[i].digest,
442 crypto_ahash_digestsize(tfm))) {
443 printk(KERN_ERR "alg: hash: Chunking test %d "
444 "failed for %s\n", j, algo);
445 hexdump(result, crypto_ahash_digestsize(tfm));
446 ret = -EINVAL;
447 goto out;
451 /* partial update exercise */
452 j = 0;
453 for (i = 0; i < tcount; i++) {
454 /* alignment tests are only done with continuous buffers */
455 if (align_offset != 0)
456 break;
458 if (template[i].np < 2)
459 continue;
461 j++;
462 memset(result, 0, digest_size);
464 ret = -EINVAL;
465 hash_buff = xbuf[0];
466 memcpy(hash_buff, template[i].plaintext,
467 template[i].tap[0]);
468 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
470 if (template[i].ksize) {
471 crypto_ahash_clear_flags(tfm, ~0);
472 if (template[i].ksize > MAX_KEYLEN) {
473 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
474 j, algo, template[i].ksize, MAX_KEYLEN);
475 ret = -EINVAL;
476 goto out;
478 memcpy(key, template[i].key, template[i].ksize);
479 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
480 if (ret) {
481 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
482 j, algo, -ret);
483 goto out;
487 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
488 ret = wait_async_op(&tresult, crypto_ahash_init(req));
489 if (ret) {
490 pr_err("alt: hash: init failed on test %d for %s: ret=%d\n",
491 j, algo, -ret);
492 goto out;
494 ret = wait_async_op(&tresult, crypto_ahash_update(req));
495 if (ret) {
496 pr_err("alt: hash: update failed on test %d for %s: ret=%d\n",
497 j, algo, -ret);
498 goto out;
501 temp = template[i].tap[0];
502 for (k = 1; k < template[i].np; k++) {
503 ret = ahash_partial_update(&req, tfm, &template[i],
504 hash_buff, k, temp, &sg[0], algo, result,
505 &tresult);
506 if (ret) {
507 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
508 j, algo, -ret);
509 goto out_noreq;
511 temp += template[i].tap[k];
513 ret = wait_async_op(&tresult, crypto_ahash_final(req));
514 if (ret) {
515 pr_err("alt: hash: final failed on test %d for %s: ret=%d\n",
516 j, algo, -ret);
517 goto out;
519 if (memcmp(result, template[i].digest,
520 crypto_ahash_digestsize(tfm))) {
521 pr_err("alg: hash: Partial Test %d failed for %s\n",
522 j, algo);
523 hexdump(result, crypto_ahash_digestsize(tfm));
524 ret = -EINVAL;
525 goto out;
529 ret = 0;
531 out:
532 ahash_request_free(req);
533 out_noreq:
534 testmgr_free_buf(xbuf);
535 out_nobuf:
536 kfree(key);
537 kfree(result);
538 return ret;
541 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
542 unsigned int tcount, bool use_digest)
544 unsigned int alignmask;
545 int ret;
547 ret = __test_hash(tfm, template, tcount, use_digest, 0);
548 if (ret)
549 return ret;
551 /* test unaligned buffers, check with one byte offset */
552 ret = __test_hash(tfm, template, tcount, use_digest, 1);
553 if (ret)
554 return ret;
556 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
557 if (alignmask) {
558 /* Check if alignment mask for tfm is correctly set. */
559 ret = __test_hash(tfm, template, tcount, use_digest,
560 alignmask + 1);
561 if (ret)
562 return ret;
565 return 0;
568 static int __test_aead(struct crypto_aead *tfm, int enc,
569 struct aead_testvec *template, unsigned int tcount,
570 const bool diff_dst, const int align_offset)
572 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
573 unsigned int i, j, k, n, temp;
574 int ret = -ENOMEM;
575 char *q;
576 char *key;
577 struct aead_request *req;
578 struct scatterlist *sg;
579 struct scatterlist *sgout;
580 const char *e, *d;
581 struct tcrypt_result result;
582 unsigned int authsize, iv_len;
583 void *input;
584 void *output;
585 void *assoc;
586 char *iv;
587 char *xbuf[XBUFSIZE];
588 char *xoutbuf[XBUFSIZE];
589 char *axbuf[XBUFSIZE];
591 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
592 if (!iv)
593 return ret;
594 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
595 if (!key)
596 goto out_noxbuf;
597 if (testmgr_alloc_buf(xbuf))
598 goto out_noxbuf;
599 if (testmgr_alloc_buf(axbuf))
600 goto out_noaxbuf;
601 if (diff_dst && testmgr_alloc_buf(xoutbuf))
602 goto out_nooutbuf;
604 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
605 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
606 if (!sg)
607 goto out_nosg;
608 sgout = &sg[16];
610 if (diff_dst)
611 d = "-ddst";
612 else
613 d = "";
615 if (enc == ENCRYPT)
616 e = "encryption";
617 else
618 e = "decryption";
620 init_completion(&result.completion);
622 req = aead_request_alloc(tfm, GFP_KERNEL);
623 if (!req) {
624 pr_err("alg: aead%s: Failed to allocate request for %s\n",
625 d, algo);
626 goto out;
629 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
630 tcrypt_complete, &result);
632 iv_len = crypto_aead_ivsize(tfm);
634 for (i = 0, j = 0; i < tcount; i++) {
635 if (template[i].np)
636 continue;
638 j++;
640 /* some templates have no input data but they will
641 * touch input
643 input = xbuf[0];
644 input += align_offset;
645 assoc = axbuf[0];
647 ret = -EINVAL;
648 if (WARN_ON(align_offset + template[i].ilen >
649 PAGE_SIZE || template[i].alen > PAGE_SIZE))
650 goto out;
652 memcpy(input, template[i].input, template[i].ilen);
653 memcpy(assoc, template[i].assoc, template[i].alen);
654 if (template[i].iv)
655 memcpy(iv, template[i].iv, iv_len);
656 else
657 memset(iv, 0, iv_len);
659 crypto_aead_clear_flags(tfm, ~0);
660 if (template[i].wk)
661 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
663 if (template[i].klen > MAX_KEYLEN) {
664 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
665 d, j, algo, template[i].klen,
666 MAX_KEYLEN);
667 ret = -EINVAL;
668 goto out;
670 memcpy(key, template[i].key, template[i].klen);
672 ret = crypto_aead_setkey(tfm, key, template[i].klen);
673 if (template[i].fail == !ret) {
674 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
675 d, j, algo, crypto_aead_get_flags(tfm));
676 goto out;
677 } else if (ret)
678 continue;
680 authsize = abs(template[i].rlen - template[i].ilen);
681 ret = crypto_aead_setauthsize(tfm, authsize);
682 if (ret) {
683 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
684 d, authsize, j, algo);
685 goto out;
688 k = !!template[i].alen;
689 sg_init_table(sg, k + 1);
690 sg_set_buf(&sg[0], assoc, template[i].alen);
691 sg_set_buf(&sg[k], input,
692 template[i].ilen + (enc ? authsize : 0));
693 output = input;
695 if (diff_dst) {
696 sg_init_table(sgout, k + 1);
697 sg_set_buf(&sgout[0], assoc, template[i].alen);
699 output = xoutbuf[0];
700 output += align_offset;
701 sg_set_buf(&sgout[k], output,
702 template[i].rlen + (enc ? 0 : authsize));
705 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
706 template[i].ilen, iv);
708 aead_request_set_ad(req, template[i].alen);
710 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
712 switch (ret) {
713 case 0:
714 if (template[i].novrfy) {
715 /* verification was supposed to fail */
716 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
717 d, e, j, algo);
718 /* so really, we got a bad message */
719 ret = -EBADMSG;
720 goto out;
722 break;
723 case -EINPROGRESS:
724 case -EBUSY:
725 wait_for_completion(&result.completion);
726 reinit_completion(&result.completion);
727 ret = result.err;
728 if (!ret)
729 break;
730 case -EBADMSG:
731 if (template[i].novrfy)
732 /* verification failure was expected */
733 continue;
734 /* fall through */
735 default:
736 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
737 d, e, j, algo, -ret);
738 goto out;
741 q = output;
742 if (memcmp(q, template[i].result, template[i].rlen)) {
743 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
744 d, j, e, algo);
745 hexdump(q, template[i].rlen);
746 ret = -EINVAL;
747 goto out;
751 for (i = 0, j = 0; i < tcount; i++) {
752 /* alignment tests are only done with continuous buffers */
753 if (align_offset != 0)
754 break;
756 if (!template[i].np)
757 continue;
759 j++;
761 if (template[i].iv)
762 memcpy(iv, template[i].iv, iv_len);
763 else
764 memset(iv, 0, MAX_IVLEN);
766 crypto_aead_clear_flags(tfm, ~0);
767 if (template[i].wk)
768 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
769 if (template[i].klen > MAX_KEYLEN) {
770 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
771 d, j, algo, template[i].klen, MAX_KEYLEN);
772 ret = -EINVAL;
773 goto out;
775 memcpy(key, template[i].key, template[i].klen);
777 ret = crypto_aead_setkey(tfm, key, template[i].klen);
778 if (template[i].fail == !ret) {
779 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
780 d, j, algo, crypto_aead_get_flags(tfm));
781 goto out;
782 } else if (ret)
783 continue;
785 authsize = abs(template[i].rlen - template[i].ilen);
787 ret = -EINVAL;
788 sg_init_table(sg, template[i].anp + template[i].np);
789 if (diff_dst)
790 sg_init_table(sgout, template[i].anp + template[i].np);
792 ret = -EINVAL;
793 for (k = 0, temp = 0; k < template[i].anp; k++) {
794 if (WARN_ON(offset_in_page(IDX[k]) +
795 template[i].atap[k] > PAGE_SIZE))
796 goto out;
797 sg_set_buf(&sg[k],
798 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
799 offset_in_page(IDX[k]),
800 template[i].assoc + temp,
801 template[i].atap[k]),
802 template[i].atap[k]);
803 if (diff_dst)
804 sg_set_buf(&sgout[k],
805 axbuf[IDX[k] >> PAGE_SHIFT] +
806 offset_in_page(IDX[k]),
807 template[i].atap[k]);
808 temp += template[i].atap[k];
811 for (k = 0, temp = 0; k < template[i].np; k++) {
812 if (WARN_ON(offset_in_page(IDX[k]) +
813 template[i].tap[k] > PAGE_SIZE))
814 goto out;
816 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
817 memcpy(q, template[i].input + temp, template[i].tap[k]);
818 sg_set_buf(&sg[template[i].anp + k],
819 q, template[i].tap[k]);
821 if (diff_dst) {
822 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
823 offset_in_page(IDX[k]);
825 memset(q, 0, template[i].tap[k]);
827 sg_set_buf(&sgout[template[i].anp + k],
828 q, template[i].tap[k]);
831 n = template[i].tap[k];
832 if (k == template[i].np - 1 && enc)
833 n += authsize;
834 if (offset_in_page(q) + n < PAGE_SIZE)
835 q[n] = 0;
837 temp += template[i].tap[k];
840 ret = crypto_aead_setauthsize(tfm, authsize);
841 if (ret) {
842 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
843 d, authsize, j, algo);
844 goto out;
847 if (enc) {
848 if (WARN_ON(sg[template[i].anp + k - 1].offset +
849 sg[template[i].anp + k - 1].length +
850 authsize > PAGE_SIZE)) {
851 ret = -EINVAL;
852 goto out;
855 if (diff_dst)
856 sgout[template[i].anp + k - 1].length +=
857 authsize;
858 sg[template[i].anp + k - 1].length += authsize;
861 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
862 template[i].ilen,
863 iv);
865 aead_request_set_ad(req, template[i].alen);
867 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
869 switch (ret) {
870 case 0:
871 if (template[i].novrfy) {
872 /* verification was supposed to fail */
873 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
874 d, e, j, algo);
875 /* so really, we got a bad message */
876 ret = -EBADMSG;
877 goto out;
879 break;
880 case -EINPROGRESS:
881 case -EBUSY:
882 wait_for_completion(&result.completion);
883 reinit_completion(&result.completion);
884 ret = result.err;
885 if (!ret)
886 break;
887 case -EBADMSG:
888 if (template[i].novrfy)
889 /* verification failure was expected */
890 continue;
891 /* fall through */
892 default:
893 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
894 d, e, j, algo, -ret);
895 goto out;
898 ret = -EINVAL;
899 for (k = 0, temp = 0; k < template[i].np; k++) {
900 if (diff_dst)
901 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
902 offset_in_page(IDX[k]);
903 else
904 q = xbuf[IDX[k] >> PAGE_SHIFT] +
905 offset_in_page(IDX[k]);
907 n = template[i].tap[k];
908 if (k == template[i].np - 1)
909 n += enc ? authsize : -authsize;
911 if (memcmp(q, template[i].result + temp, n)) {
912 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
913 d, j, e, k, algo);
914 hexdump(q, n);
915 goto out;
918 q += n;
919 if (k == template[i].np - 1 && !enc) {
920 if (!diff_dst &&
921 memcmp(q, template[i].input +
922 temp + n, authsize))
923 n = authsize;
924 else
925 n = 0;
926 } else {
927 for (n = 0; offset_in_page(q + n) && q[n]; n++)
930 if (n) {
931 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
932 d, j, e, k, algo, n);
933 hexdump(q, n);
934 goto out;
937 temp += template[i].tap[k];
941 ret = 0;
943 out:
944 aead_request_free(req);
945 kfree(sg);
946 out_nosg:
947 if (diff_dst)
948 testmgr_free_buf(xoutbuf);
949 out_nooutbuf:
950 testmgr_free_buf(axbuf);
951 out_noaxbuf:
952 testmgr_free_buf(xbuf);
953 out_noxbuf:
954 kfree(key);
955 kfree(iv);
956 return ret;
959 static int test_aead(struct crypto_aead *tfm, int enc,
960 struct aead_testvec *template, unsigned int tcount)
962 unsigned int alignmask;
963 int ret;
965 /* test 'dst == src' case */
966 ret = __test_aead(tfm, enc, template, tcount, false, 0);
967 if (ret)
968 return ret;
970 /* test 'dst != src' case */
971 ret = __test_aead(tfm, enc, template, tcount, true, 0);
972 if (ret)
973 return ret;
975 /* test unaligned buffers, check with one byte offset */
976 ret = __test_aead(tfm, enc, template, tcount, true, 1);
977 if (ret)
978 return ret;
980 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
981 if (alignmask) {
982 /* Check if alignment mask for tfm is correctly set. */
983 ret = __test_aead(tfm, enc, template, tcount, true,
984 alignmask + 1);
985 if (ret)
986 return ret;
989 return 0;
992 static int test_cipher(struct crypto_cipher *tfm, int enc,
993 struct cipher_testvec *template, unsigned int tcount)
995 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
996 unsigned int i, j, k;
997 char *q;
998 const char *e;
999 void *data;
1000 char *xbuf[XBUFSIZE];
1001 int ret = -ENOMEM;
1003 if (testmgr_alloc_buf(xbuf))
1004 goto out_nobuf;
1006 if (enc == ENCRYPT)
1007 e = "encryption";
1008 else
1009 e = "decryption";
1011 j = 0;
1012 for (i = 0; i < tcount; i++) {
1013 if (template[i].np)
1014 continue;
1016 if (fips_enabled && template[i].fips_skip)
1017 continue;
1019 j++;
1021 ret = -EINVAL;
1022 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1023 goto out;
1025 data = xbuf[0];
1026 memcpy(data, template[i].input, template[i].ilen);
1028 crypto_cipher_clear_flags(tfm, ~0);
1029 if (template[i].wk)
1030 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1032 ret = crypto_cipher_setkey(tfm, template[i].key,
1033 template[i].klen);
1034 if (template[i].fail == !ret) {
1035 printk(KERN_ERR "alg: cipher: setkey failed "
1036 "on test %d for %s: flags=%x\n", j,
1037 algo, crypto_cipher_get_flags(tfm));
1038 goto out;
1039 } else if (ret)
1040 continue;
1042 for (k = 0; k < template[i].ilen;
1043 k += crypto_cipher_blocksize(tfm)) {
1044 if (enc)
1045 crypto_cipher_encrypt_one(tfm, data + k,
1046 data + k);
1047 else
1048 crypto_cipher_decrypt_one(tfm, data + k,
1049 data + k);
1052 q = data;
1053 if (memcmp(q, template[i].result, template[i].rlen)) {
1054 printk(KERN_ERR "alg: cipher: Test %d failed "
1055 "on %s for %s\n", j, e, algo);
1056 hexdump(q, template[i].rlen);
1057 ret = -EINVAL;
1058 goto out;
1062 ret = 0;
1064 out:
1065 testmgr_free_buf(xbuf);
1066 out_nobuf:
1067 return ret;
1070 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1071 struct cipher_testvec *template, unsigned int tcount,
1072 const bool diff_dst, const int align_offset)
1074 const char *algo =
1075 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1076 unsigned int i, j, k, n, temp;
1077 char *q;
1078 struct skcipher_request *req;
1079 struct scatterlist sg[8];
1080 struct scatterlist sgout[8];
1081 const char *e, *d;
1082 struct tcrypt_result result;
1083 void *data;
1084 char iv[MAX_IVLEN];
1085 char *xbuf[XBUFSIZE];
1086 char *xoutbuf[XBUFSIZE];
1087 int ret = -ENOMEM;
1088 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1090 if (testmgr_alloc_buf(xbuf))
1091 goto out_nobuf;
1093 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1094 goto out_nooutbuf;
1096 if (diff_dst)
1097 d = "-ddst";
1098 else
1099 d = "";
1101 if (enc == ENCRYPT)
1102 e = "encryption";
1103 else
1104 e = "decryption";
1106 init_completion(&result.completion);
1108 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1109 if (!req) {
1110 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1111 d, algo);
1112 goto out;
1115 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1116 tcrypt_complete, &result);
1118 j = 0;
1119 for (i = 0; i < tcount; i++) {
1120 if (template[i].np && !template[i].also_non_np)
1121 continue;
1123 if (fips_enabled && template[i].fips_skip)
1124 continue;
1126 if (template[i].iv)
1127 memcpy(iv, template[i].iv, ivsize);
1128 else
1129 memset(iv, 0, MAX_IVLEN);
1131 j++;
1132 ret = -EINVAL;
1133 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1134 goto out;
1136 data = xbuf[0];
1137 data += align_offset;
1138 memcpy(data, template[i].input, template[i].ilen);
1140 crypto_skcipher_clear_flags(tfm, ~0);
1141 if (template[i].wk)
1142 crypto_skcipher_set_flags(tfm,
1143 CRYPTO_TFM_REQ_WEAK_KEY);
1145 ret = crypto_skcipher_setkey(tfm, template[i].key,
1146 template[i].klen);
1147 if (template[i].fail == !ret) {
1148 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1149 d, j, algo, crypto_skcipher_get_flags(tfm));
1150 goto out;
1151 } else if (ret)
1152 continue;
1154 sg_init_one(&sg[0], data, template[i].ilen);
1155 if (diff_dst) {
1156 data = xoutbuf[0];
1157 data += align_offset;
1158 sg_init_one(&sgout[0], data, template[i].ilen);
1161 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1162 template[i].ilen, iv);
1163 ret = enc ? crypto_skcipher_encrypt(req) :
1164 crypto_skcipher_decrypt(req);
1166 switch (ret) {
1167 case 0:
1168 break;
1169 case -EINPROGRESS:
1170 case -EBUSY:
1171 wait_for_completion(&result.completion);
1172 reinit_completion(&result.completion);
1173 ret = result.err;
1174 if (!ret)
1175 break;
1176 /* fall through */
1177 default:
1178 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1179 d, e, j, algo, -ret);
1180 goto out;
1183 q = data;
1184 if (memcmp(q, template[i].result, template[i].rlen)) {
1185 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1186 d, j, e, algo);
1187 hexdump(q, template[i].rlen);
1188 ret = -EINVAL;
1189 goto out;
1192 if (template[i].iv_out &&
1193 memcmp(iv, template[i].iv_out,
1194 crypto_skcipher_ivsize(tfm))) {
1195 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1196 d, j, e, algo);
1197 hexdump(iv, crypto_skcipher_ivsize(tfm));
1198 ret = -EINVAL;
1199 goto out;
1203 j = 0;
1204 for (i = 0; i < tcount; i++) {
1205 /* alignment tests are only done with continuous buffers */
1206 if (align_offset != 0)
1207 break;
1209 if (!template[i].np)
1210 continue;
1212 if (fips_enabled && template[i].fips_skip)
1213 continue;
1215 if (template[i].iv)
1216 memcpy(iv, template[i].iv, ivsize);
1217 else
1218 memset(iv, 0, MAX_IVLEN);
1220 j++;
1221 crypto_skcipher_clear_flags(tfm, ~0);
1222 if (template[i].wk)
1223 crypto_skcipher_set_flags(tfm,
1224 CRYPTO_TFM_REQ_WEAK_KEY);
1226 ret = crypto_skcipher_setkey(tfm, template[i].key,
1227 template[i].klen);
1228 if (template[i].fail == !ret) {
1229 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1230 d, j, algo, crypto_skcipher_get_flags(tfm));
1231 goto out;
1232 } else if (ret)
1233 continue;
1235 temp = 0;
1236 ret = -EINVAL;
1237 sg_init_table(sg, template[i].np);
1238 if (diff_dst)
1239 sg_init_table(sgout, template[i].np);
1240 for (k = 0; k < template[i].np; k++) {
1241 if (WARN_ON(offset_in_page(IDX[k]) +
1242 template[i].tap[k] > PAGE_SIZE))
1243 goto out;
1245 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1247 memcpy(q, template[i].input + temp, template[i].tap[k]);
1249 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1250 q[template[i].tap[k]] = 0;
1252 sg_set_buf(&sg[k], q, template[i].tap[k]);
1253 if (diff_dst) {
1254 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1255 offset_in_page(IDX[k]);
1257 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1259 memset(q, 0, template[i].tap[k]);
1260 if (offset_in_page(q) +
1261 template[i].tap[k] < PAGE_SIZE)
1262 q[template[i].tap[k]] = 0;
1265 temp += template[i].tap[k];
1268 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1269 template[i].ilen, iv);
1271 ret = enc ? crypto_skcipher_encrypt(req) :
1272 crypto_skcipher_decrypt(req);
1274 switch (ret) {
1275 case 0:
1276 break;
1277 case -EINPROGRESS:
1278 case -EBUSY:
1279 wait_for_completion(&result.completion);
1280 reinit_completion(&result.completion);
1281 ret = result.err;
1282 if (!ret)
1283 break;
1284 /* fall through */
1285 default:
1286 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1287 d, e, j, algo, -ret);
1288 goto out;
1291 temp = 0;
1292 ret = -EINVAL;
1293 for (k = 0; k < template[i].np; k++) {
1294 if (diff_dst)
1295 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1296 offset_in_page(IDX[k]);
1297 else
1298 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1299 offset_in_page(IDX[k]);
1301 if (memcmp(q, template[i].result + temp,
1302 template[i].tap[k])) {
1303 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1304 d, j, e, k, algo);
1305 hexdump(q, template[i].tap[k]);
1306 goto out;
1309 q += template[i].tap[k];
1310 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1312 if (n) {
1313 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1314 d, j, e, k, algo, n);
1315 hexdump(q, n);
1316 goto out;
1318 temp += template[i].tap[k];
1322 ret = 0;
1324 out:
1325 skcipher_request_free(req);
1326 if (diff_dst)
1327 testmgr_free_buf(xoutbuf);
1328 out_nooutbuf:
1329 testmgr_free_buf(xbuf);
1330 out_nobuf:
1331 return ret;
1334 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1335 struct cipher_testvec *template, unsigned int tcount)
1337 unsigned int alignmask;
1338 int ret;
1340 /* test 'dst == src' case */
1341 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1342 if (ret)
1343 return ret;
1345 /* test 'dst != src' case */
1346 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1347 if (ret)
1348 return ret;
1350 /* test unaligned buffers, check with one byte offset */
1351 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1352 if (ret)
1353 return ret;
1355 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1356 if (alignmask) {
1357 /* Check if alignment mask for tfm is correctly set. */
1358 ret = __test_skcipher(tfm, enc, template, tcount, true,
1359 alignmask + 1);
1360 if (ret)
1361 return ret;
1364 return 0;
1367 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1368 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1370 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1371 unsigned int i;
1372 char result[COMP_BUF_SIZE];
1373 int ret;
1375 for (i = 0; i < ctcount; i++) {
1376 int ilen;
1377 unsigned int dlen = COMP_BUF_SIZE;
1379 memset(result, 0, sizeof (result));
1381 ilen = ctemplate[i].inlen;
1382 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1383 ilen, result, &dlen);
1384 if (ret) {
1385 printk(KERN_ERR "alg: comp: compression failed "
1386 "on test %d for %s: ret=%d\n", i + 1, algo,
1387 -ret);
1388 goto out;
1391 if (dlen != ctemplate[i].outlen) {
1392 printk(KERN_ERR "alg: comp: Compression test %d "
1393 "failed for %s: output len = %d\n", i + 1, algo,
1394 dlen);
1395 ret = -EINVAL;
1396 goto out;
1399 if (memcmp(result, ctemplate[i].output, dlen)) {
1400 printk(KERN_ERR "alg: comp: Compression test %d "
1401 "failed for %s\n", i + 1, algo);
1402 hexdump(result, dlen);
1403 ret = -EINVAL;
1404 goto out;
1408 for (i = 0; i < dtcount; i++) {
1409 int ilen;
1410 unsigned int dlen = COMP_BUF_SIZE;
1412 memset(result, 0, sizeof (result));
1414 ilen = dtemplate[i].inlen;
1415 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1416 ilen, result, &dlen);
1417 if (ret) {
1418 printk(KERN_ERR "alg: comp: decompression failed "
1419 "on test %d for %s: ret=%d\n", i + 1, algo,
1420 -ret);
1421 goto out;
1424 if (dlen != dtemplate[i].outlen) {
1425 printk(KERN_ERR "alg: comp: Decompression test %d "
1426 "failed for %s: output len = %d\n", i + 1, algo,
1427 dlen);
1428 ret = -EINVAL;
1429 goto out;
1432 if (memcmp(result, dtemplate[i].output, dlen)) {
1433 printk(KERN_ERR "alg: comp: Decompression test %d "
1434 "failed for %s\n", i + 1, algo);
1435 hexdump(result, dlen);
1436 ret = -EINVAL;
1437 goto out;
1441 ret = 0;
1443 out:
1444 return ret;
1447 static int test_acomp(struct crypto_acomp *tfm, struct comp_testvec *ctemplate,
1448 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1450 const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
1451 unsigned int i;
1452 char *output;
1453 int ret;
1454 struct scatterlist src, dst;
1455 struct acomp_req *req;
1456 struct tcrypt_result result;
1458 output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
1459 if (!output)
1460 return -ENOMEM;
1462 for (i = 0; i < ctcount; i++) {
1463 unsigned int dlen = COMP_BUF_SIZE;
1464 int ilen = ctemplate[i].inlen;
1465 void *input_vec;
1467 input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
1468 if (!input_vec) {
1469 ret = -ENOMEM;
1470 goto out;
1473 memset(output, 0, dlen);
1474 init_completion(&result.completion);
1475 sg_init_one(&src, input_vec, ilen);
1476 sg_init_one(&dst, output, dlen);
1478 req = acomp_request_alloc(tfm);
1479 if (!req) {
1480 pr_err("alg: acomp: request alloc failed for %s\n",
1481 algo);
1482 kfree(input_vec);
1483 ret = -ENOMEM;
1484 goto out;
1487 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1488 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1489 tcrypt_complete, &result);
1491 ret = wait_async_op(&result, crypto_acomp_compress(req));
1492 if (ret) {
1493 pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
1494 i + 1, algo, -ret);
1495 kfree(input_vec);
1496 acomp_request_free(req);
1497 goto out;
1500 if (req->dlen != ctemplate[i].outlen) {
1501 pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
1502 i + 1, algo, req->dlen);
1503 ret = -EINVAL;
1504 kfree(input_vec);
1505 acomp_request_free(req);
1506 goto out;
1509 if (memcmp(output, ctemplate[i].output, req->dlen)) {
1510 pr_err("alg: acomp: Compression test %d failed for %s\n",
1511 i + 1, algo);
1512 hexdump(output, req->dlen);
1513 ret = -EINVAL;
1514 kfree(input_vec);
1515 acomp_request_free(req);
1516 goto out;
1519 kfree(input_vec);
1520 acomp_request_free(req);
1523 for (i = 0; i < dtcount; i++) {
1524 unsigned int dlen = COMP_BUF_SIZE;
1525 int ilen = dtemplate[i].inlen;
1526 void *input_vec;
1528 input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
1529 if (!input_vec) {
1530 ret = -ENOMEM;
1531 goto out;
1534 memset(output, 0, dlen);
1535 init_completion(&result.completion);
1536 sg_init_one(&src, input_vec, ilen);
1537 sg_init_one(&dst, output, dlen);
1539 req = acomp_request_alloc(tfm);
1540 if (!req) {
1541 pr_err("alg: acomp: request alloc failed for %s\n",
1542 algo);
1543 kfree(input_vec);
1544 ret = -ENOMEM;
1545 goto out;
1548 acomp_request_set_params(req, &src, &dst, ilen, dlen);
1549 acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1550 tcrypt_complete, &result);
1552 ret = wait_async_op(&result, crypto_acomp_decompress(req));
1553 if (ret) {
1554 pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
1555 i + 1, algo, -ret);
1556 kfree(input_vec);
1557 acomp_request_free(req);
1558 goto out;
1561 if (req->dlen != dtemplate[i].outlen) {
1562 pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
1563 i + 1, algo, req->dlen);
1564 ret = -EINVAL;
1565 kfree(input_vec);
1566 acomp_request_free(req);
1567 goto out;
1570 if (memcmp(output, dtemplate[i].output, req->dlen)) {
1571 pr_err("alg: acomp: Decompression test %d failed for %s\n",
1572 i + 1, algo);
1573 hexdump(output, req->dlen);
1574 ret = -EINVAL;
1575 kfree(input_vec);
1576 acomp_request_free(req);
1577 goto out;
1580 kfree(input_vec);
1581 acomp_request_free(req);
1584 ret = 0;
1586 out:
1587 kfree(output);
1588 return ret;
1591 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1592 unsigned int tcount)
1594 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1595 int err = 0, i, j, seedsize;
1596 u8 *seed;
1597 char result[32];
1599 seedsize = crypto_rng_seedsize(tfm);
1601 seed = kmalloc(seedsize, GFP_KERNEL);
1602 if (!seed) {
1603 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1604 "for %s\n", algo);
1605 return -ENOMEM;
1608 for (i = 0; i < tcount; i++) {
1609 memset(result, 0, 32);
1611 memcpy(seed, template[i].v, template[i].vlen);
1612 memcpy(seed + template[i].vlen, template[i].key,
1613 template[i].klen);
1614 memcpy(seed + template[i].vlen + template[i].klen,
1615 template[i].dt, template[i].dtlen);
1617 err = crypto_rng_reset(tfm, seed, seedsize);
1618 if (err) {
1619 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1620 "for %s\n", algo);
1621 goto out;
1624 for (j = 0; j < template[i].loops; j++) {
1625 err = crypto_rng_get_bytes(tfm, result,
1626 template[i].rlen);
1627 if (err < 0) {
1628 printk(KERN_ERR "alg: cprng: Failed to obtain "
1629 "the correct amount of random data for "
1630 "%s (requested %d)\n", algo,
1631 template[i].rlen);
1632 goto out;
1636 err = memcmp(result, template[i].result,
1637 template[i].rlen);
1638 if (err) {
1639 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1640 i, algo);
1641 hexdump(result, template[i].rlen);
1642 err = -EINVAL;
1643 goto out;
1647 out:
1648 kfree(seed);
1649 return err;
1652 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1653 u32 type, u32 mask)
1655 struct crypto_aead *tfm;
1656 int err = 0;
1658 tfm = crypto_alloc_aead(driver, type, mask);
1659 if (IS_ERR(tfm)) {
1660 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1661 "%ld\n", driver, PTR_ERR(tfm));
1662 return PTR_ERR(tfm);
1665 if (desc->suite.aead.enc.vecs) {
1666 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1667 desc->suite.aead.enc.count);
1668 if (err)
1669 goto out;
1672 if (!err && desc->suite.aead.dec.vecs)
1673 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1674 desc->suite.aead.dec.count);
1676 out:
1677 crypto_free_aead(tfm);
1678 return err;
1681 static int alg_test_cipher(const struct alg_test_desc *desc,
1682 const char *driver, u32 type, u32 mask)
1684 struct crypto_cipher *tfm;
1685 int err = 0;
1687 tfm = crypto_alloc_cipher(driver, type, mask);
1688 if (IS_ERR(tfm)) {
1689 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1690 "%s: %ld\n", driver, PTR_ERR(tfm));
1691 return PTR_ERR(tfm);
1694 if (desc->suite.cipher.enc.vecs) {
1695 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1696 desc->suite.cipher.enc.count);
1697 if (err)
1698 goto out;
1701 if (desc->suite.cipher.dec.vecs)
1702 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1703 desc->suite.cipher.dec.count);
1705 out:
1706 crypto_free_cipher(tfm);
1707 return err;
1710 static int alg_test_skcipher(const struct alg_test_desc *desc,
1711 const char *driver, u32 type, u32 mask)
1713 struct crypto_skcipher *tfm;
1714 int err = 0;
1716 tfm = crypto_alloc_skcipher(driver, type, mask);
1717 if (IS_ERR(tfm)) {
1718 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1719 "%s: %ld\n", driver, PTR_ERR(tfm));
1720 return PTR_ERR(tfm);
1723 if (desc->suite.cipher.enc.vecs) {
1724 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1725 desc->suite.cipher.enc.count);
1726 if (err)
1727 goto out;
1730 if (desc->suite.cipher.dec.vecs)
1731 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1732 desc->suite.cipher.dec.count);
1734 out:
1735 crypto_free_skcipher(tfm);
1736 return err;
1739 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1740 u32 type, u32 mask)
1742 struct crypto_comp *comp;
1743 struct crypto_acomp *acomp;
1744 int err;
1745 u32 algo_type = type & CRYPTO_ALG_TYPE_ACOMPRESS_MASK;
1747 if (algo_type == CRYPTO_ALG_TYPE_ACOMPRESS) {
1748 acomp = crypto_alloc_acomp(driver, type, mask);
1749 if (IS_ERR(acomp)) {
1750 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
1751 driver, PTR_ERR(acomp));
1752 return PTR_ERR(acomp);
1754 err = test_acomp(acomp, desc->suite.comp.comp.vecs,
1755 desc->suite.comp.decomp.vecs,
1756 desc->suite.comp.comp.count,
1757 desc->suite.comp.decomp.count);
1758 crypto_free_acomp(acomp);
1759 } else {
1760 comp = crypto_alloc_comp(driver, type, mask);
1761 if (IS_ERR(comp)) {
1762 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
1763 driver, PTR_ERR(comp));
1764 return PTR_ERR(comp);
1767 err = test_comp(comp, desc->suite.comp.comp.vecs,
1768 desc->suite.comp.decomp.vecs,
1769 desc->suite.comp.comp.count,
1770 desc->suite.comp.decomp.count);
1772 crypto_free_comp(comp);
1774 return err;
1777 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1778 u32 type, u32 mask)
1780 struct crypto_ahash *tfm;
1781 int err;
1783 tfm = crypto_alloc_ahash(driver, type, mask);
1784 if (IS_ERR(tfm)) {
1785 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1786 "%ld\n", driver, PTR_ERR(tfm));
1787 return PTR_ERR(tfm);
1790 err = test_hash(tfm, desc->suite.hash.vecs,
1791 desc->suite.hash.count, true);
1792 if (!err)
1793 err = test_hash(tfm, desc->suite.hash.vecs,
1794 desc->suite.hash.count, false);
1796 crypto_free_ahash(tfm);
1797 return err;
1800 static int alg_test_crc32c(const struct alg_test_desc *desc,
1801 const char *driver, u32 type, u32 mask)
1803 struct crypto_shash *tfm;
1804 u32 val;
1805 int err;
1807 err = alg_test_hash(desc, driver, type, mask);
1808 if (err)
1809 goto out;
1811 tfm = crypto_alloc_shash(driver, type, mask);
1812 if (IS_ERR(tfm)) {
1813 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1814 "%ld\n", driver, PTR_ERR(tfm));
1815 err = PTR_ERR(tfm);
1816 goto out;
1819 do {
1820 SHASH_DESC_ON_STACK(shash, tfm);
1821 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1823 shash->tfm = tfm;
1824 shash->flags = 0;
1826 *ctx = le32_to_cpu(420553207);
1827 err = crypto_shash_final(shash, (u8 *)&val);
1828 if (err) {
1829 printk(KERN_ERR "alg: crc32c: Operation failed for "
1830 "%s: %d\n", driver, err);
1831 break;
1834 if (val != ~420553207) {
1835 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1836 "%d\n", driver, val);
1837 err = -EINVAL;
1839 } while (0);
1841 crypto_free_shash(tfm);
1843 out:
1844 return err;
1847 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1848 u32 type, u32 mask)
1850 struct crypto_rng *rng;
1851 int err;
1853 rng = crypto_alloc_rng(driver, type, mask);
1854 if (IS_ERR(rng)) {
1855 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1856 "%ld\n", driver, PTR_ERR(rng));
1857 return PTR_ERR(rng);
1860 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1862 crypto_free_rng(rng);
1864 return err;
1868 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1869 const char *driver, u32 type, u32 mask)
1871 int ret = -EAGAIN;
1872 struct crypto_rng *drng;
1873 struct drbg_test_data test_data;
1874 struct drbg_string addtl, pers, testentropy;
1875 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1877 if (!buf)
1878 return -ENOMEM;
1880 drng = crypto_alloc_rng(driver, type, mask);
1881 if (IS_ERR(drng)) {
1882 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1883 "%s\n", driver);
1884 kzfree(buf);
1885 return -ENOMEM;
1888 test_data.testentropy = &testentropy;
1889 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1890 drbg_string_fill(&pers, test->pers, test->perslen);
1891 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1892 if (ret) {
1893 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1894 goto outbuf;
1897 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1898 if (pr) {
1899 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1900 ret = crypto_drbg_get_bytes_addtl_test(drng,
1901 buf, test->expectedlen, &addtl, &test_data);
1902 } else {
1903 ret = crypto_drbg_get_bytes_addtl(drng,
1904 buf, test->expectedlen, &addtl);
1906 if (ret < 0) {
1907 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1908 "driver %s\n", driver);
1909 goto outbuf;
1912 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1913 if (pr) {
1914 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1915 ret = crypto_drbg_get_bytes_addtl_test(drng,
1916 buf, test->expectedlen, &addtl, &test_data);
1917 } else {
1918 ret = crypto_drbg_get_bytes_addtl(drng,
1919 buf, test->expectedlen, &addtl);
1921 if (ret < 0) {
1922 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1923 "driver %s\n", driver);
1924 goto outbuf;
1927 ret = memcmp(test->expected, buf, test->expectedlen);
1929 outbuf:
1930 crypto_free_rng(drng);
1931 kzfree(buf);
1932 return ret;
1936 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1937 u32 type, u32 mask)
1939 int err = 0;
1940 int pr = 0;
1941 int i = 0;
1942 struct drbg_testvec *template = desc->suite.drbg.vecs;
1943 unsigned int tcount = desc->suite.drbg.count;
1945 if (0 == memcmp(driver, "drbg_pr_", 8))
1946 pr = 1;
1948 for (i = 0; i < tcount; i++) {
1949 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1950 if (err) {
1951 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1952 i, driver);
1953 err = -EINVAL;
1954 break;
1957 return err;
1961 static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec,
1962 const char *alg)
1964 struct kpp_request *req;
1965 void *input_buf = NULL;
1966 void *output_buf = NULL;
1967 struct tcrypt_result result;
1968 unsigned int out_len_max;
1969 int err = -ENOMEM;
1970 struct scatterlist src, dst;
1972 req = kpp_request_alloc(tfm, GFP_KERNEL);
1973 if (!req)
1974 return err;
1976 init_completion(&result.completion);
1978 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1979 if (err < 0)
1980 goto free_req;
1982 out_len_max = crypto_kpp_maxsize(tfm);
1983 output_buf = kzalloc(out_len_max, GFP_KERNEL);
1984 if (!output_buf) {
1985 err = -ENOMEM;
1986 goto free_req;
1989 /* Use appropriate parameter as base */
1990 kpp_request_set_input(req, NULL, 0);
1991 sg_init_one(&dst, output_buf, out_len_max);
1992 kpp_request_set_output(req, &dst, out_len_max);
1993 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1994 tcrypt_complete, &result);
1996 /* Compute public key */
1997 err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
1998 if (err) {
1999 pr_err("alg: %s: generate public key test failed. err %d\n",
2000 alg, err);
2001 goto free_output;
2003 /* Verify calculated public key */
2004 if (memcmp(vec->expected_a_public, sg_virt(req->dst),
2005 vec->expected_a_public_size)) {
2006 pr_err("alg: %s: generate public key test failed. Invalid output\n",
2007 alg);
2008 err = -EINVAL;
2009 goto free_output;
2012 /* Calculate shared secret key by using counter part (b) public key. */
2013 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
2014 if (!input_buf) {
2015 err = -ENOMEM;
2016 goto free_output;
2019 memcpy(input_buf, vec->b_public, vec->b_public_size);
2020 sg_init_one(&src, input_buf, vec->b_public_size);
2021 sg_init_one(&dst, output_buf, out_len_max);
2022 kpp_request_set_input(req, &src, vec->b_public_size);
2023 kpp_request_set_output(req, &dst, out_len_max);
2024 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2025 tcrypt_complete, &result);
2026 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
2027 if (err) {
2028 pr_err("alg: %s: compute shard secret test failed. err %d\n",
2029 alg, err);
2030 goto free_all;
2033 * verify shared secret from which the user will derive
2034 * secret key by executing whatever hash it has chosen
2036 if (memcmp(vec->expected_ss, sg_virt(req->dst),
2037 vec->expected_ss_size)) {
2038 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
2039 alg);
2040 err = -EINVAL;
2043 free_all:
2044 kfree(input_buf);
2045 free_output:
2046 kfree(output_buf);
2047 free_req:
2048 kpp_request_free(req);
2049 return err;
2052 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
2053 struct kpp_testvec *vecs, unsigned int tcount)
2055 int ret, i;
2057 for (i = 0; i < tcount; i++) {
2058 ret = do_test_kpp(tfm, vecs++, alg);
2059 if (ret) {
2060 pr_err("alg: %s: test failed on vector %d, err=%d\n",
2061 alg, i + 1, ret);
2062 return ret;
2065 return 0;
2068 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
2069 u32 type, u32 mask)
2071 struct crypto_kpp *tfm;
2072 int err = 0;
2074 tfm = crypto_alloc_kpp(driver, type, mask);
2075 if (IS_ERR(tfm)) {
2076 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
2077 driver, PTR_ERR(tfm));
2078 return PTR_ERR(tfm);
2080 if (desc->suite.kpp.vecs)
2081 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
2082 desc->suite.kpp.count);
2084 crypto_free_kpp(tfm);
2085 return err;
2088 static int test_akcipher_one(struct crypto_akcipher *tfm,
2089 struct akcipher_testvec *vecs)
2091 char *xbuf[XBUFSIZE];
2092 struct akcipher_request *req;
2093 void *outbuf_enc = NULL;
2094 void *outbuf_dec = NULL;
2095 struct tcrypt_result result;
2096 unsigned int out_len_max, out_len = 0;
2097 int err = -ENOMEM;
2098 struct scatterlist src, dst, src_tab[2];
2100 if (testmgr_alloc_buf(xbuf))
2101 return err;
2103 req = akcipher_request_alloc(tfm, GFP_KERNEL);
2104 if (!req)
2105 goto free_xbuf;
2107 init_completion(&result.completion);
2109 if (vecs->public_key_vec)
2110 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
2111 vecs->key_len);
2112 else
2113 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
2114 vecs->key_len);
2115 if (err)
2116 goto free_req;
2118 err = -ENOMEM;
2119 out_len_max = crypto_akcipher_maxsize(tfm);
2120 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
2121 if (!outbuf_enc)
2122 goto free_req;
2124 if (WARN_ON(vecs->m_size > PAGE_SIZE))
2125 goto free_all;
2127 memcpy(xbuf[0], vecs->m, vecs->m_size);
2129 sg_init_table(src_tab, 2);
2130 sg_set_buf(&src_tab[0], xbuf[0], 8);
2131 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
2132 sg_init_one(&dst, outbuf_enc, out_len_max);
2133 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
2134 out_len_max);
2135 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2136 tcrypt_complete, &result);
2138 /* Run RSA encrypt - c = m^e mod n;*/
2139 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
2140 if (err) {
2141 pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
2142 goto free_all;
2144 if (req->dst_len != vecs->c_size) {
2145 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
2146 err = -EINVAL;
2147 goto free_all;
2149 /* verify that encrypted message is equal to expected */
2150 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
2151 pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
2152 hexdump(outbuf_enc, vecs->c_size);
2153 err = -EINVAL;
2154 goto free_all;
2156 /* Don't invoke decrypt for vectors with public key */
2157 if (vecs->public_key_vec) {
2158 err = 0;
2159 goto free_all;
2161 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
2162 if (!outbuf_dec) {
2163 err = -ENOMEM;
2164 goto free_all;
2167 if (WARN_ON(vecs->c_size > PAGE_SIZE))
2168 goto free_all;
2170 memcpy(xbuf[0], vecs->c, vecs->c_size);
2172 sg_init_one(&src, xbuf[0], vecs->c_size);
2173 sg_init_one(&dst, outbuf_dec, out_len_max);
2174 init_completion(&result.completion);
2175 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2177 /* Run RSA decrypt - m = c^d mod n;*/
2178 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
2179 if (err) {
2180 pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2181 goto free_all;
2183 out_len = req->dst_len;
2184 if (out_len < vecs->m_size) {
2185 pr_err("alg: akcipher: decrypt test failed. "
2186 "Invalid output len %u\n", out_len);
2187 err = -EINVAL;
2188 goto free_all;
2190 /* verify that decrypted message is equal to the original msg */
2191 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2192 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2193 vecs->m_size)) {
2194 pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2195 hexdump(outbuf_dec, out_len);
2196 err = -EINVAL;
2198 free_all:
2199 kfree(outbuf_dec);
2200 kfree(outbuf_enc);
2201 free_req:
2202 akcipher_request_free(req);
2203 free_xbuf:
2204 testmgr_free_buf(xbuf);
2205 return err;
2208 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2209 struct akcipher_testvec *vecs, unsigned int tcount)
2211 const char *algo =
2212 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2213 int ret, i;
2215 for (i = 0; i < tcount; i++) {
2216 ret = test_akcipher_one(tfm, vecs++);
2217 if (!ret)
2218 continue;
2220 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2221 i + 1, algo, ret);
2222 return ret;
2224 return 0;
2227 static int alg_test_akcipher(const struct alg_test_desc *desc,
2228 const char *driver, u32 type, u32 mask)
2230 struct crypto_akcipher *tfm;
2231 int err = 0;
2233 tfm = crypto_alloc_akcipher(driver, type, mask);
2234 if (IS_ERR(tfm)) {
2235 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2236 driver, PTR_ERR(tfm));
2237 return PTR_ERR(tfm);
2239 if (desc->suite.akcipher.vecs)
2240 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2241 desc->suite.akcipher.count);
2243 crypto_free_akcipher(tfm);
2244 return err;
2247 static int alg_test_null(const struct alg_test_desc *desc,
2248 const char *driver, u32 type, u32 mask)
2250 return 0;
2253 #define __VECS(tv) { .vecs = tv, .count = ARRAY_SIZE(tv) }
2255 /* Please keep this list sorted by algorithm name. */
2256 static const struct alg_test_desc alg_test_descs[] = {
2258 .alg = "ansi_cprng",
2259 .test = alg_test_cprng,
2260 .suite = {
2261 .cprng = __VECS(ansi_cprng_aes_tv_template)
2263 }, {
2264 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2265 .test = alg_test_aead,
2266 .suite = {
2267 .aead = {
2268 .enc = __VECS(hmac_md5_ecb_cipher_null_enc_tv_template),
2269 .dec = __VECS(hmac_md5_ecb_cipher_null_dec_tv_template)
2272 }, {
2273 .alg = "authenc(hmac(sha1),cbc(aes))",
2274 .test = alg_test_aead,
2275 .suite = {
2276 .aead = {
2277 .enc = __VECS(hmac_sha1_aes_cbc_enc_tv_temp)
2280 }, {
2281 .alg = "authenc(hmac(sha1),cbc(des))",
2282 .test = alg_test_aead,
2283 .suite = {
2284 .aead = {
2285 .enc = __VECS(hmac_sha1_des_cbc_enc_tv_temp)
2288 }, {
2289 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2290 .test = alg_test_aead,
2291 .fips_allowed = 1,
2292 .suite = {
2293 .aead = {
2294 .enc = __VECS(hmac_sha1_des3_ede_cbc_enc_tv_temp)
2297 }, {
2298 .alg = "authenc(hmac(sha1),ctr(aes))",
2299 .test = alg_test_null,
2300 .fips_allowed = 1,
2301 }, {
2302 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2303 .test = alg_test_aead,
2304 .suite = {
2305 .aead = {
2306 .enc = __VECS(hmac_sha1_ecb_cipher_null_enc_tv_temp),
2307 .dec = __VECS(hmac_sha1_ecb_cipher_null_dec_tv_temp)
2310 }, {
2311 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2312 .test = alg_test_null,
2313 .fips_allowed = 1,
2314 }, {
2315 .alg = "authenc(hmac(sha224),cbc(des))",
2316 .test = alg_test_aead,
2317 .suite = {
2318 .aead = {
2319 .enc = __VECS(hmac_sha224_des_cbc_enc_tv_temp)
2322 }, {
2323 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2324 .test = alg_test_aead,
2325 .fips_allowed = 1,
2326 .suite = {
2327 .aead = {
2328 .enc = __VECS(hmac_sha224_des3_ede_cbc_enc_tv_temp)
2331 }, {
2332 .alg = "authenc(hmac(sha256),cbc(aes))",
2333 .test = alg_test_aead,
2334 .fips_allowed = 1,
2335 .suite = {
2336 .aead = {
2337 .enc = __VECS(hmac_sha256_aes_cbc_enc_tv_temp)
2340 }, {
2341 .alg = "authenc(hmac(sha256),cbc(des))",
2342 .test = alg_test_aead,
2343 .suite = {
2344 .aead = {
2345 .enc = __VECS(hmac_sha256_des_cbc_enc_tv_temp)
2348 }, {
2349 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2350 .test = alg_test_aead,
2351 .fips_allowed = 1,
2352 .suite = {
2353 .aead = {
2354 .enc = __VECS(hmac_sha256_des3_ede_cbc_enc_tv_temp)
2357 }, {
2358 .alg = "authenc(hmac(sha256),ctr(aes))",
2359 .test = alg_test_null,
2360 .fips_allowed = 1,
2361 }, {
2362 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2363 .test = alg_test_null,
2364 .fips_allowed = 1,
2365 }, {
2366 .alg = "authenc(hmac(sha384),cbc(des))",
2367 .test = alg_test_aead,
2368 .suite = {
2369 .aead = {
2370 .enc = __VECS(hmac_sha384_des_cbc_enc_tv_temp)
2373 }, {
2374 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2375 .test = alg_test_aead,
2376 .fips_allowed = 1,
2377 .suite = {
2378 .aead = {
2379 .enc = __VECS(hmac_sha384_des3_ede_cbc_enc_tv_temp)
2382 }, {
2383 .alg = "authenc(hmac(sha384),ctr(aes))",
2384 .test = alg_test_null,
2385 .fips_allowed = 1,
2386 }, {
2387 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2388 .test = alg_test_null,
2389 .fips_allowed = 1,
2390 }, {
2391 .alg = "authenc(hmac(sha512),cbc(aes))",
2392 .fips_allowed = 1,
2393 .test = alg_test_aead,
2394 .suite = {
2395 .aead = {
2396 .enc = __VECS(hmac_sha512_aes_cbc_enc_tv_temp)
2399 }, {
2400 .alg = "authenc(hmac(sha512),cbc(des))",
2401 .test = alg_test_aead,
2402 .suite = {
2403 .aead = {
2404 .enc = __VECS(hmac_sha512_des_cbc_enc_tv_temp)
2407 }, {
2408 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2409 .test = alg_test_aead,
2410 .fips_allowed = 1,
2411 .suite = {
2412 .aead = {
2413 .enc = __VECS(hmac_sha512_des3_ede_cbc_enc_tv_temp)
2416 }, {
2417 .alg = "authenc(hmac(sha512),ctr(aes))",
2418 .test = alg_test_null,
2419 .fips_allowed = 1,
2420 }, {
2421 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2422 .test = alg_test_null,
2423 .fips_allowed = 1,
2424 }, {
2425 .alg = "cbc(aes)",
2426 .test = alg_test_skcipher,
2427 .fips_allowed = 1,
2428 .suite = {
2429 .cipher = {
2430 .enc = __VECS(aes_cbc_enc_tv_template),
2431 .dec = __VECS(aes_cbc_dec_tv_template)
2434 }, {
2435 .alg = "cbc(anubis)",
2436 .test = alg_test_skcipher,
2437 .suite = {
2438 .cipher = {
2439 .enc = __VECS(anubis_cbc_enc_tv_template),
2440 .dec = __VECS(anubis_cbc_dec_tv_template)
2443 }, {
2444 .alg = "cbc(blowfish)",
2445 .test = alg_test_skcipher,
2446 .suite = {
2447 .cipher = {
2448 .enc = __VECS(bf_cbc_enc_tv_template),
2449 .dec = __VECS(bf_cbc_dec_tv_template)
2452 }, {
2453 .alg = "cbc(camellia)",
2454 .test = alg_test_skcipher,
2455 .suite = {
2456 .cipher = {
2457 .enc = __VECS(camellia_cbc_enc_tv_template),
2458 .dec = __VECS(camellia_cbc_dec_tv_template)
2461 }, {
2462 .alg = "cbc(cast5)",
2463 .test = alg_test_skcipher,
2464 .suite = {
2465 .cipher = {
2466 .enc = __VECS(cast5_cbc_enc_tv_template),
2467 .dec = __VECS(cast5_cbc_dec_tv_template)
2470 }, {
2471 .alg = "cbc(cast6)",
2472 .test = alg_test_skcipher,
2473 .suite = {
2474 .cipher = {
2475 .enc = __VECS(cast6_cbc_enc_tv_template),
2476 .dec = __VECS(cast6_cbc_dec_tv_template)
2479 }, {
2480 .alg = "cbc(des)",
2481 .test = alg_test_skcipher,
2482 .suite = {
2483 .cipher = {
2484 .enc = __VECS(des_cbc_enc_tv_template),
2485 .dec = __VECS(des_cbc_dec_tv_template)
2488 }, {
2489 .alg = "cbc(des3_ede)",
2490 .test = alg_test_skcipher,
2491 .fips_allowed = 1,
2492 .suite = {
2493 .cipher = {
2494 .enc = __VECS(des3_ede_cbc_enc_tv_template),
2495 .dec = __VECS(des3_ede_cbc_dec_tv_template)
2498 }, {
2499 .alg = "cbc(serpent)",
2500 .test = alg_test_skcipher,
2501 .suite = {
2502 .cipher = {
2503 .enc = __VECS(serpent_cbc_enc_tv_template),
2504 .dec = __VECS(serpent_cbc_dec_tv_template)
2507 }, {
2508 .alg = "cbc(twofish)",
2509 .test = alg_test_skcipher,
2510 .suite = {
2511 .cipher = {
2512 .enc = __VECS(tf_cbc_enc_tv_template),
2513 .dec = __VECS(tf_cbc_dec_tv_template)
2516 }, {
2517 .alg = "cbcmac(aes)",
2518 .fips_allowed = 1,
2519 .test = alg_test_hash,
2520 .suite = {
2521 .hash = __VECS(aes_cbcmac_tv_template)
2523 }, {
2524 .alg = "ccm(aes)",
2525 .test = alg_test_aead,
2526 .fips_allowed = 1,
2527 .suite = {
2528 .aead = {
2529 .enc = __VECS(aes_ccm_enc_tv_template),
2530 .dec = __VECS(aes_ccm_dec_tv_template)
2533 }, {
2534 .alg = "chacha20",
2535 .test = alg_test_skcipher,
2536 .suite = {
2537 .cipher = {
2538 .enc = __VECS(chacha20_enc_tv_template),
2539 .dec = __VECS(chacha20_enc_tv_template),
2542 }, {
2543 .alg = "cmac(aes)",
2544 .fips_allowed = 1,
2545 .test = alg_test_hash,
2546 .suite = {
2547 .hash = __VECS(aes_cmac128_tv_template)
2549 }, {
2550 .alg = "cmac(des3_ede)",
2551 .fips_allowed = 1,
2552 .test = alg_test_hash,
2553 .suite = {
2554 .hash = __VECS(des3_ede_cmac64_tv_template)
2556 }, {
2557 .alg = "compress_null",
2558 .test = alg_test_null,
2559 }, {
2560 .alg = "crc32",
2561 .test = alg_test_hash,
2562 .suite = {
2563 .hash = __VECS(crc32_tv_template)
2565 }, {
2566 .alg = "crc32c",
2567 .test = alg_test_crc32c,
2568 .fips_allowed = 1,
2569 .suite = {
2570 .hash = __VECS(crc32c_tv_template)
2572 }, {
2573 .alg = "crct10dif",
2574 .test = alg_test_hash,
2575 .fips_allowed = 1,
2576 .suite = {
2577 .hash = __VECS(crct10dif_tv_template)
2579 }, {
2580 .alg = "ctr(aes)",
2581 .test = alg_test_skcipher,
2582 .fips_allowed = 1,
2583 .suite = {
2584 .cipher = {
2585 .enc = __VECS(aes_ctr_enc_tv_template),
2586 .dec = __VECS(aes_ctr_dec_tv_template)
2589 }, {
2590 .alg = "ctr(blowfish)",
2591 .test = alg_test_skcipher,
2592 .suite = {
2593 .cipher = {
2594 .enc = __VECS(bf_ctr_enc_tv_template),
2595 .dec = __VECS(bf_ctr_dec_tv_template)
2598 }, {
2599 .alg = "ctr(camellia)",
2600 .test = alg_test_skcipher,
2601 .suite = {
2602 .cipher = {
2603 .enc = __VECS(camellia_ctr_enc_tv_template),
2604 .dec = __VECS(camellia_ctr_dec_tv_template)
2607 }, {
2608 .alg = "ctr(cast5)",
2609 .test = alg_test_skcipher,
2610 .suite = {
2611 .cipher = {
2612 .enc = __VECS(cast5_ctr_enc_tv_template),
2613 .dec = __VECS(cast5_ctr_dec_tv_template)
2616 }, {
2617 .alg = "ctr(cast6)",
2618 .test = alg_test_skcipher,
2619 .suite = {
2620 .cipher = {
2621 .enc = __VECS(cast6_ctr_enc_tv_template),
2622 .dec = __VECS(cast6_ctr_dec_tv_template)
2625 }, {
2626 .alg = "ctr(des)",
2627 .test = alg_test_skcipher,
2628 .suite = {
2629 .cipher = {
2630 .enc = __VECS(des_ctr_enc_tv_template),
2631 .dec = __VECS(des_ctr_dec_tv_template)
2634 }, {
2635 .alg = "ctr(des3_ede)",
2636 .test = alg_test_skcipher,
2637 .suite = {
2638 .cipher = {
2639 .enc = __VECS(des3_ede_ctr_enc_tv_template),
2640 .dec = __VECS(des3_ede_ctr_dec_tv_template)
2643 }, {
2644 .alg = "ctr(serpent)",
2645 .test = alg_test_skcipher,
2646 .suite = {
2647 .cipher = {
2648 .enc = __VECS(serpent_ctr_enc_tv_template),
2649 .dec = __VECS(serpent_ctr_dec_tv_template)
2652 }, {
2653 .alg = "ctr(twofish)",
2654 .test = alg_test_skcipher,
2655 .suite = {
2656 .cipher = {
2657 .enc = __VECS(tf_ctr_enc_tv_template),
2658 .dec = __VECS(tf_ctr_dec_tv_template)
2661 }, {
2662 .alg = "cts(cbc(aes))",
2663 .test = alg_test_skcipher,
2664 .suite = {
2665 .cipher = {
2666 .enc = __VECS(cts_mode_enc_tv_template),
2667 .dec = __VECS(cts_mode_dec_tv_template)
2670 }, {
2671 .alg = "deflate",
2672 .test = alg_test_comp,
2673 .fips_allowed = 1,
2674 .suite = {
2675 .comp = {
2676 .comp = __VECS(deflate_comp_tv_template),
2677 .decomp = __VECS(deflate_decomp_tv_template)
2680 }, {
2681 .alg = "dh",
2682 .test = alg_test_kpp,
2683 .fips_allowed = 1,
2684 .suite = {
2685 .kpp = __VECS(dh_tv_template)
2687 }, {
2688 .alg = "digest_null",
2689 .test = alg_test_null,
2690 }, {
2691 .alg = "drbg_nopr_ctr_aes128",
2692 .test = alg_test_drbg,
2693 .fips_allowed = 1,
2694 .suite = {
2695 .drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
2697 }, {
2698 .alg = "drbg_nopr_ctr_aes192",
2699 .test = alg_test_drbg,
2700 .fips_allowed = 1,
2701 .suite = {
2702 .drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
2704 }, {
2705 .alg = "drbg_nopr_ctr_aes256",
2706 .test = alg_test_drbg,
2707 .fips_allowed = 1,
2708 .suite = {
2709 .drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
2711 }, {
2713 * There is no need to specifically test the DRBG with every
2714 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2716 .alg = "drbg_nopr_hmac_sha1",
2717 .fips_allowed = 1,
2718 .test = alg_test_null,
2719 }, {
2720 .alg = "drbg_nopr_hmac_sha256",
2721 .test = alg_test_drbg,
2722 .fips_allowed = 1,
2723 .suite = {
2724 .drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
2726 }, {
2727 /* covered by drbg_nopr_hmac_sha256 test */
2728 .alg = "drbg_nopr_hmac_sha384",
2729 .fips_allowed = 1,
2730 .test = alg_test_null,
2731 }, {
2732 .alg = "drbg_nopr_hmac_sha512",
2733 .test = alg_test_null,
2734 .fips_allowed = 1,
2735 }, {
2736 .alg = "drbg_nopr_sha1",
2737 .fips_allowed = 1,
2738 .test = alg_test_null,
2739 }, {
2740 .alg = "drbg_nopr_sha256",
2741 .test = alg_test_drbg,
2742 .fips_allowed = 1,
2743 .suite = {
2744 .drbg = __VECS(drbg_nopr_sha256_tv_template)
2746 }, {
2747 /* covered by drbg_nopr_sha256 test */
2748 .alg = "drbg_nopr_sha384",
2749 .fips_allowed = 1,
2750 .test = alg_test_null,
2751 }, {
2752 .alg = "drbg_nopr_sha512",
2753 .fips_allowed = 1,
2754 .test = alg_test_null,
2755 }, {
2756 .alg = "drbg_pr_ctr_aes128",
2757 .test = alg_test_drbg,
2758 .fips_allowed = 1,
2759 .suite = {
2760 .drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
2762 }, {
2763 /* covered by drbg_pr_ctr_aes128 test */
2764 .alg = "drbg_pr_ctr_aes192",
2765 .fips_allowed = 1,
2766 .test = alg_test_null,
2767 }, {
2768 .alg = "drbg_pr_ctr_aes256",
2769 .fips_allowed = 1,
2770 .test = alg_test_null,
2771 }, {
2772 .alg = "drbg_pr_hmac_sha1",
2773 .fips_allowed = 1,
2774 .test = alg_test_null,
2775 }, {
2776 .alg = "drbg_pr_hmac_sha256",
2777 .test = alg_test_drbg,
2778 .fips_allowed = 1,
2779 .suite = {
2780 .drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
2782 }, {
2783 /* covered by drbg_pr_hmac_sha256 test */
2784 .alg = "drbg_pr_hmac_sha384",
2785 .fips_allowed = 1,
2786 .test = alg_test_null,
2787 }, {
2788 .alg = "drbg_pr_hmac_sha512",
2789 .test = alg_test_null,
2790 .fips_allowed = 1,
2791 }, {
2792 .alg = "drbg_pr_sha1",
2793 .fips_allowed = 1,
2794 .test = alg_test_null,
2795 }, {
2796 .alg = "drbg_pr_sha256",
2797 .test = alg_test_drbg,
2798 .fips_allowed = 1,
2799 .suite = {
2800 .drbg = __VECS(drbg_pr_sha256_tv_template)
2802 }, {
2803 /* covered by drbg_pr_sha256 test */
2804 .alg = "drbg_pr_sha384",
2805 .fips_allowed = 1,
2806 .test = alg_test_null,
2807 }, {
2808 .alg = "drbg_pr_sha512",
2809 .fips_allowed = 1,
2810 .test = alg_test_null,
2811 }, {
2812 .alg = "ecb(aes)",
2813 .test = alg_test_skcipher,
2814 .fips_allowed = 1,
2815 .suite = {
2816 .cipher = {
2817 .enc = __VECS(aes_enc_tv_template),
2818 .dec = __VECS(aes_dec_tv_template)
2821 }, {
2822 .alg = "ecb(anubis)",
2823 .test = alg_test_skcipher,
2824 .suite = {
2825 .cipher = {
2826 .enc = __VECS(anubis_enc_tv_template),
2827 .dec = __VECS(anubis_dec_tv_template)
2830 }, {
2831 .alg = "ecb(arc4)",
2832 .test = alg_test_skcipher,
2833 .suite = {
2834 .cipher = {
2835 .enc = __VECS(arc4_enc_tv_template),
2836 .dec = __VECS(arc4_dec_tv_template)
2839 }, {
2840 .alg = "ecb(blowfish)",
2841 .test = alg_test_skcipher,
2842 .suite = {
2843 .cipher = {
2844 .enc = __VECS(bf_enc_tv_template),
2845 .dec = __VECS(bf_dec_tv_template)
2848 }, {
2849 .alg = "ecb(camellia)",
2850 .test = alg_test_skcipher,
2851 .suite = {
2852 .cipher = {
2853 .enc = __VECS(camellia_enc_tv_template),
2854 .dec = __VECS(camellia_dec_tv_template)
2857 }, {
2858 .alg = "ecb(cast5)",
2859 .test = alg_test_skcipher,
2860 .suite = {
2861 .cipher = {
2862 .enc = __VECS(cast5_enc_tv_template),
2863 .dec = __VECS(cast5_dec_tv_template)
2866 }, {
2867 .alg = "ecb(cast6)",
2868 .test = alg_test_skcipher,
2869 .suite = {
2870 .cipher = {
2871 .enc = __VECS(cast6_enc_tv_template),
2872 .dec = __VECS(cast6_dec_tv_template)
2875 }, {
2876 .alg = "ecb(cipher_null)",
2877 .test = alg_test_null,
2878 }, {
2879 .alg = "ecb(des)",
2880 .test = alg_test_skcipher,
2881 .suite = {
2882 .cipher = {
2883 .enc = __VECS(des_enc_tv_template),
2884 .dec = __VECS(des_dec_tv_template)
2887 }, {
2888 .alg = "ecb(des3_ede)",
2889 .test = alg_test_skcipher,
2890 .fips_allowed = 1,
2891 .suite = {
2892 .cipher = {
2893 .enc = __VECS(des3_ede_enc_tv_template),
2894 .dec = __VECS(des3_ede_dec_tv_template)
2897 }, {
2898 .alg = "ecb(fcrypt)",
2899 .test = alg_test_skcipher,
2900 .suite = {
2901 .cipher = {
2902 .enc = {
2903 .vecs = fcrypt_pcbc_enc_tv_template,
2904 .count = 1
2906 .dec = {
2907 .vecs = fcrypt_pcbc_dec_tv_template,
2908 .count = 1
2912 }, {
2913 .alg = "ecb(khazad)",
2914 .test = alg_test_skcipher,
2915 .suite = {
2916 .cipher = {
2917 .enc = __VECS(khazad_enc_tv_template),
2918 .dec = __VECS(khazad_dec_tv_template)
2921 }, {
2922 .alg = "ecb(seed)",
2923 .test = alg_test_skcipher,
2924 .suite = {
2925 .cipher = {
2926 .enc = __VECS(seed_enc_tv_template),
2927 .dec = __VECS(seed_dec_tv_template)
2930 }, {
2931 .alg = "ecb(serpent)",
2932 .test = alg_test_skcipher,
2933 .suite = {
2934 .cipher = {
2935 .enc = __VECS(serpent_enc_tv_template),
2936 .dec = __VECS(serpent_dec_tv_template)
2939 }, {
2940 .alg = "ecb(tea)",
2941 .test = alg_test_skcipher,
2942 .suite = {
2943 .cipher = {
2944 .enc = __VECS(tea_enc_tv_template),
2945 .dec = __VECS(tea_dec_tv_template)
2948 }, {
2949 .alg = "ecb(tnepres)",
2950 .test = alg_test_skcipher,
2951 .suite = {
2952 .cipher = {
2953 .enc = __VECS(tnepres_enc_tv_template),
2954 .dec = __VECS(tnepres_dec_tv_template)
2957 }, {
2958 .alg = "ecb(twofish)",
2959 .test = alg_test_skcipher,
2960 .suite = {
2961 .cipher = {
2962 .enc = __VECS(tf_enc_tv_template),
2963 .dec = __VECS(tf_dec_tv_template)
2966 }, {
2967 .alg = "ecb(xeta)",
2968 .test = alg_test_skcipher,
2969 .suite = {
2970 .cipher = {
2971 .enc = __VECS(xeta_enc_tv_template),
2972 .dec = __VECS(xeta_dec_tv_template)
2975 }, {
2976 .alg = "ecb(xtea)",
2977 .test = alg_test_skcipher,
2978 .suite = {
2979 .cipher = {
2980 .enc = __VECS(xtea_enc_tv_template),
2981 .dec = __VECS(xtea_dec_tv_template)
2984 }, {
2985 .alg = "ecdh",
2986 .test = alg_test_kpp,
2987 .fips_allowed = 1,
2988 .suite = {
2989 .kpp = __VECS(ecdh_tv_template)
2991 }, {
2992 .alg = "gcm(aes)",
2993 .test = alg_test_aead,
2994 .fips_allowed = 1,
2995 .suite = {
2996 .aead = {
2997 .enc = __VECS(aes_gcm_enc_tv_template),
2998 .dec = __VECS(aes_gcm_dec_tv_template)
3001 }, {
3002 .alg = "ghash",
3003 .test = alg_test_hash,
3004 .fips_allowed = 1,
3005 .suite = {
3006 .hash = __VECS(ghash_tv_template)
3008 }, {
3009 .alg = "hmac(crc32)",
3010 .test = alg_test_hash,
3011 .suite = {
3012 .hash = __VECS(bfin_crc_tv_template)
3014 }, {
3015 .alg = "hmac(md5)",
3016 .test = alg_test_hash,
3017 .suite = {
3018 .hash = __VECS(hmac_md5_tv_template)
3020 }, {
3021 .alg = "hmac(rmd128)",
3022 .test = alg_test_hash,
3023 .suite = {
3024 .hash = __VECS(hmac_rmd128_tv_template)
3026 }, {
3027 .alg = "hmac(rmd160)",
3028 .test = alg_test_hash,
3029 .suite = {
3030 .hash = __VECS(hmac_rmd160_tv_template)
3032 }, {
3033 .alg = "hmac(sha1)",
3034 .test = alg_test_hash,
3035 .fips_allowed = 1,
3036 .suite = {
3037 .hash = __VECS(hmac_sha1_tv_template)
3039 }, {
3040 .alg = "hmac(sha224)",
3041 .test = alg_test_hash,
3042 .fips_allowed = 1,
3043 .suite = {
3044 .hash = __VECS(hmac_sha224_tv_template)
3046 }, {
3047 .alg = "hmac(sha256)",
3048 .test = alg_test_hash,
3049 .fips_allowed = 1,
3050 .suite = {
3051 .hash = __VECS(hmac_sha256_tv_template)
3053 }, {
3054 .alg = "hmac(sha3-224)",
3055 .test = alg_test_hash,
3056 .fips_allowed = 1,
3057 .suite = {
3058 .hash = __VECS(hmac_sha3_224_tv_template)
3060 }, {
3061 .alg = "hmac(sha3-256)",
3062 .test = alg_test_hash,
3063 .fips_allowed = 1,
3064 .suite = {
3065 .hash = __VECS(hmac_sha3_256_tv_template)
3067 }, {
3068 .alg = "hmac(sha3-384)",
3069 .test = alg_test_hash,
3070 .fips_allowed = 1,
3071 .suite = {
3072 .hash = __VECS(hmac_sha3_384_tv_template)
3074 }, {
3075 .alg = "hmac(sha3-512)",
3076 .test = alg_test_hash,
3077 .fips_allowed = 1,
3078 .suite = {
3079 .hash = __VECS(hmac_sha3_512_tv_template)
3081 }, {
3082 .alg = "hmac(sha384)",
3083 .test = alg_test_hash,
3084 .fips_allowed = 1,
3085 .suite = {
3086 .hash = __VECS(hmac_sha384_tv_template)
3088 }, {
3089 .alg = "hmac(sha512)",
3090 .test = alg_test_hash,
3091 .fips_allowed = 1,
3092 .suite = {
3093 .hash = __VECS(hmac_sha512_tv_template)
3095 }, {
3096 .alg = "jitterentropy_rng",
3097 .fips_allowed = 1,
3098 .test = alg_test_null,
3099 }, {
3100 .alg = "kw(aes)",
3101 .test = alg_test_skcipher,
3102 .fips_allowed = 1,
3103 .suite = {
3104 .cipher = {
3105 .enc = __VECS(aes_kw_enc_tv_template),
3106 .dec = __VECS(aes_kw_dec_tv_template)
3109 }, {
3110 .alg = "lrw(aes)",
3111 .test = alg_test_skcipher,
3112 .suite = {
3113 .cipher = {
3114 .enc = __VECS(aes_lrw_enc_tv_template),
3115 .dec = __VECS(aes_lrw_dec_tv_template)
3118 }, {
3119 .alg = "lrw(camellia)",
3120 .test = alg_test_skcipher,
3121 .suite = {
3122 .cipher = {
3123 .enc = __VECS(camellia_lrw_enc_tv_template),
3124 .dec = __VECS(camellia_lrw_dec_tv_template)
3127 }, {
3128 .alg = "lrw(cast6)",
3129 .test = alg_test_skcipher,
3130 .suite = {
3131 .cipher = {
3132 .enc = __VECS(cast6_lrw_enc_tv_template),
3133 .dec = __VECS(cast6_lrw_dec_tv_template)
3136 }, {
3137 .alg = "lrw(serpent)",
3138 .test = alg_test_skcipher,
3139 .suite = {
3140 .cipher = {
3141 .enc = __VECS(serpent_lrw_enc_tv_template),
3142 .dec = __VECS(serpent_lrw_dec_tv_template)
3145 }, {
3146 .alg = "lrw(twofish)",
3147 .test = alg_test_skcipher,
3148 .suite = {
3149 .cipher = {
3150 .enc = __VECS(tf_lrw_enc_tv_template),
3151 .dec = __VECS(tf_lrw_dec_tv_template)
3154 }, {
3155 .alg = "lz4",
3156 .test = alg_test_comp,
3157 .fips_allowed = 1,
3158 .suite = {
3159 .comp = {
3160 .comp = __VECS(lz4_comp_tv_template),
3161 .decomp = __VECS(lz4_decomp_tv_template)
3164 }, {
3165 .alg = "lz4hc",
3166 .test = alg_test_comp,
3167 .fips_allowed = 1,
3168 .suite = {
3169 .comp = {
3170 .comp = __VECS(lz4hc_comp_tv_template),
3171 .decomp = __VECS(lz4hc_decomp_tv_template)
3174 }, {
3175 .alg = "lzo",
3176 .test = alg_test_comp,
3177 .fips_allowed = 1,
3178 .suite = {
3179 .comp = {
3180 .comp = __VECS(lzo_comp_tv_template),
3181 .decomp = __VECS(lzo_decomp_tv_template)
3184 }, {
3185 .alg = "md4",
3186 .test = alg_test_hash,
3187 .suite = {
3188 .hash = __VECS(md4_tv_template)
3190 }, {
3191 .alg = "md5",
3192 .test = alg_test_hash,
3193 .suite = {
3194 .hash = __VECS(md5_tv_template)
3196 }, {
3197 .alg = "michael_mic",
3198 .test = alg_test_hash,
3199 .suite = {
3200 .hash = __VECS(michael_mic_tv_template)
3202 }, {
3203 .alg = "ofb(aes)",
3204 .test = alg_test_skcipher,
3205 .fips_allowed = 1,
3206 .suite = {
3207 .cipher = {
3208 .enc = __VECS(aes_ofb_enc_tv_template),
3209 .dec = __VECS(aes_ofb_dec_tv_template)
3212 }, {
3213 .alg = "pcbc(fcrypt)",
3214 .test = alg_test_skcipher,
3215 .suite = {
3216 .cipher = {
3217 .enc = __VECS(fcrypt_pcbc_enc_tv_template),
3218 .dec = __VECS(fcrypt_pcbc_dec_tv_template)
3221 }, {
3222 .alg = "poly1305",
3223 .test = alg_test_hash,
3224 .suite = {
3225 .hash = __VECS(poly1305_tv_template)
3227 }, {
3228 .alg = "rfc3686(ctr(aes))",
3229 .test = alg_test_skcipher,
3230 .fips_allowed = 1,
3231 .suite = {
3232 .cipher = {
3233 .enc = __VECS(aes_ctr_rfc3686_enc_tv_template),
3234 .dec = __VECS(aes_ctr_rfc3686_dec_tv_template)
3237 }, {
3238 .alg = "rfc4106(gcm(aes))",
3239 .test = alg_test_aead,
3240 .fips_allowed = 1,
3241 .suite = {
3242 .aead = {
3243 .enc = __VECS(aes_gcm_rfc4106_enc_tv_template),
3244 .dec = __VECS(aes_gcm_rfc4106_dec_tv_template)
3247 }, {
3248 .alg = "rfc4309(ccm(aes))",
3249 .test = alg_test_aead,
3250 .fips_allowed = 1,
3251 .suite = {
3252 .aead = {
3253 .enc = __VECS(aes_ccm_rfc4309_enc_tv_template),
3254 .dec = __VECS(aes_ccm_rfc4309_dec_tv_template)
3257 }, {
3258 .alg = "rfc4543(gcm(aes))",
3259 .test = alg_test_aead,
3260 .suite = {
3261 .aead = {
3262 .enc = __VECS(aes_gcm_rfc4543_enc_tv_template),
3263 .dec = __VECS(aes_gcm_rfc4543_dec_tv_template),
3266 }, {
3267 .alg = "rfc7539(chacha20,poly1305)",
3268 .test = alg_test_aead,
3269 .suite = {
3270 .aead = {
3271 .enc = __VECS(rfc7539_enc_tv_template),
3272 .dec = __VECS(rfc7539_dec_tv_template),
3275 }, {
3276 .alg = "rfc7539esp(chacha20,poly1305)",
3277 .test = alg_test_aead,
3278 .suite = {
3279 .aead = {
3280 .enc = __VECS(rfc7539esp_enc_tv_template),
3281 .dec = __VECS(rfc7539esp_dec_tv_template),
3284 }, {
3285 .alg = "rmd128",
3286 .test = alg_test_hash,
3287 .suite = {
3288 .hash = __VECS(rmd128_tv_template)
3290 }, {
3291 .alg = "rmd160",
3292 .test = alg_test_hash,
3293 .suite = {
3294 .hash = __VECS(rmd160_tv_template)
3296 }, {
3297 .alg = "rmd256",
3298 .test = alg_test_hash,
3299 .suite = {
3300 .hash = __VECS(rmd256_tv_template)
3302 }, {
3303 .alg = "rmd320",
3304 .test = alg_test_hash,
3305 .suite = {
3306 .hash = __VECS(rmd320_tv_template)
3308 }, {
3309 .alg = "rsa",
3310 .test = alg_test_akcipher,
3311 .fips_allowed = 1,
3312 .suite = {
3313 .akcipher = __VECS(rsa_tv_template)
3315 }, {
3316 .alg = "salsa20",
3317 .test = alg_test_skcipher,
3318 .suite = {
3319 .cipher = {
3320 .enc = __VECS(salsa20_stream_enc_tv_template)
3323 }, {
3324 .alg = "sha1",
3325 .test = alg_test_hash,
3326 .fips_allowed = 1,
3327 .suite = {
3328 .hash = __VECS(sha1_tv_template)
3330 }, {
3331 .alg = "sha224",
3332 .test = alg_test_hash,
3333 .fips_allowed = 1,
3334 .suite = {
3335 .hash = __VECS(sha224_tv_template)
3337 }, {
3338 .alg = "sha256",
3339 .test = alg_test_hash,
3340 .fips_allowed = 1,
3341 .suite = {
3342 .hash = __VECS(sha256_tv_template)
3344 }, {
3345 .alg = "sha3-224",
3346 .test = alg_test_hash,
3347 .fips_allowed = 1,
3348 .suite = {
3349 .hash = __VECS(sha3_224_tv_template)
3351 }, {
3352 .alg = "sha3-256",
3353 .test = alg_test_hash,
3354 .fips_allowed = 1,
3355 .suite = {
3356 .hash = __VECS(sha3_256_tv_template)
3358 }, {
3359 .alg = "sha3-384",
3360 .test = alg_test_hash,
3361 .fips_allowed = 1,
3362 .suite = {
3363 .hash = __VECS(sha3_384_tv_template)
3365 }, {
3366 .alg = "sha3-512",
3367 .test = alg_test_hash,
3368 .fips_allowed = 1,
3369 .suite = {
3370 .hash = __VECS(sha3_512_tv_template)
3372 }, {
3373 .alg = "sha384",
3374 .test = alg_test_hash,
3375 .fips_allowed = 1,
3376 .suite = {
3377 .hash = __VECS(sha384_tv_template)
3379 }, {
3380 .alg = "sha512",
3381 .test = alg_test_hash,
3382 .fips_allowed = 1,
3383 .suite = {
3384 .hash = __VECS(sha512_tv_template)
3386 }, {
3387 .alg = "tgr128",
3388 .test = alg_test_hash,
3389 .suite = {
3390 .hash = __VECS(tgr128_tv_template)
3392 }, {
3393 .alg = "tgr160",
3394 .test = alg_test_hash,
3395 .suite = {
3396 .hash = __VECS(tgr160_tv_template)
3398 }, {
3399 .alg = "tgr192",
3400 .test = alg_test_hash,
3401 .suite = {
3402 .hash = __VECS(tgr192_tv_template)
3404 }, {
3405 .alg = "vmac(aes)",
3406 .test = alg_test_hash,
3407 .suite = {
3408 .hash = __VECS(aes_vmac128_tv_template)
3410 }, {
3411 .alg = "wp256",
3412 .test = alg_test_hash,
3413 .suite = {
3414 .hash = __VECS(wp256_tv_template)
3416 }, {
3417 .alg = "wp384",
3418 .test = alg_test_hash,
3419 .suite = {
3420 .hash = __VECS(wp384_tv_template)
3422 }, {
3423 .alg = "wp512",
3424 .test = alg_test_hash,
3425 .suite = {
3426 .hash = __VECS(wp512_tv_template)
3428 }, {
3429 .alg = "xcbc(aes)",
3430 .test = alg_test_hash,
3431 .suite = {
3432 .hash = __VECS(aes_xcbc128_tv_template)
3434 }, {
3435 .alg = "xts(aes)",
3436 .test = alg_test_skcipher,
3437 .fips_allowed = 1,
3438 .suite = {
3439 .cipher = {
3440 .enc = __VECS(aes_xts_enc_tv_template),
3441 .dec = __VECS(aes_xts_dec_tv_template)
3444 }, {
3445 .alg = "xts(camellia)",
3446 .test = alg_test_skcipher,
3447 .suite = {
3448 .cipher = {
3449 .enc = __VECS(camellia_xts_enc_tv_template),
3450 .dec = __VECS(camellia_xts_dec_tv_template)
3453 }, {
3454 .alg = "xts(cast6)",
3455 .test = alg_test_skcipher,
3456 .suite = {
3457 .cipher = {
3458 .enc = __VECS(cast6_xts_enc_tv_template),
3459 .dec = __VECS(cast6_xts_dec_tv_template)
3462 }, {
3463 .alg = "xts(serpent)",
3464 .test = alg_test_skcipher,
3465 .suite = {
3466 .cipher = {
3467 .enc = __VECS(serpent_xts_enc_tv_template),
3468 .dec = __VECS(serpent_xts_dec_tv_template)
3471 }, {
3472 .alg = "xts(twofish)",
3473 .test = alg_test_skcipher,
3474 .suite = {
3475 .cipher = {
3476 .enc = __VECS(tf_xts_enc_tv_template),
3477 .dec = __VECS(tf_xts_dec_tv_template)
3483 static bool alg_test_descs_checked;
3485 static void alg_test_descs_check_order(void)
3487 int i;
3489 /* only check once */
3490 if (alg_test_descs_checked)
3491 return;
3493 alg_test_descs_checked = true;
3495 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
3496 int diff = strcmp(alg_test_descs[i - 1].alg,
3497 alg_test_descs[i].alg);
3499 if (WARN_ON(diff > 0)) {
3500 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
3501 alg_test_descs[i - 1].alg,
3502 alg_test_descs[i].alg);
3505 if (WARN_ON(diff == 0)) {
3506 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
3507 alg_test_descs[i].alg);
3512 static int alg_find_test(const char *alg)
3514 int start = 0;
3515 int end = ARRAY_SIZE(alg_test_descs);
3517 while (start < end) {
3518 int i = (start + end) / 2;
3519 int diff = strcmp(alg_test_descs[i].alg, alg);
3521 if (diff > 0) {
3522 end = i;
3523 continue;
3526 if (diff < 0) {
3527 start = i + 1;
3528 continue;
3531 return i;
3534 return -1;
/*
 * alg_test() - run the self-tests for a newly registered algorithm.
 * @driver: driver-specific implementation name (e.g. "aes-generic")
 * @alg:    canonical algorithm name (e.g. "aes")
 * @type:   crypto algorithm type flags
 * @mask:   crypto algorithm type mask
 *
 * Called by the crypto manager when an implementation registers.  Both
 * the canonical name and the driver name are looked up, so a driver can
 * carry its own vectors.  In FIPS mode a failing self-test panics the
 * kernel and non-approved algorithms are rejected with -EINVAL.
 *
 * Return: 0 on success or if no test exists, negative errno on failure.
 */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	/* "notests" module parameter skips everything outside FIPS mode. */
	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	/* Validate table ordering before the binary searches below. */
	alg_test_descs_check_order();

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		/*
		 * Bare (single-block) ciphers are tested through their
		 * "ecb(...)" wrapper entry in alg_test_descs[].
		 */
		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	/* Test vectors may be keyed by either the alg or the driver name. */
	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	/* Run both suites when distinct; OR the results so any failure sticks. */
	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	/* FIPS requires a hard failure when a self-test does not pass. */
	if (fips_enabled && rc)
		panic("%s: %s alg self test failed in fips mode!\n", driver, alg);

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}
3601 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
3603 EXPORT_SYMBOL_GPL(alg_test);