crypto/testmgr.c
1 /*
2 * Algorithm testing framework and tests.
4 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
5 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
6 * Copyright (c) 2007 Nokia Siemens Networks
7 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
9 * Updated RFC4106 AES-GCM testing.
10 * Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
11 * Adrian Hoban <adrian.hoban@intel.com>
12 * Gabriele Paoloni <gabriele.paoloni@intel.com>
13 * Tadeusz Struk (tadeusz.struk@intel.com)
14 * Copyright (c) 2010, Intel Corporation.
16 * This program is free software; you can redistribute it and/or modify it
17 * under the terms of the GNU General Public License as published by the Free
18 * Software Foundation; either version 2 of the License, or (at your option)
19 * any later version.
20 *
21 */
23 #include <crypto/aead.h>
24 #include <crypto/hash.h>
25 #include <crypto/skcipher.h>
26 #include <linux/err.h>
27 #include <linux/fips.h>
28 #include <linux/module.h>
29 #include <linux/scatterlist.h>
30 #include <linux/slab.h>
31 #include <linux/string.h>
32 #include <crypto/rng.h>
33 #include <crypto/drbg.h>
34 #include <crypto/akcipher.h>
35 #include <crypto/kpp.h>
37 #include "internal.h"
39 static bool notests;
40 module_param(notests, bool, 0644);
41 MODULE_PARM_DESC(notests, "disable crypto self-tests");
43 #ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS
45 /* a perfect nop */
46 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
48 return 0;
51 #else
53 #include "testmgr.h"
55 /*
56 * Need slab memory for testing (size in number of pages).
57 */
58 #define XBUFSIZE 8
60 /*
61 * Indexes into the xbuf to simulate cross-page access.
62 */
63 #define IDX1 32
64 #define IDX2 32400
65 #define IDX3 1
66 #define IDX4 8193
67 #define IDX5 22222
68 #define IDX6 17101
69 #define IDX7 27333
70 #define IDX8 3000
72 /*
73 * Used by test_cipher()
74 */
75 #define ENCRYPT 1
76 #define DECRYPT 0
78 struct tcrypt_result {
79 struct completion completion;
80 int err;
83 struct aead_test_suite {
84 struct {
85 struct aead_testvec *vecs;
86 unsigned int count;
87 } enc, dec;
90 struct cipher_test_suite {
91 struct {
92 struct cipher_testvec *vecs;
93 unsigned int count;
94 } enc, dec;
97 struct comp_test_suite {
98 struct {
99 struct comp_testvec *vecs;
100 unsigned int count;
101 } comp, decomp;
104 struct hash_test_suite {
105 struct hash_testvec *vecs;
106 unsigned int count;
109 struct cprng_test_suite {
110 struct cprng_testvec *vecs;
111 unsigned int count;
114 struct drbg_test_suite {
115 struct drbg_testvec *vecs;
116 unsigned int count;
119 struct akcipher_test_suite {
120 struct akcipher_testvec *vecs;
121 unsigned int count;
124 struct kpp_test_suite {
125 struct kpp_testvec *vecs;
126 unsigned int count;
129 struct alg_test_desc {
130 const char *alg;
131 int (*test)(const struct alg_test_desc *desc, const char *driver,
132 u32 type, u32 mask);
133 int fips_allowed; /* set if alg is allowed in fips mode */
135 union {
136 struct aead_test_suite aead;
137 struct cipher_test_suite cipher;
138 struct comp_test_suite comp;
139 struct hash_test_suite hash;
140 struct cprng_test_suite cprng;
141 struct drbg_test_suite drbg;
142 struct akcipher_test_suite akcipher;
143 struct kpp_test_suite kpp;
144 } suite;
147 static unsigned int IDX[8] = { IDX1, IDX2, IDX3, IDX4, IDX5, IDX6, IDX7, IDX8 };
149 static void hexdump(unsigned char *buf, unsigned int len)
151 print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
152 16, 1,
153 buf, len, false);
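/*
 * Completion callback for asynchronous crypto requests: ignore the
 * -EINPROGRESS notification, record the final status and wake the waiter.
 */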
156 static void tcrypt_complete(struct crypto_async_request *req, int err)
158 struct tcrypt_result *res = req->data;
160 if (err == -EINPROGRESS)
161 return;
163 res->err = err;
164 complete(&res->completion);
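/* Allocate XBUFSIZE single pages for test I/O; free them all on failure. */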
167 static int testmgr_alloc_buf(char *buf[XBUFSIZE])
169 int i;
171 for (i = 0; i < XBUFSIZE; i++) {
172 buf[i] = (void *)__get_free_page(GFP_KERNEL);
173 if (!buf[i])
174 goto err_free_buf;
177 return 0;
179 err_free_buf:
180 while (i-- > 0)
181 free_page((unsigned long)buf[i]);
183 return -ENOMEM;
186 static void testmgr_free_buf(char *buf[XBUFSIZE])
188 int i;
190 for (i = 0; i < XBUFSIZE; i++)
191 free_page((unsigned long)buf[i]);
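/*
 * Wait for an operation that returned -EINPROGRESS or -EBUSY and pick up
 * the real result from the completion filled in by tcrypt_complete().
 */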
194 static int wait_async_op(struct tcrypt_result *tr, int ret)
196 if (ret == -EINPROGRESS || ret == -EBUSY) {
197 wait_for_completion(&tr->completion);
198 reinit_completion(&tr->completion);
199 ret = tr->err;
201 return ret;
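/*
 * Export the current hash state, reallocate the request, import the state
 * back and hash the next tap[] chunk - exercises export()/import() in the
 * middle of an update sequence.
 */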
204 static int ahash_partial_update(struct ahash_request **preq,
205 struct crypto_ahash *tfm, struct hash_testvec *template,
206 void *hash_buff, int k, int temp, struct scatterlist *sg,
207 const char *algo, char *result, struct tcrypt_result *tresult)
209 char *state;
210 struct ahash_request *req;
211 int statesize, ret = -EINVAL;
213 req = *preq;
214 statesize = crypto_ahash_statesize(
215 crypto_ahash_reqtfm(req));
216 state = kmalloc(statesize, GFP_KERNEL);
217 if (!state) {
218 pr_err("alg: hash: Failed to alloc state for %s\n", algo);
219 goto out_nostate;
221 ret = crypto_ahash_export(req, state);
222 if (ret) {
223 pr_err("alg: hash: Failed to export() for %s\n", algo);
224 goto out;
226 ahash_request_free(req);
227 req = ahash_request_alloc(tfm, GFP_KERNEL);
228 if (!req) {
229 pr_err("alg: hash: Failed to alloc request for %s\n", algo);
230 goto out_noreq;
232 ahash_request_set_callback(req,
233 CRYPTO_TFM_REQ_MAY_BACKLOG,
234 tcrypt_complete, tresult);
236 memcpy(hash_buff, template->plaintext + temp,
237 template->tap[k]);
238 sg_init_one(&sg[0], hash_buff, template->tap[k]);
239 ahash_request_set_crypt(req, sg, result, template->tap[k]);
240 ret = crypto_ahash_import(req, state);
241 if (ret) {
242 pr_err("alg: hash: Failed to import() for %s\n", algo);
243 goto out;
245 ret = wait_async_op(tresult, crypto_ahash_update(req));
246 if (ret)
247 goto out;
248 *preq = req;
249 ret = 0;
250 goto out_noreq;
251 out:
252 ahash_request_free(req);
253 out_noreq:
254 kfree(state);
255 out_nostate:
256 return ret;
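/*
 * Hash test core: run each vector as a one-shot digest (or as
 * init/update/final when use_digest is false), then repeat it over
 * scatterlists split according to tap[], and finally exercise partial
 * updates via ahash_partial_update().
 */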
259 static int __test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
260 unsigned int tcount, bool use_digest,
261 const int align_offset)
263 const char *algo = crypto_tfm_alg_driver_name(crypto_ahash_tfm(tfm));
264 unsigned int i, j, k, temp;
265 struct scatterlist sg[8];
266 char *result;
267 char *key;
268 struct ahash_request *req;
269 struct tcrypt_result tresult;
270 void *hash_buff;
271 char *xbuf[XBUFSIZE];
272 int ret = -ENOMEM;
274 result = kmalloc(MAX_DIGEST_SIZE, GFP_KERNEL);
275 if (!result)
276 return ret;
277 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
278 if (!key)
279 goto out_nobuf;
280 if (testmgr_alloc_buf(xbuf))
281 goto out_nobuf;
283 init_completion(&tresult.completion);
285 req = ahash_request_alloc(tfm, GFP_KERNEL);
286 if (!req) {
287 printk(KERN_ERR "alg: hash: Failed to allocate request for "
288 "%s\n", algo);
289 goto out_noreq;
291 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
292 tcrypt_complete, &tresult);
294 j = 0;
295 for (i = 0; i < tcount; i++) {
296 if (template[i].np)
297 continue;
299 ret = -EINVAL;
300 if (WARN_ON(align_offset + template[i].psize > PAGE_SIZE))
301 goto out;
303 j++;
304 memset(result, 0, MAX_DIGEST_SIZE);
306 hash_buff = xbuf[0];
307 hash_buff += align_offset;
309 memcpy(hash_buff, template[i].plaintext, template[i].psize);
310 sg_init_one(&sg[0], hash_buff, template[i].psize);
312 if (template[i].ksize) {
313 crypto_ahash_clear_flags(tfm, ~0);
314 if (template[i].ksize > MAX_KEYLEN) {
315 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
316 j, algo, template[i].ksize, MAX_KEYLEN);
317 ret = -EINVAL;
318 goto out;
320 memcpy(key, template[i].key, template[i].ksize);
321 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
322 if (ret) {
323 printk(KERN_ERR "alg: hash: setkey failed on "
324 "test %d for %s: ret=%d\n", j, algo,
325 -ret);
326 goto out;
330 ahash_request_set_crypt(req, sg, result, template[i].psize);
331 if (use_digest) {
332 ret = wait_async_op(&tresult, crypto_ahash_digest(req));
333 if (ret) {
334 pr_err("alg: hash: digest failed on test %d "
335 "for %s: ret=%d\n", j, algo, -ret);
336 goto out;
338 } else {
339 ret = wait_async_op(&tresult, crypto_ahash_init(req));
340 if (ret) {
341 pr_err("alg: hash: init failed on test %d "
342 "for %s: ret=%d\n", j, algo, -ret);
343 goto out;
345 ret = wait_async_op(&tresult, crypto_ahash_update(req));
346 if (ret) {
347 pr_err("alg: hash: update failed on test %d "
348 "for %s: ret=%d\n", j, algo, -ret);
349 goto out;
351 ret = wait_async_op(&tresult, crypto_ahash_final(req));
352 if (ret) {
353 pr_err("alg: hash: final failed on test %d "
354 "for %s: ret=%d\n", j, algo, -ret);
355 goto out;
359 if (memcmp(result, template[i].digest,
360 crypto_ahash_digestsize(tfm))) {
361 printk(KERN_ERR "alg: hash: Test %d failed for %s\n",
362 j, algo);
363 hexdump(result, crypto_ahash_digestsize(tfm));
364 ret = -EINVAL;
365 goto out;
369 j = 0;
370 for (i = 0; i < tcount; i++) {
371 /* alignment tests are only done with continuous buffers */
372 if (align_offset != 0)
373 break;
375 if (!template[i].np)
376 continue;
378 j++;
379 memset(result, 0, MAX_DIGEST_SIZE);
381 temp = 0;
382 sg_init_table(sg, template[i].np);
383 ret = -EINVAL;
384 for (k = 0; k < template[i].np; k++) {
385 if (WARN_ON(offset_in_page(IDX[k]) +
386 template[i].tap[k] > PAGE_SIZE))
387 goto out;
388 sg_set_buf(&sg[k],
389 memcpy(xbuf[IDX[k] >> PAGE_SHIFT] +
390 offset_in_page(IDX[k]),
391 template[i].plaintext + temp,
392 template[i].tap[k]),
393 template[i].tap[k]);
394 temp += template[i].tap[k];
397 if (template[i].ksize) {
398 if (template[i].ksize > MAX_KEYLEN) {
399 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
400 j, algo, template[i].ksize, MAX_KEYLEN);
401 ret = -EINVAL;
402 goto out;
404 crypto_ahash_clear_flags(tfm, ~0);
405 memcpy(key, template[i].key, template[i].ksize);
406 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
408 if (ret) {
409 printk(KERN_ERR "alg: hash: setkey "
410 "failed on chunking test %d "
411 "for %s: ret=%d\n", j, algo, -ret);
412 goto out;
416 ahash_request_set_crypt(req, sg, result, template[i].psize);
417 ret = crypto_ahash_digest(req);
418 switch (ret) {
419 case 0:
420 break;
421 case -EINPROGRESS:
422 case -EBUSY:
423 wait_for_completion(&tresult.completion);
424 reinit_completion(&tresult.completion);
425 ret = tresult.err;
426 if (!ret)
427 break;
428 /* fall through */
429 default:
430 printk(KERN_ERR "alg: hash: digest failed "
431 "on chunking test %d for %s: "
432 "ret=%d\n", j, algo, -ret);
433 goto out;
436 if (memcmp(result, template[i].digest,
437 crypto_ahash_digestsize(tfm))) {
438 printk(KERN_ERR "alg: hash: Chunking test %d "
439 "failed for %s\n", j, algo);
440 hexdump(result, crypto_ahash_digestsize(tfm));
441 ret = -EINVAL;
442 goto out;
446 /* partial update exercise */
447 j = 0;
448 for (i = 0; i < tcount; i++) {
449 /* alignment tests are only done with continuous buffers */
450 if (align_offset != 0)
451 break;
453 if (template[i].np < 2)
454 continue;
456 j++;
457 memset(result, 0, MAX_DIGEST_SIZE);
459 ret = -EINVAL;
460 hash_buff = xbuf[0];
461 memcpy(hash_buff, template[i].plaintext,
462 template[i].tap[0]);
463 sg_init_one(&sg[0], hash_buff, template[i].tap[0]);
465 if (template[i].ksize) {
466 crypto_ahash_clear_flags(tfm, ~0);
467 if (template[i].ksize > MAX_KEYLEN) {
468 pr_err("alg: hash: setkey failed on test %d for %s: key size %d > %d\n",
469 j, algo, template[i].ksize, MAX_KEYLEN);
470 ret = -EINVAL;
471 goto out;
473 memcpy(key, template[i].key, template[i].ksize);
474 ret = crypto_ahash_setkey(tfm, key, template[i].ksize);
475 if (ret) {
476 pr_err("alg: hash: setkey failed on test %d for %s: ret=%d\n",
477 j, algo, -ret);
478 goto out;
482 ahash_request_set_crypt(req, sg, result, template[i].tap[0]);
483 ret = wait_async_op(&tresult, crypto_ahash_init(req));
484 if (ret) {
485 pr_err("alg: hash: init failed on test %d for %s: ret=%d\n",
486 j, algo, -ret);
487 goto out;
489 ret = wait_async_op(&tresult, crypto_ahash_update(req));
490 if (ret) {
491 pr_err("alg: hash: update failed on test %d for %s: ret=%d\n",
492 j, algo, -ret);
493 goto out;
496 temp = template[i].tap[0];
497 for (k = 1; k < template[i].np; k++) {
498 ret = ahash_partial_update(&req, tfm, &template[i],
499 hash_buff, k, temp, &sg[0], algo, result,
500 &tresult);
501 if (ret) {
502 pr_err("hash: partial update failed on test %d for %s: ret=%d\n",
503 j, algo, -ret);
504 goto out_noreq;
506 temp += template[i].tap[k];
508 ret = wait_async_op(&tresult, crypto_ahash_final(req));
509 if (ret) {
510 pr_err("alg: hash: final failed on test %d for %s: ret=%d\n",
511 j, algo, -ret);
512 goto out;
514 if (memcmp(result, template[i].digest,
515 crypto_ahash_digestsize(tfm))) {
516 pr_err("alg: hash: Partial Test %d failed for %s\n",
517 j, algo);
518 hexdump(result, crypto_ahash_digestsize(tfm));
519 ret = -EINVAL;
520 goto out;
524 ret = 0;
526 out:
527 ahash_request_free(req);
528 out_noreq:
529 testmgr_free_buf(xbuf);
530 out_nobuf:
531 kfree(key);
532 kfree(result);
533 return ret;
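/*
 * Run __test_hash() on aligned buffers, with a one byte offset and, if the
 * algorithm declares an alignmask, with an alignmask + 1 offset.
 */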
536 static int test_hash(struct crypto_ahash *tfm, struct hash_testvec *template,
537 unsigned int tcount, bool use_digest)
539 unsigned int alignmask;
540 int ret;
542 ret = __test_hash(tfm, template, tcount, use_digest, 0);
543 if (ret)
544 return ret;
546 /* test unaligned buffers, check with one byte offset */
547 ret = __test_hash(tfm, template, tcount, use_digest, 1);
548 if (ret)
549 return ret;
551 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
552 if (alignmask) {
553 /* Check if alignment mask for tfm is correctly set. */
554 ret = __test_hash(tfm, template, tcount, use_digest,
555 alignmask + 1);
556 if (ret)
557 return ret;
560 return 0;
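/*
 * AEAD test core: for each vector, set the key and authsize, run
 * encryption or decryption on linear buffers and then on scatterlists
 * split according to tap[]/atap[], optionally with a separate destination
 * (diff_dst) and an extra alignment offset.
 */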
563 static int __test_aead(struct crypto_aead *tfm, int enc,
564 struct aead_testvec *template, unsigned int tcount,
565 const bool diff_dst, const int align_offset)
567 const char *algo = crypto_tfm_alg_driver_name(crypto_aead_tfm(tfm));
568 unsigned int i, j, k, n, temp;
569 int ret = -ENOMEM;
570 char *q;
571 char *key;
572 struct aead_request *req;
573 struct scatterlist *sg;
574 struct scatterlist *sgout;
575 const char *e, *d;
576 struct tcrypt_result result;
577 unsigned int authsize, iv_len;
578 void *input;
579 void *output;
580 void *assoc;
581 char *iv;
582 char *xbuf[XBUFSIZE];
583 char *xoutbuf[XBUFSIZE];
584 char *axbuf[XBUFSIZE];
586 iv = kzalloc(MAX_IVLEN, GFP_KERNEL);
587 if (!iv)
588 return ret;
589 key = kmalloc(MAX_KEYLEN, GFP_KERNEL);
590 if (!key)
591 goto out_noxbuf;
592 if (testmgr_alloc_buf(xbuf))
593 goto out_noxbuf;
594 if (testmgr_alloc_buf(axbuf))
595 goto out_noaxbuf;
596 if (diff_dst && testmgr_alloc_buf(xoutbuf))
597 goto out_nooutbuf;
599 /* avoid "the frame size is larger than 1024 bytes" compiler warning */
600 sg = kmalloc(sizeof(*sg) * 8 * (diff_dst ? 4 : 2), GFP_KERNEL);
601 if (!sg)
602 goto out_nosg;
603 sgout = &sg[16];
605 if (diff_dst)
606 d = "-ddst";
607 else
608 d = "";
610 if (enc == ENCRYPT)
611 e = "encryption";
612 else
613 e = "decryption";
615 init_completion(&result.completion);
617 req = aead_request_alloc(tfm, GFP_KERNEL);
618 if (!req) {
619 pr_err("alg: aead%s: Failed to allocate request for %s\n",
620 d, algo);
621 goto out;
624 aead_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
625 tcrypt_complete, &result);
627 iv_len = crypto_aead_ivsize(tfm);
629 for (i = 0, j = 0; i < tcount; i++) {
630 if (template[i].np)
631 continue;
633 j++;
635 /* some templates have no input data but they will
636 * touch input
637 */
638 input = xbuf[0];
639 input += align_offset;
640 assoc = axbuf[0];
642 ret = -EINVAL;
643 if (WARN_ON(align_offset + template[i].ilen >
644 PAGE_SIZE || template[i].alen > PAGE_SIZE))
645 goto out;
647 memcpy(input, template[i].input, template[i].ilen);
648 memcpy(assoc, template[i].assoc, template[i].alen);
649 if (template[i].iv)
650 memcpy(iv, template[i].iv, iv_len);
651 else
652 memset(iv, 0, iv_len);
654 crypto_aead_clear_flags(tfm, ~0);
655 if (template[i].wk)
656 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
658 if (template[i].klen > MAX_KEYLEN) {
659 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
660 d, j, algo, template[i].klen,
661 MAX_KEYLEN);
662 ret = -EINVAL;
663 goto out;
665 memcpy(key, template[i].key, template[i].klen);
667 ret = crypto_aead_setkey(tfm, key, template[i].klen);
668 if (!ret == template[i].fail) {
669 pr_err("alg: aead%s: setkey failed on test %d for %s: flags=%x\n",
670 d, j, algo, crypto_aead_get_flags(tfm));
671 goto out;
672 } else if (ret)
673 continue;
675 authsize = abs(template[i].rlen - template[i].ilen);
676 ret = crypto_aead_setauthsize(tfm, authsize);
677 if (ret) {
678 pr_err("alg: aead%s: Failed to set authsize to %u on test %d for %s\n",
679 d, authsize, j, algo);
680 goto out;
683 k = !!template[i].alen;
684 sg_init_table(sg, k + 1);
685 sg_set_buf(&sg[0], assoc, template[i].alen);
686 sg_set_buf(&sg[k], input,
687 template[i].ilen + (enc ? authsize : 0));
688 output = input;
690 if (diff_dst) {
691 sg_init_table(sgout, k + 1);
692 sg_set_buf(&sgout[0], assoc, template[i].alen);
694 output = xoutbuf[0];
695 output += align_offset;
696 sg_set_buf(&sgout[k], output,
697 template[i].rlen + (enc ? 0 : authsize));
700 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
701 template[i].ilen, iv);
703 aead_request_set_ad(req, template[i].alen);
705 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
707 switch (ret) {
708 case 0:
709 if (template[i].novrfy) {
710 /* verification was supposed to fail */
711 pr_err("alg: aead%s: %s failed on test %d for %s: ret was 0, expected -EBADMSG\n",
712 d, e, j, algo);
713 /* so really, we got a bad message */
714 ret = -EBADMSG;
715 goto out;
717 break;
718 case -EINPROGRESS:
719 case -EBUSY:
720 wait_for_completion(&result.completion);
721 reinit_completion(&result.completion);
722 ret = result.err;
723 if (!ret)
724 break;
725 case -EBADMSG:
726 if (template[i].novrfy)
727 /* verification failure was expected */
728 continue;
729 /* fall through */
730 default:
731 pr_err("alg: aead%s: %s failed on test %d for %s: ret=%d\n",
732 d, e, j, algo, -ret);
733 goto out;
736 q = output;
737 if (memcmp(q, template[i].result, template[i].rlen)) {
738 pr_err("alg: aead%s: Test %d failed on %s for %s\n",
739 d, j, e, algo);
740 hexdump(q, template[i].rlen);
741 ret = -EINVAL;
742 goto out;
746 for (i = 0, j = 0; i < tcount; i++) {
747 /* alignment tests are only done with continuous buffers */
748 if (align_offset != 0)
749 break;
751 if (!template[i].np)
752 continue;
754 j++;
756 if (template[i].iv)
757 memcpy(iv, template[i].iv, iv_len);
758 else
759 memset(iv, 0, MAX_IVLEN);
761 crypto_aead_clear_flags(tfm, ~0);
762 if (template[i].wk)
763 crypto_aead_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
764 if (template[i].klen > MAX_KEYLEN) {
765 pr_err("alg: aead%s: setkey failed on test %d for %s: key size %d > %d\n",
766 d, j, algo, template[i].klen, MAX_KEYLEN);
767 ret = -EINVAL;
768 goto out;
770 memcpy(key, template[i].key, template[i].klen);
772 ret = crypto_aead_setkey(tfm, key, template[i].klen);
773 if (!ret == template[i].fail) {
774 pr_err("alg: aead%s: setkey failed on chunk test %d for %s: flags=%x\n",
775 d, j, algo, crypto_aead_get_flags(tfm));
776 goto out;
777 } else if (ret)
778 continue;
780 authsize = abs(template[i].rlen - template[i].ilen);
782 ret = -EINVAL;
783 sg_init_table(sg, template[i].anp + template[i].np);
784 if (diff_dst)
785 sg_init_table(sgout, template[i].anp + template[i].np);
787 ret = -EINVAL;
788 for (k = 0, temp = 0; k < template[i].anp; k++) {
789 if (WARN_ON(offset_in_page(IDX[k]) +
790 template[i].atap[k] > PAGE_SIZE))
791 goto out;
792 sg_set_buf(&sg[k],
793 memcpy(axbuf[IDX[k] >> PAGE_SHIFT] +
794 offset_in_page(IDX[k]),
795 template[i].assoc + temp,
796 template[i].atap[k]),
797 template[i].atap[k]);
798 if (diff_dst)
799 sg_set_buf(&sgout[k],
800 axbuf[IDX[k] >> PAGE_SHIFT] +
801 offset_in_page(IDX[k]),
802 template[i].atap[k]);
803 temp += template[i].atap[k];
806 for (k = 0, temp = 0; k < template[i].np; k++) {
807 if (WARN_ON(offset_in_page(IDX[k]) +
808 template[i].tap[k] > PAGE_SIZE))
809 goto out;
811 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
812 memcpy(q, template[i].input + temp, template[i].tap[k]);
813 sg_set_buf(&sg[template[i].anp + k],
814 q, template[i].tap[k]);
816 if (diff_dst) {
817 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
818 offset_in_page(IDX[k]);
820 memset(q, 0, template[i].tap[k]);
822 sg_set_buf(&sgout[template[i].anp + k],
823 q, template[i].tap[k]);
826 n = template[i].tap[k];
827 if (k == template[i].np - 1 && enc)
828 n += authsize;
829 if (offset_in_page(q) + n < PAGE_SIZE)
830 q[n] = 0;
832 temp += template[i].tap[k];
835 ret = crypto_aead_setauthsize(tfm, authsize);
836 if (ret) {
837 pr_err("alg: aead%s: Failed to set authsize to %u on chunk test %d for %s\n",
838 d, authsize, j, algo);
839 goto out;
842 if (enc) {
843 if (WARN_ON(sg[template[i].anp + k - 1].offset +
844 sg[template[i].anp + k - 1].length +
845 authsize > PAGE_SIZE)) {
846 ret = -EINVAL;
847 goto out;
850 if (diff_dst)
851 sgout[template[i].anp + k - 1].length +=
852 authsize;
853 sg[template[i].anp + k - 1].length += authsize;
856 aead_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
857 template[i].ilen,
858 iv);
860 aead_request_set_ad(req, template[i].alen);
862 ret = enc ? crypto_aead_encrypt(req) : crypto_aead_decrypt(req);
864 switch (ret) {
865 case 0:
866 if (template[i].novrfy) {
867 /* verification was supposed to fail */
868 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret was 0, expected -EBADMSG\n",
869 d, e, j, algo);
870 /* so really, we got a bad message */
871 ret = -EBADMSG;
872 goto out;
874 break;
875 case -EINPROGRESS:
876 case -EBUSY:
877 wait_for_completion(&result.completion);
878 reinit_completion(&result.completion);
879 ret = result.err;
880 if (!ret)
881 break;
882 case -EBADMSG:
883 if (template[i].novrfy)
884 /* verification failure was expected */
885 continue;
886 /* fall through */
887 default:
888 pr_err("alg: aead%s: %s failed on chunk test %d for %s: ret=%d\n",
889 d, e, j, algo, -ret);
890 goto out;
893 ret = -EINVAL;
894 for (k = 0, temp = 0; k < template[i].np; k++) {
895 if (diff_dst)
896 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
897 offset_in_page(IDX[k]);
898 else
899 q = xbuf[IDX[k] >> PAGE_SHIFT] +
900 offset_in_page(IDX[k]);
902 n = template[i].tap[k];
903 if (k == template[i].np - 1)
904 n += enc ? authsize : -authsize;
906 if (memcmp(q, template[i].result + temp, n)) {
907 pr_err("alg: aead%s: Chunk test %d failed on %s at page %u for %s\n",
908 d, j, e, k, algo);
909 hexdump(q, n);
910 goto out;
913 q += n;
914 if (k == template[i].np - 1 && !enc) {
915 if (!diff_dst &&
916 memcmp(q, template[i].input +
917 temp + n, authsize))
918 n = authsize;
919 else
920 n = 0;
921 } else {
922 for (n = 0; offset_in_page(q + n) && q[n]; n++)
923 ;
924 }
925 if (n) {
926 pr_err("alg: aead%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
927 d, j, e, k, algo, n);
928 hexdump(q, n);
929 goto out;
932 temp += template[i].tap[k];
936 ret = 0;
938 out:
939 aead_request_free(req);
940 kfree(sg);
941 out_nosg:
942 if (diff_dst)
943 testmgr_free_buf(xoutbuf);
944 out_nooutbuf:
945 testmgr_free_buf(axbuf);
946 out_noaxbuf:
947 testmgr_free_buf(xbuf);
948 out_noxbuf:
949 kfree(key);
950 kfree(iv);
951 return ret;
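/* Cover the dst == src, dst != src and misaligned buffer cases. */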
954 static int test_aead(struct crypto_aead *tfm, int enc,
955 struct aead_testvec *template, unsigned int tcount)
957 unsigned int alignmask;
958 int ret;
960 /* test 'dst == src' case */
961 ret = __test_aead(tfm, enc, template, tcount, false, 0);
962 if (ret)
963 return ret;
965 /* test 'dst != src' case */
966 ret = __test_aead(tfm, enc, template, tcount, true, 0);
967 if (ret)
968 return ret;
970 /* test unaligned buffers, check with one byte offset */
971 ret = __test_aead(tfm, enc, template, tcount, true, 1);
972 if (ret)
973 return ret;
975 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
976 if (alignmask) {
977 /* Check if alignment mask for tfm is correctly set. */
978 ret = __test_aead(tfm, enc, template, tcount, true,
979 alignmask + 1);
980 if (ret)
981 return ret;
984 return 0;
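/*
 * Single block cipher test: encrypt or decrypt each vector in place, one
 * block at a time, and compare against the expected result.
 */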
987 static int test_cipher(struct crypto_cipher *tfm, int enc,
988 struct cipher_testvec *template, unsigned int tcount)
990 const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
991 unsigned int i, j, k;
992 char *q;
993 const char *e;
994 void *data;
995 char *xbuf[XBUFSIZE];
996 int ret = -ENOMEM;
998 if (testmgr_alloc_buf(xbuf))
999 goto out_nobuf;
1001 if (enc == ENCRYPT)
1002 e = "encryption";
1003 else
1004 e = "decryption";
1006 j = 0;
1007 for (i = 0; i < tcount; i++) {
1008 if (template[i].np)
1009 continue;
1011 j++;
1013 ret = -EINVAL;
1014 if (WARN_ON(template[i].ilen > PAGE_SIZE))
1015 goto out;
1017 data = xbuf[0];
1018 memcpy(data, template[i].input, template[i].ilen);
1020 crypto_cipher_clear_flags(tfm, ~0);
1021 if (template[i].wk)
1022 crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_WEAK_KEY);
1024 ret = crypto_cipher_setkey(tfm, template[i].key,
1025 template[i].klen);
1026 if (!ret == template[i].fail) {
1027 printk(KERN_ERR "alg: cipher: setkey failed "
1028 "on test %d for %s: flags=%x\n", j,
1029 algo, crypto_cipher_get_flags(tfm));
1030 goto out;
1031 } else if (ret)
1032 continue;
1034 for (k = 0; k < template[i].ilen;
1035 k += crypto_cipher_blocksize(tfm)) {
1036 if (enc)
1037 crypto_cipher_encrypt_one(tfm, data + k,
1038 data + k);
1039 else
1040 crypto_cipher_decrypt_one(tfm, data + k,
1041 data + k);
1044 q = data;
1045 if (memcmp(q, template[i].result, template[i].rlen)) {
1046 printk(KERN_ERR "alg: cipher: Test %d failed "
1047 "on %s for %s\n", j, e, algo);
1048 hexdump(q, template[i].rlen);
1049 ret = -EINVAL;
1050 goto out;
1054 ret = 0;
1056 out:
1057 testmgr_free_buf(xbuf);
1058 out_nobuf:
1059 return ret;
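/*
 * skcipher test core: linear and chunked (scatterlist) runs for each
 * vector, with optional separate destination buffers, alignment offsets
 * and output IV checking.
 */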
1062 static int __test_skcipher(struct crypto_skcipher *tfm, int enc,
1063 struct cipher_testvec *template, unsigned int tcount,
1064 const bool diff_dst, const int align_offset)
1066 const char *algo =
1067 crypto_tfm_alg_driver_name(crypto_skcipher_tfm(tfm));
1068 unsigned int i, j, k, n, temp;
1069 char *q;
1070 struct skcipher_request *req;
1071 struct scatterlist sg[8];
1072 struct scatterlist sgout[8];
1073 const char *e, *d;
1074 struct tcrypt_result result;
1075 void *data;
1076 char iv[MAX_IVLEN];
1077 char *xbuf[XBUFSIZE];
1078 char *xoutbuf[XBUFSIZE];
1079 int ret = -ENOMEM;
1080 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
1082 if (testmgr_alloc_buf(xbuf))
1083 goto out_nobuf;
1085 if (diff_dst && testmgr_alloc_buf(xoutbuf))
1086 goto out_nooutbuf;
1088 if (diff_dst)
1089 d = "-ddst";
1090 else
1091 d = "";
1093 if (enc == ENCRYPT)
1094 e = "encryption";
1095 else
1096 e = "decryption";
1098 init_completion(&result.completion);
1100 req = skcipher_request_alloc(tfm, GFP_KERNEL);
1101 if (!req) {
1102 pr_err("alg: skcipher%s: Failed to allocate request for %s\n",
1103 d, algo);
1104 goto out;
1107 skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1108 tcrypt_complete, &result);
1110 j = 0;
1111 for (i = 0; i < tcount; i++) {
1112 if (template[i].np && !template[i].also_non_np)
1113 continue;
1115 if (template[i].iv)
1116 memcpy(iv, template[i].iv, ivsize);
1117 else
1118 memset(iv, 0, MAX_IVLEN);
1120 j++;
1121 ret = -EINVAL;
1122 if (WARN_ON(align_offset + template[i].ilen > PAGE_SIZE))
1123 goto out;
1125 data = xbuf[0];
1126 data += align_offset;
1127 memcpy(data, template[i].input, template[i].ilen);
1129 crypto_skcipher_clear_flags(tfm, ~0);
1130 if (template[i].wk)
1131 crypto_skcipher_set_flags(tfm,
1132 CRYPTO_TFM_REQ_WEAK_KEY);
1134 ret = crypto_skcipher_setkey(tfm, template[i].key,
1135 template[i].klen);
1136 if (!ret == template[i].fail) {
1137 pr_err("alg: skcipher%s: setkey failed on test %d for %s: flags=%x\n",
1138 d, j, algo, crypto_skcipher_get_flags(tfm));
1139 goto out;
1140 } else if (ret)
1141 continue;
1143 sg_init_one(&sg[0], data, template[i].ilen);
1144 if (diff_dst) {
1145 data = xoutbuf[0];
1146 data += align_offset;
1147 sg_init_one(&sgout[0], data, template[i].ilen);
1150 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1151 template[i].ilen, iv);
1152 ret = enc ? crypto_skcipher_encrypt(req) :
1153 crypto_skcipher_decrypt(req);
1155 switch (ret) {
1156 case 0:
1157 break;
1158 case -EINPROGRESS:
1159 case -EBUSY:
1160 wait_for_completion(&result.completion);
1161 reinit_completion(&result.completion);
1162 ret = result.err;
1163 if (!ret)
1164 break;
1165 /* fall through */
1166 default:
1167 pr_err("alg: skcipher%s: %s failed on test %d for %s: ret=%d\n",
1168 d, e, j, algo, -ret);
1169 goto out;
1172 q = data;
1173 if (memcmp(q, template[i].result, template[i].rlen)) {
1174 pr_err("alg: skcipher%s: Test %d failed (invalid result) on %s for %s\n",
1175 d, j, e, algo);
1176 hexdump(q, template[i].rlen);
1177 ret = -EINVAL;
1178 goto out;
1181 if (template[i].iv_out &&
1182 memcmp(iv, template[i].iv_out,
1183 crypto_skcipher_ivsize(tfm))) {
1184 pr_err("alg: skcipher%s: Test %d failed (invalid output IV) on %s for %s\n",
1185 d, j, e, algo);
1186 hexdump(iv, crypto_skcipher_ivsize(tfm));
1187 ret = -EINVAL;
1188 goto out;
1192 j = 0;
1193 for (i = 0; i < tcount; i++) {
1194 /* alignment tests are only done with continuous buffers */
1195 if (align_offset != 0)
1196 break;
1198 if (!template[i].np)
1199 continue;
1201 if (template[i].iv)
1202 memcpy(iv, template[i].iv, ivsize);
1203 else
1204 memset(iv, 0, MAX_IVLEN);
1206 j++;
1207 crypto_skcipher_clear_flags(tfm, ~0);
1208 if (template[i].wk)
1209 crypto_skcipher_set_flags(tfm,
1210 CRYPTO_TFM_REQ_WEAK_KEY);
1212 ret = crypto_skcipher_setkey(tfm, template[i].key,
1213 template[i].klen);
1214 if (!ret == template[i].fail) {
1215 pr_err("alg: skcipher%s: setkey failed on chunk test %d for %s: flags=%x\n",
1216 d, j, algo, crypto_skcipher_get_flags(tfm));
1217 goto out;
1218 } else if (ret)
1219 continue;
1221 temp = 0;
1222 ret = -EINVAL;
1223 sg_init_table(sg, template[i].np);
1224 if (diff_dst)
1225 sg_init_table(sgout, template[i].np);
1226 for (k = 0; k < template[i].np; k++) {
1227 if (WARN_ON(offset_in_page(IDX[k]) +
1228 template[i].tap[k] > PAGE_SIZE))
1229 goto out;
1231 q = xbuf[IDX[k] >> PAGE_SHIFT] + offset_in_page(IDX[k]);
1233 memcpy(q, template[i].input + temp, template[i].tap[k]);
1235 if (offset_in_page(q) + template[i].tap[k] < PAGE_SIZE)
1236 q[template[i].tap[k]] = 0;
1238 sg_set_buf(&sg[k], q, template[i].tap[k]);
1239 if (diff_dst) {
1240 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1241 offset_in_page(IDX[k]);
1243 sg_set_buf(&sgout[k], q, template[i].tap[k]);
1245 memset(q, 0, template[i].tap[k]);
1246 if (offset_in_page(q) +
1247 template[i].tap[k] < PAGE_SIZE)
1248 q[template[i].tap[k]] = 0;
1251 temp += template[i].tap[k];
1254 skcipher_request_set_crypt(req, sg, (diff_dst) ? sgout : sg,
1255 template[i].ilen, iv);
1257 ret = enc ? crypto_skcipher_encrypt(req) :
1258 crypto_skcipher_decrypt(req);
1260 switch (ret) {
1261 case 0:
1262 break;
1263 case -EINPROGRESS:
1264 case -EBUSY:
1265 wait_for_completion(&result.completion);
1266 reinit_completion(&result.completion);
1267 ret = result.err;
1268 if (!ret)
1269 break;
1270 /* fall through */
1271 default:
1272 pr_err("alg: skcipher%s: %s failed on chunk test %d for %s: ret=%d\n",
1273 d, e, j, algo, -ret);
1274 goto out;
1277 temp = 0;
1278 ret = -EINVAL;
1279 for (k = 0; k < template[i].np; k++) {
1280 if (diff_dst)
1281 q = xoutbuf[IDX[k] >> PAGE_SHIFT] +
1282 offset_in_page(IDX[k]);
1283 else
1284 q = xbuf[IDX[k] >> PAGE_SHIFT] +
1285 offset_in_page(IDX[k]);
1287 if (memcmp(q, template[i].result + temp,
1288 template[i].tap[k])) {
1289 pr_err("alg: skcipher%s: Chunk test %d failed on %s at page %u for %s\n",
1290 d, j, e, k, algo);
1291 hexdump(q, template[i].tap[k]);
1292 goto out;
1295 q += template[i].tap[k];
1296 for (n = 0; offset_in_page(q + n) && q[n]; n++)
1297 ;
1298 if (n) {
1299 pr_err("alg: skcipher%s: Result buffer corruption in chunk test %d on %s at page %u for %s: %u bytes:\n",
1300 d, j, e, k, algo, n);
1301 hexdump(q, n);
1302 goto out;
1304 temp += template[i].tap[k];
1308 ret = 0;
1310 out:
1311 skcipher_request_free(req);
1312 if (diff_dst)
1313 testmgr_free_buf(xoutbuf);
1314 out_nooutbuf:
1315 testmgr_free_buf(xbuf);
1316 out_nobuf:
1317 return ret;
1320 static int test_skcipher(struct crypto_skcipher *tfm, int enc,
1321 struct cipher_testvec *template, unsigned int tcount)
1323 unsigned int alignmask;
1324 int ret;
1326 /* test 'dst == src' case */
1327 ret = __test_skcipher(tfm, enc, template, tcount, false, 0);
1328 if (ret)
1329 return ret;
1331 /* test 'dst != src' case */
1332 ret = __test_skcipher(tfm, enc, template, tcount, true, 0);
1333 if (ret)
1334 return ret;
1336 /* test unaligned buffers, check with one byte offset */
1337 ret = __test_skcipher(tfm, enc, template, tcount, true, 1);
1338 if (ret)
1339 return ret;
1341 alignmask = crypto_tfm_alg_alignmask(&tfm->base);
1342 if (alignmask) {
1343 /* Check if alignment mask for tfm is correctly set. */
1344 ret = __test_skcipher(tfm, enc, template, tcount, true,
1345 alignmask + 1);
1346 if (ret)
1347 return ret;
1350 return 0;
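/*
 * Compression test: compress and decompress each vector and check both
 * the output length and the output data.
 */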
1353 static int test_comp(struct crypto_comp *tfm, struct comp_testvec *ctemplate,
1354 struct comp_testvec *dtemplate, int ctcount, int dtcount)
1356 const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
1357 unsigned int i;
1358 char result[COMP_BUF_SIZE];
1359 int ret;
1361 for (i = 0; i < ctcount; i++) {
1362 int ilen;
1363 unsigned int dlen = COMP_BUF_SIZE;
1365 memset(result, 0, sizeof (result));
1367 ilen = ctemplate[i].inlen;
1368 ret = crypto_comp_compress(tfm, ctemplate[i].input,
1369 ilen, result, &dlen);
1370 if (ret) {
1371 printk(KERN_ERR "alg: comp: compression failed "
1372 "on test %d for %s: ret=%d\n", i + 1, algo,
1373 -ret);
1374 goto out;
1377 if (dlen != ctemplate[i].outlen) {
1378 printk(KERN_ERR "alg: comp: Compression test %d "
1379 "failed for %s: output len = %d\n", i + 1, algo,
1380 dlen);
1381 ret = -EINVAL;
1382 goto out;
1385 if (memcmp(result, ctemplate[i].output, dlen)) {
1386 printk(KERN_ERR "alg: comp: Compression test %d "
1387 "failed for %s\n", i + 1, algo);
1388 hexdump(result, dlen);
1389 ret = -EINVAL;
1390 goto out;
1394 for (i = 0; i < dtcount; i++) {
1395 int ilen;
1396 unsigned int dlen = COMP_BUF_SIZE;
1398 memset(result, 0, sizeof (result));
1400 ilen = dtemplate[i].inlen;
1401 ret = crypto_comp_decompress(tfm, dtemplate[i].input,
1402 ilen, result, &dlen);
1403 if (ret) {
1404 printk(KERN_ERR "alg: comp: decompression failed "
1405 "on test %d for %s: ret=%d\n", i + 1, algo,
1406 -ret);
1407 goto out;
1410 if (dlen != dtemplate[i].outlen) {
1411 printk(KERN_ERR "alg: comp: Decompression test %d "
1412 "failed for %s: output len = %d\n", i + 1, algo,
1413 dlen);
1414 ret = -EINVAL;
1415 goto out;
1418 if (memcmp(result, dtemplate[i].output, dlen)) {
1419 printk(KERN_ERR "alg: comp: Decompression test %d "
1420 "failed for %s\n", i + 1, algo);
1421 hexdump(result, dlen);
1422 ret = -EINVAL;
1423 goto out;
1427 ret = 0;
1429 out:
1430 return ret;
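/*
 * CPRNG test: seed the RNG with V || key || DT for each vector and
 * compare the generated bytes against the expected output.
 */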
1433 static int test_cprng(struct crypto_rng *tfm, struct cprng_testvec *template,
1434 unsigned int tcount)
1436 const char *algo = crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm));
1437 int err = 0, i, j, seedsize;
1438 u8 *seed;
1439 char result[32];
1441 seedsize = crypto_rng_seedsize(tfm);
1443 seed = kmalloc(seedsize, GFP_KERNEL);
1444 if (!seed) {
1445 printk(KERN_ERR "alg: cprng: Failed to allocate seed space "
1446 "for %s\n", algo);
1447 return -ENOMEM;
1450 for (i = 0; i < tcount; i++) {
1451 memset(result, 0, 32);
1453 memcpy(seed, template[i].v, template[i].vlen);
1454 memcpy(seed + template[i].vlen, template[i].key,
1455 template[i].klen);
1456 memcpy(seed + template[i].vlen + template[i].klen,
1457 template[i].dt, template[i].dtlen);
1459 err = crypto_rng_reset(tfm, seed, seedsize);
1460 if (err) {
1461 printk(KERN_ERR "alg: cprng: Failed to reset rng "
1462 "for %s\n", algo);
1463 goto out;
1466 for (j = 0; j < template[i].loops; j++) {
1467 err = crypto_rng_get_bytes(tfm, result,
1468 template[i].rlen);
1469 if (err < 0) {
1470 printk(KERN_ERR "alg: cprng: Failed to obtain "
1471 "the correct amount of random data for "
1472 "%s (requested %d)\n", algo,
1473 template[i].rlen);
1474 goto out;
1478 err = memcmp(result, template[i].result,
1479 template[i].rlen);
1480 if (err) {
1481 printk(KERN_ERR "alg: cprng: Test %d failed for %s\n",
1482 i, algo);
1483 hexdump(result, template[i].rlen);
1484 err = -EINVAL;
1485 goto out;
1489 out:
1490 kfree(seed);
1491 return err;
1494 static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
1495 u32 type, u32 mask)
1497 struct crypto_aead *tfm;
1498 int err = 0;
1500 tfm = crypto_alloc_aead(driver, type | CRYPTO_ALG_INTERNAL, mask);
1501 if (IS_ERR(tfm)) {
1502 printk(KERN_ERR "alg: aead: Failed to load transform for %s: "
1503 "%ld\n", driver, PTR_ERR(tfm));
1504 return PTR_ERR(tfm);
1507 if (desc->suite.aead.enc.vecs) {
1508 err = test_aead(tfm, ENCRYPT, desc->suite.aead.enc.vecs,
1509 desc->suite.aead.enc.count);
1510 if (err)
1511 goto out;
1514 if (!err && desc->suite.aead.dec.vecs)
1515 err = test_aead(tfm, DECRYPT, desc->suite.aead.dec.vecs,
1516 desc->suite.aead.dec.count);
1518 out:
1519 crypto_free_aead(tfm);
1520 return err;
1523 static int alg_test_cipher(const struct alg_test_desc *desc,
1524 const char *driver, u32 type, u32 mask)
1526 struct crypto_cipher *tfm;
1527 int err = 0;
1529 tfm = crypto_alloc_cipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1530 if (IS_ERR(tfm)) {
1531 printk(KERN_ERR "alg: cipher: Failed to load transform for "
1532 "%s: %ld\n", driver, PTR_ERR(tfm));
1533 return PTR_ERR(tfm);
1536 if (desc->suite.cipher.enc.vecs) {
1537 err = test_cipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1538 desc->suite.cipher.enc.count);
1539 if (err)
1540 goto out;
1543 if (desc->suite.cipher.dec.vecs)
1544 err = test_cipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1545 desc->suite.cipher.dec.count);
1547 out:
1548 crypto_free_cipher(tfm);
1549 return err;
1552 static int alg_test_skcipher(const struct alg_test_desc *desc,
1553 const char *driver, u32 type, u32 mask)
1555 struct crypto_skcipher *tfm;
1556 int err = 0;
1558 tfm = crypto_alloc_skcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
1559 if (IS_ERR(tfm)) {
1560 printk(KERN_ERR "alg: skcipher: Failed to load transform for "
1561 "%s: %ld\n", driver, PTR_ERR(tfm));
1562 return PTR_ERR(tfm);
1565 if (desc->suite.cipher.enc.vecs) {
1566 err = test_skcipher(tfm, ENCRYPT, desc->suite.cipher.enc.vecs,
1567 desc->suite.cipher.enc.count);
1568 if (err)
1569 goto out;
1572 if (desc->suite.cipher.dec.vecs)
1573 err = test_skcipher(tfm, DECRYPT, desc->suite.cipher.dec.vecs,
1574 desc->suite.cipher.dec.count);
1576 out:
1577 crypto_free_skcipher(tfm);
1578 return err;
1581 static int alg_test_comp(const struct alg_test_desc *desc, const char *driver,
1582 u32 type, u32 mask)
1584 struct crypto_comp *tfm;
1585 int err;
1587 tfm = crypto_alloc_comp(driver, type, mask);
1588 if (IS_ERR(tfm)) {
1589 printk(KERN_ERR "alg: comp: Failed to load transform for %s: "
1590 "%ld\n", driver, PTR_ERR(tfm));
1591 return PTR_ERR(tfm);
1594 err = test_comp(tfm, desc->suite.comp.comp.vecs,
1595 desc->suite.comp.decomp.vecs,
1596 desc->suite.comp.comp.count,
1597 desc->suite.comp.decomp.count);
1599 crypto_free_comp(tfm);
1600 return err;
1603 static int alg_test_hash(const struct alg_test_desc *desc, const char *driver,
1604 u32 type, u32 mask)
1606 struct crypto_ahash *tfm;
1607 int err;
1609 tfm = crypto_alloc_ahash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1610 if (IS_ERR(tfm)) {
1611 printk(KERN_ERR "alg: hash: Failed to load transform for %s: "
1612 "%ld\n", driver, PTR_ERR(tfm));
1613 return PTR_ERR(tfm);
1616 err = test_hash(tfm, desc->suite.hash.vecs,
1617 desc->suite.hash.count, true);
1618 if (!err)
1619 err = test_hash(tfm, desc->suite.hash.vecs,
1620 desc->suite.hash.count, false);
1622 crypto_free_ahash(tfm);
1623 return err;
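/*
 * crc32c gets the generic hash tests plus a small shash sanity check: a
 * value written directly into the descriptor context must come back
 * bit-inverted from crypto_shash_final().
 */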
1626 static int alg_test_crc32c(const struct alg_test_desc *desc,
1627 const char *driver, u32 type, u32 mask)
1629 struct crypto_shash *tfm;
1630 u32 val;
1631 int err;
1633 err = alg_test_hash(desc, driver, type, mask);
1634 if (err)
1635 goto out;
1637 tfm = crypto_alloc_shash(driver, type | CRYPTO_ALG_INTERNAL, mask);
1638 if (IS_ERR(tfm)) {
1639 printk(KERN_ERR "alg: crc32c: Failed to load transform for %s: "
1640 "%ld\n", driver, PTR_ERR(tfm));
1641 err = PTR_ERR(tfm);
1642 goto out;
1645 do {
1646 SHASH_DESC_ON_STACK(shash, tfm);
1647 u32 *ctx = (u32 *)shash_desc_ctx(shash);
1649 shash->tfm = tfm;
1650 shash->flags = 0;
1652 *ctx = le32_to_cpu(420553207);
1653 err = crypto_shash_final(shash, (u8 *)&val);
1654 if (err) {
1655 printk(KERN_ERR "alg: crc32c: Operation failed for "
1656 "%s: %d\n", driver, err);
1657 break;
1660 if (val != ~420553207) {
1661 printk(KERN_ERR "alg: crc32c: Test failed for %s: "
1662 "%d\n", driver, val);
1663 err = -EINVAL;
1665 } while (0);
1667 crypto_free_shash(tfm);
1669 out:
1670 return err;
1673 static int alg_test_cprng(const struct alg_test_desc *desc, const char *driver,
1674 u32 type, u32 mask)
1676 struct crypto_rng *rng;
1677 int err;
1679 rng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1680 if (IS_ERR(rng)) {
1681 printk(KERN_ERR "alg: cprng: Failed to load transform for %s: "
1682 "%ld\n", driver, PTR_ERR(rng));
1683 return PTR_ERR(rng);
1686 err = test_cprng(rng, desc->suite.cprng.vecs, desc->suite.cprng.count);
1688 crypto_free_rng(rng);
1690 return err;
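/*
 * Run a single DRBG CAVS vector: reset the DRBG with test entropy and the
 * personalization string, generate twice with additional input (feeding
 * fresh entropy when prediction resistance is requested) and compare the
 * second output with the expected data.
 */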
1694 static int drbg_cavs_test(struct drbg_testvec *test, int pr,
1695 const char *driver, u32 type, u32 mask)
1697 int ret = -EAGAIN;
1698 struct crypto_rng *drng;
1699 struct drbg_test_data test_data;
1700 struct drbg_string addtl, pers, testentropy;
1701 unsigned char *buf = kzalloc(test->expectedlen, GFP_KERNEL);
1703 if (!buf)
1704 return -ENOMEM;
1706 drng = crypto_alloc_rng(driver, type | CRYPTO_ALG_INTERNAL, mask);
1707 if (IS_ERR(drng)) {
1708 printk(KERN_ERR "alg: drbg: could not allocate DRNG handle for "
1709 "%s\n", driver);
1710 kzfree(buf);
1711 return -ENOMEM;
1714 test_data.testentropy = &testentropy;
1715 drbg_string_fill(&testentropy, test->entropy, test->entropylen);
1716 drbg_string_fill(&pers, test->pers, test->perslen);
1717 ret = crypto_drbg_reset_test(drng, &pers, &test_data);
1718 if (ret) {
1719 printk(KERN_ERR "alg: drbg: Failed to reset rng\n");
1720 goto outbuf;
1723 drbg_string_fill(&addtl, test->addtla, test->addtllen);
1724 if (pr) {
1725 drbg_string_fill(&testentropy, test->entpra, test->entprlen);
1726 ret = crypto_drbg_get_bytes_addtl_test(drng,
1727 buf, test->expectedlen, &addtl, &test_data);
1728 } else {
1729 ret = crypto_drbg_get_bytes_addtl(drng,
1730 buf, test->expectedlen, &addtl);
1732 if (ret < 0) {
1733 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1734 "driver %s\n", driver);
1735 goto outbuf;
1738 drbg_string_fill(&addtl, test->addtlb, test->addtllen);
1739 if (pr) {
1740 drbg_string_fill(&testentropy, test->entprb, test->entprlen);
1741 ret = crypto_drbg_get_bytes_addtl_test(drng,
1742 buf, test->expectedlen, &addtl, &test_data);
1743 } else {
1744 ret = crypto_drbg_get_bytes_addtl(drng,
1745 buf, test->expectedlen, &addtl);
1747 if (ret < 0) {
1748 printk(KERN_ERR "alg: drbg: could not obtain random data for "
1749 "driver %s\n", driver);
1750 goto outbuf;
1753 ret = memcmp(test->expected, buf, test->expectedlen);
1755 outbuf:
1756 crypto_free_rng(drng);
1757 kzfree(buf);
1758 return ret;
1762 static int alg_test_drbg(const struct alg_test_desc *desc, const char *driver,
1763 u32 type, u32 mask)
1765 int err = 0;
1766 int pr = 0;
1767 int i = 0;
1768 struct drbg_testvec *template = desc->suite.drbg.vecs;
1769 unsigned int tcount = desc->suite.drbg.count;
1771 if (0 == memcmp(driver, "drbg_pr_", 8))
1772 pr = 1;
1774 for (i = 0; i < tcount; i++) {
1775 err = drbg_cavs_test(&template[i], pr, driver, type, mask);
1776 if (err) {
1777 printk(KERN_ERR "alg: drbg: Test %d failed for %s\n",
1778 i, driver);
1779 err = -EINVAL;
1780 break;
1783 return err;
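/*
 * Key agreement (kpp) vector test: generate our public key from the
 * secret and verify it, then compute the shared secret from the peer's
 * public key (b_public) and compare it with the expected value.
 */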
1787 static int do_test_kpp(struct crypto_kpp *tfm, struct kpp_testvec *vec,
1788 const char *alg)
1790 struct kpp_request *req;
1791 void *input_buf = NULL;
1792 void *output_buf = NULL;
1793 struct tcrypt_result result;
1794 unsigned int out_len_max;
1795 int err = -ENOMEM;
1796 struct scatterlist src, dst;
1798 req = kpp_request_alloc(tfm, GFP_KERNEL);
1799 if (!req)
1800 return err;
1802 init_completion(&result.completion);
1804 err = crypto_kpp_set_secret(tfm, vec->secret, vec->secret_size);
1805 if (err < 0)
1806 goto free_req;
1808 out_len_max = crypto_kpp_maxsize(tfm);
1809 output_buf = kzalloc(out_len_max, GFP_KERNEL);
1810 if (!output_buf) {
1811 err = -ENOMEM;
1812 goto free_req;
1815 /* Use appropriate parameter as base */
1816 kpp_request_set_input(req, NULL, 0);
1817 sg_init_one(&dst, output_buf, out_len_max);
1818 kpp_request_set_output(req, &dst, out_len_max);
1819 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1820 tcrypt_complete, &result);
1822 /* Compute public key */
1823 err = wait_async_op(&result, crypto_kpp_generate_public_key(req));
1824 if (err) {
1825 pr_err("alg: %s: generate public key test failed. err %d\n",
1826 alg, err);
1827 goto free_output;
1829 /* Verify calculated public key */
1830 if (memcmp(vec->expected_a_public, sg_virt(req->dst),
1831 vec->expected_a_public_size)) {
1832 pr_err("alg: %s: generate public key test failed. Invalid output\n",
1833 alg);
1834 err = -EINVAL;
1835 goto free_output;
1838 /* Calculate shared secret key by using counterpart (b) public key. */
1839 input_buf = kzalloc(vec->b_public_size, GFP_KERNEL);
1840 if (!input_buf) {
1841 err = -ENOMEM;
1842 goto free_output;
1845 memcpy(input_buf, vec->b_public, vec->b_public_size);
1846 sg_init_one(&src, input_buf, vec->b_public_size);
1847 sg_init_one(&dst, output_buf, out_len_max);
1848 kpp_request_set_input(req, &src, vec->b_public_size);
1849 kpp_request_set_output(req, &dst, out_len_max);
1850 kpp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1851 tcrypt_complete, &result);
1852 err = wait_async_op(&result, crypto_kpp_compute_shared_secret(req));
1853 if (err) {
1854 pr_err("alg: %s: compute shared secret test failed. err %d\n",
1855 alg, err);
1856 goto free_all;
1858 /*
1859 * verify shared secret from which the user will derive
1860 * secret key by executing whatever hash it has chosen
1861 */
1862 if (memcmp(vec->expected_ss, sg_virt(req->dst),
1863 vec->expected_ss_size)) {
1864 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
1865 alg);
1866 err = -EINVAL;
1869 free_all:
1870 kfree(input_buf);
1871 free_output:
1872 kfree(output_buf);
1873 free_req:
1874 kpp_request_free(req);
1875 return err;
1878 static int test_kpp(struct crypto_kpp *tfm, const char *alg,
1879 struct kpp_testvec *vecs, unsigned int tcount)
1881 int ret, i;
1883 for (i = 0; i < tcount; i++) {
1884 ret = do_test_kpp(tfm, vecs++, alg);
1885 if (ret) {
1886 pr_err("alg: %s: test failed on vector %d, err=%d\n",
1887 alg, i + 1, ret);
1888 return ret;
1891 return 0;
1894 static int alg_test_kpp(const struct alg_test_desc *desc, const char *driver,
1895 u32 type, u32 mask)
1897 struct crypto_kpp *tfm;
1898 int err = 0;
1900 tfm = crypto_alloc_kpp(driver, type | CRYPTO_ALG_INTERNAL, mask);
1901 if (IS_ERR(tfm)) {
1902 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
1903 driver, PTR_ERR(tfm));
1904 return PTR_ERR(tfm);
1906 if (desc->suite.kpp.vecs)
1907 err = test_kpp(tfm, desc->alg, desc->suite.kpp.vecs,
1908 desc->suite.kpp.count);
1910 crypto_free_kpp(tfm);
1911 return err;
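/*
 * akcipher (RSA) vector test: encrypt the message and compare it with the
 * expected ciphertext, then, unless the vector only carries a public key,
 * decrypt it back and check the zero-padded plaintext.
 */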
1914 static int test_akcipher_one(struct crypto_akcipher *tfm,
1915 struct akcipher_testvec *vecs)
1917 char *xbuf[XBUFSIZE];
1918 struct akcipher_request *req;
1919 void *outbuf_enc = NULL;
1920 void *outbuf_dec = NULL;
1921 struct tcrypt_result result;
1922 unsigned int out_len_max, out_len = 0;
1923 int err = -ENOMEM;
1924 struct scatterlist src, dst, src_tab[2];
1926 if (testmgr_alloc_buf(xbuf))
1927 return err;
1929 req = akcipher_request_alloc(tfm, GFP_KERNEL);
1930 if (!req)
1931 goto free_xbuf;
1933 init_completion(&result.completion);
1935 if (vecs->public_key_vec)
1936 err = crypto_akcipher_set_pub_key(tfm, vecs->key,
1937 vecs->key_len);
1938 else
1939 err = crypto_akcipher_set_priv_key(tfm, vecs->key,
1940 vecs->key_len);
1941 if (err)
1942 goto free_req;
1944 err = -ENOMEM;
1945 out_len_max = crypto_akcipher_maxsize(tfm);
1946 outbuf_enc = kzalloc(out_len_max, GFP_KERNEL);
1947 if (!outbuf_enc)
1948 goto free_req;
1950 if (WARN_ON(vecs->m_size > PAGE_SIZE))
1951 goto free_all;
1953 memcpy(xbuf[0], vecs->m, vecs->m_size);
1955 sg_init_table(src_tab, 2);
1956 sg_set_buf(&src_tab[0], xbuf[0], 8);
1957 sg_set_buf(&src_tab[1], xbuf[0] + 8, vecs->m_size - 8);
1958 sg_init_one(&dst, outbuf_enc, out_len_max);
1959 akcipher_request_set_crypt(req, src_tab, &dst, vecs->m_size,
1960 out_len_max);
1961 akcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
1962 tcrypt_complete, &result);
1964 /* Run RSA encrypt - c = m^e mod n;*/
1965 err = wait_async_op(&result, crypto_akcipher_encrypt(req));
1966 if (err) {
1967 pr_err("alg: akcipher: encrypt test failed. err %d\n", err);
1968 goto free_all;
1970 if (req->dst_len != vecs->c_size) {
1971 pr_err("alg: akcipher: encrypt test failed. Invalid output len\n");
1972 err = -EINVAL;
1973 goto free_all;
1975 /* verify that encrypted message is equal to expected */
1976 if (memcmp(vecs->c, outbuf_enc, vecs->c_size)) {
1977 pr_err("alg: akcipher: encrypt test failed. Invalid output\n");
1978 hexdump(outbuf_enc, vecs->c_size);
1979 err = -EINVAL;
1980 goto free_all;
1982 /* Don't invoke decrypt for vectors with public key */
1983 if (vecs->public_key_vec) {
1984 err = 0;
1985 goto free_all;
1987 outbuf_dec = kzalloc(out_len_max, GFP_KERNEL);
1988 if (!outbuf_dec) {
1989 err = -ENOMEM;
1990 goto free_all;
1993 if (WARN_ON(vecs->c_size > PAGE_SIZE))
1994 goto free_all;
1996 memcpy(xbuf[0], vecs->c, vecs->c_size);
1998 sg_init_one(&src, xbuf[0], vecs->c_size);
1999 sg_init_one(&dst, outbuf_dec, out_len_max);
2000 init_completion(&result.completion);
2001 akcipher_request_set_crypt(req, &src, &dst, vecs->c_size, out_len_max);
2003 /* Run RSA decrypt - m = c^d mod n;*/
2004 err = wait_async_op(&result, crypto_akcipher_decrypt(req));
2005 if (err) {
2006 pr_err("alg: akcipher: decrypt test failed. err %d\n", err);
2007 goto free_all;
2009 out_len = req->dst_len;
2010 if (out_len < vecs->m_size) {
2011 pr_err("alg: akcipher: decrypt test failed. "
2012 "Invalid output len %u\n", out_len);
2013 err = -EINVAL;
2014 goto free_all;
2016 /* verify that decrypted message is equal to the original msg */
2017 if (memchr_inv(outbuf_dec, 0, out_len - vecs->m_size) ||
2018 memcmp(vecs->m, outbuf_dec + out_len - vecs->m_size,
2019 vecs->m_size)) {
2020 pr_err("alg: akcipher: decrypt test failed. Invalid output\n");
2021 hexdump(outbuf_dec, out_len);
2022 err = -EINVAL;
2024 free_all:
2025 kfree(outbuf_dec);
2026 kfree(outbuf_enc);
2027 free_req:
2028 akcipher_request_free(req);
2029 free_xbuf:
2030 testmgr_free_buf(xbuf);
2031 return err;
2034 static int test_akcipher(struct crypto_akcipher *tfm, const char *alg,
2035 struct akcipher_testvec *vecs, unsigned int tcount)
2037 const char *algo =
2038 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm));
2039 int ret, i;
2041 for (i = 0; i < tcount; i++) {
2042 ret = test_akcipher_one(tfm, vecs++);
2043 if (!ret)
2044 continue;
2046 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
2047 i + 1, algo, ret);
2048 return ret;
2050 return 0;
2053 static int alg_test_akcipher(const struct alg_test_desc *desc,
2054 const char *driver, u32 type, u32 mask)
2056 struct crypto_akcipher *tfm;
2057 int err = 0;
2059 tfm = crypto_alloc_akcipher(driver, type | CRYPTO_ALG_INTERNAL, mask);
2060 if (IS_ERR(tfm)) {
2061 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
2062 driver, PTR_ERR(tfm));
2063 return PTR_ERR(tfm);
2065 if (desc->suite.akcipher.vecs)
2066 err = test_akcipher(tfm, desc->alg, desc->suite.akcipher.vecs,
2067 desc->suite.akcipher.count);
2069 crypto_free_akcipher(tfm);
2070 return err;
2073 static int alg_test_null(const struct alg_test_desc *desc,
2074 const char *driver, u32 type, u32 mask)
2076 return 0;
2079 /* Please keep this list sorted by algorithm name. */
2080 static const struct alg_test_desc alg_test_descs[] = {
2082 .alg = "__cbc-cast5-avx",
2083 .test = alg_test_null,
2084 }, {
2085 .alg = "__cbc-cast6-avx",
2086 .test = alg_test_null,
2087 }, {
2088 .alg = "__cbc-serpent-avx",
2089 .test = alg_test_null,
2090 }, {
2091 .alg = "__cbc-serpent-avx2",
2092 .test = alg_test_null,
2093 }, {
2094 .alg = "__cbc-serpent-sse2",
2095 .test = alg_test_null,
2096 }, {
2097 .alg = "__cbc-twofish-avx",
2098 .test = alg_test_null,
2099 }, {
2100 .alg = "__driver-cbc-aes-aesni",
2101 .test = alg_test_null,
2102 .fips_allowed = 1,
2103 }, {
2104 .alg = "__driver-cbc-camellia-aesni",
2105 .test = alg_test_null,
2106 }, {
2107 .alg = "__driver-cbc-camellia-aesni-avx2",
2108 .test = alg_test_null,
2109 }, {
2110 .alg = "__driver-cbc-cast5-avx",
2111 .test = alg_test_null,
2112 }, {
2113 .alg = "__driver-cbc-cast6-avx",
2114 .test = alg_test_null,
2115 }, {
2116 .alg = "__driver-cbc-serpent-avx",
2117 .test = alg_test_null,
2118 }, {
2119 .alg = "__driver-cbc-serpent-avx2",
2120 .test = alg_test_null,
2121 }, {
2122 .alg = "__driver-cbc-serpent-sse2",
2123 .test = alg_test_null,
2124 }, {
2125 .alg = "__driver-cbc-twofish-avx",
2126 .test = alg_test_null,
2127 }, {
2128 .alg = "__driver-ecb-aes-aesni",
2129 .test = alg_test_null,
2130 .fips_allowed = 1,
2131 }, {
2132 .alg = "__driver-ecb-camellia-aesni",
2133 .test = alg_test_null,
2134 }, {
2135 .alg = "__driver-ecb-camellia-aesni-avx2",
2136 .test = alg_test_null,
2137 }, {
2138 .alg = "__driver-ecb-cast5-avx",
2139 .test = alg_test_null,
2140 }, {
2141 .alg = "__driver-ecb-cast6-avx",
2142 .test = alg_test_null,
2143 }, {
2144 .alg = "__driver-ecb-serpent-avx",
2145 .test = alg_test_null,
2146 }, {
2147 .alg = "__driver-ecb-serpent-avx2",
2148 .test = alg_test_null,
2149 }, {
2150 .alg = "__driver-ecb-serpent-sse2",
2151 .test = alg_test_null,
2152 }, {
2153 .alg = "__driver-ecb-twofish-avx",
2154 .test = alg_test_null,
2155 }, {
2156 .alg = "__driver-gcm-aes-aesni",
2157 .test = alg_test_null,
2158 .fips_allowed = 1,
2159 }, {
2160 .alg = "__ghash-pclmulqdqni",
2161 .test = alg_test_null,
2162 .fips_allowed = 1,
2163 }, {
2164 .alg = "ansi_cprng",
2165 .test = alg_test_cprng,
2166 .suite = {
2167 .cprng = {
2168 .vecs = ansi_cprng_aes_tv_template,
2169 .count = ANSI_CPRNG_AES_TEST_VECTORS
2172 }, {
2173 .alg = "authenc(hmac(md5),ecb(cipher_null))",
2174 .test = alg_test_aead,
2175 .suite = {
2176 .aead = {
2177 .enc = {
2178 .vecs = hmac_md5_ecb_cipher_null_enc_tv_template,
2179 .count = HMAC_MD5_ECB_CIPHER_NULL_ENC_TEST_VECTORS
2181 .dec = {
2182 .vecs = hmac_md5_ecb_cipher_null_dec_tv_template,
2183 .count = HMAC_MD5_ECB_CIPHER_NULL_DEC_TEST_VECTORS
2187 }, {
2188 .alg = "authenc(hmac(sha1),cbc(aes))",
2189 .test = alg_test_aead,
2190 .suite = {
2191 .aead = {
2192 .enc = {
2193 .vecs =
2194 hmac_sha1_aes_cbc_enc_tv_temp,
2195 .count =
2196 HMAC_SHA1_AES_CBC_ENC_TEST_VEC
2200 }, {
2201 .alg = "authenc(hmac(sha1),cbc(des))",
2202 .test = alg_test_aead,
2203 .suite = {
2204 .aead = {
2205 .enc = {
2206 .vecs =
2207 hmac_sha1_des_cbc_enc_tv_temp,
2208 .count =
2209 HMAC_SHA1_DES_CBC_ENC_TEST_VEC
2213 }, {
2214 .alg = "authenc(hmac(sha1),cbc(des3_ede))",
2215 .test = alg_test_aead,
2216 .fips_allowed = 1,
2217 .suite = {
2218 .aead = {
2219 .enc = {
2220 .vecs =
2221 hmac_sha1_des3_ede_cbc_enc_tv_temp,
2222 .count =
2223 HMAC_SHA1_DES3_EDE_CBC_ENC_TEST_VEC
2227 }, {
2228 .alg = "authenc(hmac(sha1),ctr(aes))",
2229 .test = alg_test_null,
2230 .fips_allowed = 1,
2231 }, {
2232 .alg = "authenc(hmac(sha1),ecb(cipher_null))",
2233 .test = alg_test_aead,
2234 .suite = {
2235 .aead = {
2236 .enc = {
2237 .vecs =
2238 hmac_sha1_ecb_cipher_null_enc_tv_temp,
2239 .count =
2240 HMAC_SHA1_ECB_CIPHER_NULL_ENC_TEST_VEC
2242 .dec = {
2243 .vecs =
2244 hmac_sha1_ecb_cipher_null_dec_tv_temp,
2245 .count =
2246 HMAC_SHA1_ECB_CIPHER_NULL_DEC_TEST_VEC
2250 }, {
2251 .alg = "authenc(hmac(sha1),rfc3686(ctr(aes)))",
2252 .test = alg_test_null,
2253 .fips_allowed = 1,
2254 }, {
2255 .alg = "authenc(hmac(sha224),cbc(des))",
2256 .test = alg_test_aead,
2257 .suite = {
2258 .aead = {
2259 .enc = {
2260 .vecs =
2261 hmac_sha224_des_cbc_enc_tv_temp,
2262 .count =
2263 HMAC_SHA224_DES_CBC_ENC_TEST_VEC
2267 }, {
2268 .alg = "authenc(hmac(sha224),cbc(des3_ede))",
2269 .test = alg_test_aead,
2270 .fips_allowed = 1,
2271 .suite = {
2272 .aead = {
2273 .enc = {
2274 .vecs =
2275 hmac_sha224_des3_ede_cbc_enc_tv_temp,
2276 .count =
2277 HMAC_SHA224_DES3_EDE_CBC_ENC_TEST_VEC
2281 }, {
2282 .alg = "authenc(hmac(sha256),cbc(aes))",
2283 .test = alg_test_aead,
2284 .fips_allowed = 1,
2285 .suite = {
2286 .aead = {
2287 .enc = {
2288 .vecs =
2289 hmac_sha256_aes_cbc_enc_tv_temp,
2290 .count =
2291 HMAC_SHA256_AES_CBC_ENC_TEST_VEC
2295 }, {
2296 .alg = "authenc(hmac(sha256),cbc(des))",
2297 .test = alg_test_aead,
2298 .suite = {
2299 .aead = {
2300 .enc = {
2301 .vecs =
2302 hmac_sha256_des_cbc_enc_tv_temp,
2303 .count =
2304 HMAC_SHA256_DES_CBC_ENC_TEST_VEC
2308 }, {
2309 .alg = "authenc(hmac(sha256),cbc(des3_ede))",
2310 .test = alg_test_aead,
2311 .fips_allowed = 1,
2312 .suite = {
2313 .aead = {
2314 .enc = {
2315 .vecs =
2316 hmac_sha256_des3_ede_cbc_enc_tv_temp,
2317 .count =
2318 HMAC_SHA256_DES3_EDE_CBC_ENC_TEST_VEC
2322 }, {
2323 .alg = "authenc(hmac(sha256),ctr(aes))",
2324 .test = alg_test_null,
2325 .fips_allowed = 1,
2326 }, {
2327 .alg = "authenc(hmac(sha256),rfc3686(ctr(aes)))",
2328 .test = alg_test_null,
2329 .fips_allowed = 1,
2330 }, {
2331 .alg = "authenc(hmac(sha384),cbc(des))",
2332 .test = alg_test_aead,
2333 .suite = {
2334 .aead = {
2335 .enc = {
2336 .vecs =
2337 hmac_sha384_des_cbc_enc_tv_temp,
2338 .count =
2339 HMAC_SHA384_DES_CBC_ENC_TEST_VEC
2343 }, {
2344 .alg = "authenc(hmac(sha384),cbc(des3_ede))",
2345 .test = alg_test_aead,
2346 .fips_allowed = 1,
2347 .suite = {
2348 .aead = {
2349 .enc = {
2350 .vecs =
2351 hmac_sha384_des3_ede_cbc_enc_tv_temp,
2352 .count =
2353 HMAC_SHA384_DES3_EDE_CBC_ENC_TEST_VEC
2357 }, {
2358 .alg = "authenc(hmac(sha384),ctr(aes))",
2359 .test = alg_test_null,
2360 .fips_allowed = 1,
2361 }, {
2362 .alg = "authenc(hmac(sha384),rfc3686(ctr(aes)))",
2363 .test = alg_test_null,
2364 .fips_allowed = 1,
2365 }, {
2366 .alg = "authenc(hmac(sha512),cbc(aes))",
2367 .fips_allowed = 1,
2368 .test = alg_test_aead,
2369 .suite = {
2370 .aead = {
2371 .enc = {
2372 .vecs =
2373 hmac_sha512_aes_cbc_enc_tv_temp,
2374 .count =
2375 HMAC_SHA512_AES_CBC_ENC_TEST_VEC
2379 }, {
2380 .alg = "authenc(hmac(sha512),cbc(des))",
2381 .test = alg_test_aead,
2382 .suite = {
2383 .aead = {
2384 .enc = {
2385 .vecs =
2386 hmac_sha512_des_cbc_enc_tv_temp,
2387 .count =
2388 HMAC_SHA512_DES_CBC_ENC_TEST_VEC
2392 }, {
2393 .alg = "authenc(hmac(sha512),cbc(des3_ede))",
2394 .test = alg_test_aead,
2395 .fips_allowed = 1,
2396 .suite = {
2397 .aead = {
2398 .enc = {
2399 .vecs =
2400 hmac_sha512_des3_ede_cbc_enc_tv_temp,
2401 .count =
2402 HMAC_SHA512_DES3_EDE_CBC_ENC_TEST_VEC
2406 }, {
2407 .alg = "authenc(hmac(sha512),ctr(aes))",
2408 .test = alg_test_null,
2409 .fips_allowed = 1,
2410 }, {
2411 .alg = "authenc(hmac(sha512),rfc3686(ctr(aes)))",
2412 .test = alg_test_null,
2413 .fips_allowed = 1,
2414 }, {
2415 .alg = "cbc(aes)",
2416 .test = alg_test_skcipher,
2417 .fips_allowed = 1,
2418 .suite = {
2419 .cipher = {
2420 .enc = {
2421 .vecs = aes_cbc_enc_tv_template,
2422 .count = AES_CBC_ENC_TEST_VECTORS
2424 .dec = {
2425 .vecs = aes_cbc_dec_tv_template,
2426 .count = AES_CBC_DEC_TEST_VECTORS
2430 }, {
2431 .alg = "cbc(anubis)",
2432 .test = alg_test_skcipher,
2433 .suite = {
2434 .cipher = {
2435 .enc = {
2436 .vecs = anubis_cbc_enc_tv_template,
2437 .count = ANUBIS_CBC_ENC_TEST_VECTORS
2439 .dec = {
2440 .vecs = anubis_cbc_dec_tv_template,
2441 .count = ANUBIS_CBC_DEC_TEST_VECTORS
2445 }, {
2446 .alg = "cbc(blowfish)",
2447 .test = alg_test_skcipher,
2448 .suite = {
2449 .cipher = {
2450 .enc = {
2451 .vecs = bf_cbc_enc_tv_template,
2452 .count = BF_CBC_ENC_TEST_VECTORS
2454 .dec = {
2455 .vecs = bf_cbc_dec_tv_template,
2456 .count = BF_CBC_DEC_TEST_VECTORS
2460 }, {
2461 .alg = "cbc(camellia)",
2462 .test = alg_test_skcipher,
2463 .suite = {
2464 .cipher = {
2465 .enc = {
2466 .vecs = camellia_cbc_enc_tv_template,
2467 .count = CAMELLIA_CBC_ENC_TEST_VECTORS
2469 .dec = {
2470 .vecs = camellia_cbc_dec_tv_template,
2471 .count = CAMELLIA_CBC_DEC_TEST_VECTORS
2475 }, {
2476 .alg = "cbc(cast5)",
2477 .test = alg_test_skcipher,
2478 .suite = {
2479 .cipher = {
2480 .enc = {
2481 .vecs = cast5_cbc_enc_tv_template,
2482 .count = CAST5_CBC_ENC_TEST_VECTORS
2484 .dec = {
2485 .vecs = cast5_cbc_dec_tv_template,
2486 .count = CAST5_CBC_DEC_TEST_VECTORS
2490 }, {
2491 .alg = "cbc(cast6)",
2492 .test = alg_test_skcipher,
2493 .suite = {
2494 .cipher = {
2495 .enc = {
2496 .vecs = cast6_cbc_enc_tv_template,
2497 .count = CAST6_CBC_ENC_TEST_VECTORS
2499 .dec = {
2500 .vecs = cast6_cbc_dec_tv_template,
2501 .count = CAST6_CBC_DEC_TEST_VECTORS
2505 }, {
2506 .alg = "cbc(des)",
2507 .test = alg_test_skcipher,
2508 .suite = {
2509 .cipher = {
2510 .enc = {
2511 .vecs = des_cbc_enc_tv_template,
2512 .count = DES_CBC_ENC_TEST_VECTORS
2514 .dec = {
2515 .vecs = des_cbc_dec_tv_template,
2516 .count = DES_CBC_DEC_TEST_VECTORS
2520 }, {
2521 .alg = "cbc(des3_ede)",
2522 .test = alg_test_skcipher,
2523 .fips_allowed = 1,
2524 .suite = {
2525 .cipher = {
2526 .enc = {
2527 .vecs = des3_ede_cbc_enc_tv_template,
2528 .count = DES3_EDE_CBC_ENC_TEST_VECTORS
2530 .dec = {
2531 .vecs = des3_ede_cbc_dec_tv_template,
2532 .count = DES3_EDE_CBC_DEC_TEST_VECTORS
2536 }, {
2537 .alg = "cbc(serpent)",
2538 .test = alg_test_skcipher,
2539 .suite = {
2540 .cipher = {
2541 .enc = {
2542 .vecs = serpent_cbc_enc_tv_template,
2543 .count = SERPENT_CBC_ENC_TEST_VECTORS
2545 .dec = {
2546 .vecs = serpent_cbc_dec_tv_template,
2547 .count = SERPENT_CBC_DEC_TEST_VECTORS
2551 }, {
2552 .alg = "cbc(twofish)",
2553 .test = alg_test_skcipher,
2554 .suite = {
2555 .cipher = {
2556 .enc = {
2557 .vecs = tf_cbc_enc_tv_template,
2558 .count = TF_CBC_ENC_TEST_VECTORS
2560 .dec = {
2561 .vecs = tf_cbc_dec_tv_template,
2562 .count = TF_CBC_DEC_TEST_VECTORS
2566 }, {
2567 .alg = "ccm(aes)",
2568 .test = alg_test_aead,
2569 .fips_allowed = 1,
2570 .suite = {
2571 .aead = {
2572 .enc = {
2573 .vecs = aes_ccm_enc_tv_template,
2574 .count = AES_CCM_ENC_TEST_VECTORS
2576 .dec = {
2577 .vecs = aes_ccm_dec_tv_template,
2578 .count = AES_CCM_DEC_TEST_VECTORS
2582 }, {
2583 .alg = "chacha20",
2584 .test = alg_test_skcipher,
2585 .suite = {
2586 .cipher = {
2587 .enc = {
2588 .vecs = chacha20_enc_tv_template,
2589 .count = CHACHA20_ENC_TEST_VECTORS
2591 .dec = {
2592 .vecs = chacha20_enc_tv_template,
2593 .count = CHACHA20_ENC_TEST_VECTORS
2597 }, {
2598 .alg = "cmac(aes)",
2599 .fips_allowed = 1,
2600 .test = alg_test_hash,
2601 .suite = {
2602 .hash = {
2603 .vecs = aes_cmac128_tv_template,
2604 .count = CMAC_AES_TEST_VECTORS
2607 }, {
2608 .alg = "cmac(des3_ede)",
2609 .fips_allowed = 1,
2610 .test = alg_test_hash,
2611 .suite = {
2612 .hash = {
2613 .vecs = des3_ede_cmac64_tv_template,
2614 .count = CMAC_DES3_EDE_TEST_VECTORS
2617 }, {
2618 .alg = "compress_null",
2619 .test = alg_test_null,
2620 }, {
2621 .alg = "crc32",
2622 .test = alg_test_hash,
2623 .suite = {
2624 .hash = {
2625 .vecs = crc32_tv_template,
2626 .count = CRC32_TEST_VECTORS
2629 }, {
2630 .alg = "crc32c",
2631 .test = alg_test_crc32c,
2632 .fips_allowed = 1,
2633 .suite = {
2634 .hash = {
2635 .vecs = crc32c_tv_template,
2636 .count = CRC32C_TEST_VECTORS
2639 }, {
2640 .alg = "crct10dif",
2641 .test = alg_test_hash,
2642 .fips_allowed = 1,
2643 .suite = {
2644 .hash = {
2645 .vecs = crct10dif_tv_template,
2646 .count = CRCT10DIF_TEST_VECTORS
2649 }, {
2650 .alg = "cryptd(__driver-cbc-aes-aesni)",
2651 .test = alg_test_null,
2652 .fips_allowed = 1,
2653 }, {
2654 .alg = "cryptd(__driver-cbc-camellia-aesni)",
2655 .test = alg_test_null,
2656 }, {
2657 .alg = "cryptd(__driver-cbc-camellia-aesni-avx2)",
2658 .test = alg_test_null,
2659 }, {
2660 .alg = "cryptd(__driver-cbc-serpent-avx2)",
2661 .test = alg_test_null,
2662 }, {
2663 .alg = "cryptd(__driver-ecb-aes-aesni)",
2664 .test = alg_test_null,
2665 .fips_allowed = 1,
2666 }, {
2667 .alg = "cryptd(__driver-ecb-camellia-aesni)",
2668 .test = alg_test_null,
2669 }, {
2670 .alg = "cryptd(__driver-ecb-camellia-aesni-avx2)",
2671 .test = alg_test_null,
2672 }, {
2673 .alg = "cryptd(__driver-ecb-cast5-avx)",
2674 .test = alg_test_null,
2675 }, {
2676 .alg = "cryptd(__driver-ecb-cast6-avx)",
2677 .test = alg_test_null,
2678 }, {
2679 .alg = "cryptd(__driver-ecb-serpent-avx)",
2680 .test = alg_test_null,
2681 }, {
2682 .alg = "cryptd(__driver-ecb-serpent-avx2)",
2683 .test = alg_test_null,
2684 }, {
2685 .alg = "cryptd(__driver-ecb-serpent-sse2)",
2686 .test = alg_test_null,
2687 }, {
2688 .alg = "cryptd(__driver-ecb-twofish-avx)",
2689 .test = alg_test_null,
2690 }, {
2691 .alg = "cryptd(__driver-gcm-aes-aesni)",
2692 .test = alg_test_null,
2693 .fips_allowed = 1,
2694 }, {
2695 .alg = "cryptd(__ghash-pclmulqdqni)",
2696 .test = alg_test_null,
2697 .fips_allowed = 1,
2698 }, {
2699 .alg = "ctr(aes)",
2700 .test = alg_test_skcipher,
2701 .fips_allowed = 1,
2702 .suite = {
2703 .cipher = {
2704 .enc = {
2705 .vecs = aes_ctr_enc_tv_template,
2706 .count = AES_CTR_ENC_TEST_VECTORS
2708 .dec = {
2709 .vecs = aes_ctr_dec_tv_template,
2710 .count = AES_CTR_DEC_TEST_VECTORS
2714 }, {
2715 .alg = "ctr(blowfish)",
2716 .test = alg_test_skcipher,
2717 .suite = {
2718 .cipher = {
2719 .enc = {
2720 .vecs = bf_ctr_enc_tv_template,
2721 .count = BF_CTR_ENC_TEST_VECTORS
2723 .dec = {
2724 .vecs = bf_ctr_dec_tv_template,
2725 .count = BF_CTR_DEC_TEST_VECTORS
2729 }, {
2730 .alg = "ctr(camellia)",
2731 .test = alg_test_skcipher,
2732 .suite = {
2733 .cipher = {
2734 .enc = {
2735 .vecs = camellia_ctr_enc_tv_template,
2736 .count = CAMELLIA_CTR_ENC_TEST_VECTORS
2738 .dec = {
2739 .vecs = camellia_ctr_dec_tv_template,
2740 .count = CAMELLIA_CTR_DEC_TEST_VECTORS
2744 }, {
2745 .alg = "ctr(cast5)",
2746 .test = alg_test_skcipher,
2747 .suite = {
2748 .cipher = {
2749 .enc = {
2750 .vecs = cast5_ctr_enc_tv_template,
2751 .count = CAST5_CTR_ENC_TEST_VECTORS
2753 .dec = {
2754 .vecs = cast5_ctr_dec_tv_template,
2755 .count = CAST5_CTR_DEC_TEST_VECTORS
2759 }, {
2760 .alg = "ctr(cast6)",
2761 .test = alg_test_skcipher,
2762 .suite = {
2763 .cipher = {
2764 .enc = {
2765 .vecs = cast6_ctr_enc_tv_template,
2766 .count = CAST6_CTR_ENC_TEST_VECTORS
2768 .dec = {
2769 .vecs = cast6_ctr_dec_tv_template,
2770 .count = CAST6_CTR_DEC_TEST_VECTORS
2774 }, {
2775 .alg = "ctr(des)",
2776 .test = alg_test_skcipher,
2777 .suite = {
2778 .cipher = {
2779 .enc = {
2780 .vecs = des_ctr_enc_tv_template,
2781 .count = DES_CTR_ENC_TEST_VECTORS
2783 .dec = {
2784 .vecs = des_ctr_dec_tv_template,
2785 .count = DES_CTR_DEC_TEST_VECTORS
2789 }, {
2790 .alg = "ctr(des3_ede)",
2791 .test = alg_test_skcipher,
2792 .suite = {
2793 .cipher = {
2794 .enc = {
2795 .vecs = des3_ede_ctr_enc_tv_template,
2796 .count = DES3_EDE_CTR_ENC_TEST_VECTORS
2798 .dec = {
2799 .vecs = des3_ede_ctr_dec_tv_template,
2800 .count = DES3_EDE_CTR_DEC_TEST_VECTORS
2804 }, {
2805 .alg = "ctr(serpent)",
2806 .test = alg_test_skcipher,
2807 .suite = {
2808 .cipher = {
2809 .enc = {
2810 .vecs = serpent_ctr_enc_tv_template,
2811 .count = SERPENT_CTR_ENC_TEST_VECTORS
2813 .dec = {
2814 .vecs = serpent_ctr_dec_tv_template,
2815 .count = SERPENT_CTR_DEC_TEST_VECTORS
2819 }, {
2820 .alg = "ctr(twofish)",
2821 .test = alg_test_skcipher,
2822 .suite = {
2823 .cipher = {
2824 .enc = {
2825 .vecs = tf_ctr_enc_tv_template,
2826 .count = TF_CTR_ENC_TEST_VECTORS
2828 .dec = {
2829 .vecs = tf_ctr_dec_tv_template,
2830 .count = TF_CTR_DEC_TEST_VECTORS
2834 }, {
2835 .alg = "cts(cbc(aes))",
2836 .test = alg_test_skcipher,
2837 .suite = {
2838 .cipher = {
2839 .enc = {
2840 .vecs = cts_mode_enc_tv_template,
2841 .count = CTS_MODE_ENC_TEST_VECTORS
2843 .dec = {
2844 .vecs = cts_mode_dec_tv_template,
2845 .count = CTS_MODE_DEC_TEST_VECTORS
2849 }, {
2850 .alg = "deflate",
2851 .test = alg_test_comp,
2852 .fips_allowed = 1,
2853 .suite = {
2854 .comp = {
2855 .comp = {
2856 .vecs = deflate_comp_tv_template,
2857 .count = DEFLATE_COMP_TEST_VECTORS
2859 .decomp = {
2860 .vecs = deflate_decomp_tv_template,
2861 .count = DEFLATE_DECOMP_TEST_VECTORS
2865 }, {
2866 .alg = "dh",
2867 .test = alg_test_kpp,
2868 .fips_allowed = 1,
2869 .suite = {
2870 .kpp = {
2871 .vecs = dh_tv_template,
2872 .count = DH_TEST_VECTORS
2875 }, {
2876 .alg = "digest_null",
2877 .test = alg_test_null,
2878 }, {
2879 .alg = "drbg_nopr_ctr_aes128",
2880 .test = alg_test_drbg,
2881 .fips_allowed = 1,
2882 .suite = {
2883 .drbg = {
2884 .vecs = drbg_nopr_ctr_aes128_tv_template,
2885 .count = ARRAY_SIZE(drbg_nopr_ctr_aes128_tv_template)
2888 }, {
2889 .alg = "drbg_nopr_ctr_aes192",
2890 .test = alg_test_drbg,
2891 .fips_allowed = 1,
2892 .suite = {
2893 .drbg = {
2894 .vecs = drbg_nopr_ctr_aes192_tv_template,
2895 .count = ARRAY_SIZE(drbg_nopr_ctr_aes192_tv_template)
2898 }, {
2899 .alg = "drbg_nopr_ctr_aes256",
2900 .test = alg_test_drbg,
2901 .fips_allowed = 1,
2902 .suite = {
2903 .drbg = {
2904 .vecs = drbg_nopr_ctr_aes256_tv_template,
2905 .count = ARRAY_SIZE(drbg_nopr_ctr_aes256_tv_template)
2908 }, {
2909 /*
2910 * There is no need to specifically test the DRBG with every
2911 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
2912 */
2913 .alg = "drbg_nopr_hmac_sha1",
2914 .fips_allowed = 1,
2915 .test = alg_test_null,
2916 }, {
2917 .alg = "drbg_nopr_hmac_sha256",
2918 .test = alg_test_drbg,
2919 .fips_allowed = 1,
2920 .suite = {
2921 .drbg = {
2922 .vecs = drbg_nopr_hmac_sha256_tv_template,
2923 .count =
2924 ARRAY_SIZE(drbg_nopr_hmac_sha256_tv_template)
2927 }, {
2928 /* covered by drbg_nopr_hmac_sha256 test */
2929 .alg = "drbg_nopr_hmac_sha384",
2930 .fips_allowed = 1,
2931 .test = alg_test_null,
2932 }, {
2933 .alg = "drbg_nopr_hmac_sha512",
2934 .test = alg_test_null,
2935 .fips_allowed = 1,
2936 }, {
2937 .alg = "drbg_nopr_sha1",
2938 .fips_allowed = 1,
2939 .test = alg_test_null,
2940 }, {
2941 .alg = "drbg_nopr_sha256",
2942 .test = alg_test_drbg,
2943 .fips_allowed = 1,
2944 .suite = {
2945 .drbg = {
2946 .vecs = drbg_nopr_sha256_tv_template,
2947 .count = ARRAY_SIZE(drbg_nopr_sha256_tv_template)
2950 }, {
2951 /* covered by drbg_nopr_sha256 test */
2952 .alg = "drbg_nopr_sha384",
2953 .fips_allowed = 1,
2954 .test = alg_test_null,
2955 }, {
2956 .alg = "drbg_nopr_sha512",
2957 .fips_allowed = 1,
2958 .test = alg_test_null,
2959 }, {
2960 .alg = "drbg_pr_ctr_aes128",
2961 .test = alg_test_drbg,
2962 .fips_allowed = 1,
2963 .suite = {
2964 .drbg = {
2965 .vecs = drbg_pr_ctr_aes128_tv_template,
2966 .count = ARRAY_SIZE(drbg_pr_ctr_aes128_tv_template)
2969 }, {
2970 /* covered by drbg_pr_ctr_aes128 test */
2971 .alg = "drbg_pr_ctr_aes192",
2972 .fips_allowed = 1,
2973 .test = alg_test_null,
2974 }, {
2975 .alg = "drbg_pr_ctr_aes256",
2976 .fips_allowed = 1,
2977 .test = alg_test_null,
2978 }, {
2979 .alg = "drbg_pr_hmac_sha1",
2980 .fips_allowed = 1,
2981 .test = alg_test_null,
2982 }, {
2983 .alg = "drbg_pr_hmac_sha256",
2984 .test = alg_test_drbg,
2985 .fips_allowed = 1,
2986 .suite = {
2987 .drbg = {
2988 .vecs = drbg_pr_hmac_sha256_tv_template,
2989 .count = ARRAY_SIZE(drbg_pr_hmac_sha256_tv_template)
2992 }, {
2993 /* covered by drbg_pr_hmac_sha256 test */
2994 .alg = "drbg_pr_hmac_sha384",
2995 .fips_allowed = 1,
2996 .test = alg_test_null,
2997 }, {
2998 .alg = "drbg_pr_hmac_sha512",
2999 .test = alg_test_null,
3000 .fips_allowed = 1,
3001 }, {
3002 .alg = "drbg_pr_sha1",
3003 .fips_allowed = 1,
3004 .test = alg_test_null,
3005 }, {
3006 .alg = "drbg_pr_sha256",
3007 .test = alg_test_drbg,
3008 .fips_allowed = 1,
3009 .suite = {
3010 .drbg = {
3011 .vecs = drbg_pr_sha256_tv_template,
3012 .count = ARRAY_SIZE(drbg_pr_sha256_tv_template)
3015 }, {
3016 /* covered by drbg_pr_sha256 test */
3017 .alg = "drbg_pr_sha384",
3018 .fips_allowed = 1,
3019 .test = alg_test_null,
3020 }, {
3021 .alg = "drbg_pr_sha512",
3022 .fips_allowed = 1,
3023 .test = alg_test_null,
3024 }, {
3025 .alg = "ecb(__aes-aesni)",
3026 .test = alg_test_null,
3027 .fips_allowed = 1,
3028 }, {
3029 .alg = "ecb(aes)",
3030 .test = alg_test_skcipher,
3031 .fips_allowed = 1,
3032 .suite = {
3033 .cipher = {
3034 .enc = {
3035 .vecs = aes_enc_tv_template,
3036 .count = AES_ENC_TEST_VECTORS
3038 .dec = {
3039 .vecs = aes_dec_tv_template,
3040 .count = AES_DEC_TEST_VECTORS
3044 }, {
3045 .alg = "ecb(anubis)",
3046 .test = alg_test_skcipher,
3047 .suite = {
3048 .cipher = {
3049 .enc = {
3050 .vecs = anubis_enc_tv_template,
3051 .count = ANUBIS_ENC_TEST_VECTORS
3053 .dec = {
3054 .vecs = anubis_dec_tv_template,
3055 .count = ANUBIS_DEC_TEST_VECTORS
3059 }, {
3060 .alg = "ecb(arc4)",
3061 .test = alg_test_skcipher,
3062 .suite = {
3063 .cipher = {
3064 .enc = {
3065 .vecs = arc4_enc_tv_template,
3066 .count = ARC4_ENC_TEST_VECTORS
3068 .dec = {
3069 .vecs = arc4_dec_tv_template,
3070 .count = ARC4_DEC_TEST_VECTORS
3074 }, {
3075 .alg = "ecb(blowfish)",
3076 .test = alg_test_skcipher,
3077 .suite = {
3078 .cipher = {
3079 .enc = {
3080 .vecs = bf_enc_tv_template,
3081 .count = BF_ENC_TEST_VECTORS
3083 .dec = {
3084 .vecs = bf_dec_tv_template,
3085 .count = BF_DEC_TEST_VECTORS
3089 }, {
3090 .alg = "ecb(camellia)",
3091 .test = alg_test_skcipher,
3092 .suite = {
3093 .cipher = {
3094 .enc = {
3095 .vecs = camellia_enc_tv_template,
3096 .count = CAMELLIA_ENC_TEST_VECTORS
3098 .dec = {
3099 .vecs = camellia_dec_tv_template,
3100 .count = CAMELLIA_DEC_TEST_VECTORS
3104 }, {
3105 .alg = "ecb(cast5)",
3106 .test = alg_test_skcipher,
3107 .suite = {
3108 .cipher = {
3109 .enc = {
3110 .vecs = cast5_enc_tv_template,
3111 .count = CAST5_ENC_TEST_VECTORS
3113 .dec = {
3114 .vecs = cast5_dec_tv_template,
3115 .count = CAST5_DEC_TEST_VECTORS
3119 }, {
3120 .alg = "ecb(cast6)",
3121 .test = alg_test_skcipher,
3122 .suite = {
3123 .cipher = {
3124 .enc = {
3125 .vecs = cast6_enc_tv_template,
3126 .count = CAST6_ENC_TEST_VECTORS
3128 .dec = {
3129 .vecs = cast6_dec_tv_template,
3130 .count = CAST6_DEC_TEST_VECTORS
3134 }, {
3135 .alg = "ecb(cipher_null)",
3136 .test = alg_test_null,
3137 }, {
3138 .alg = "ecb(des)",
3139 .test = alg_test_skcipher,
3140 .suite = {
3141 .cipher = {
3142 .enc = {
3143 .vecs = des_enc_tv_template,
3144 .count = DES_ENC_TEST_VECTORS
3146 .dec = {
3147 .vecs = des_dec_tv_template,
3148 .count = DES_DEC_TEST_VECTORS
3152 }, {
3153 .alg = "ecb(des3_ede)",
3154 .test = alg_test_skcipher,
3155 .fips_allowed = 1,
3156 .suite = {
3157 .cipher = {
3158 .enc = {
3159 .vecs = des3_ede_enc_tv_template,
3160 .count = DES3_EDE_ENC_TEST_VECTORS
3162 .dec = {
3163 .vecs = des3_ede_dec_tv_template,
3164 .count = DES3_EDE_DEC_TEST_VECTORS
3168 }, {
3169 .alg = "ecb(fcrypt)",
3170 .test = alg_test_skcipher,
3171 .suite = {
3172 .cipher = {
3173 .enc = {
3174 .vecs = fcrypt_pcbc_enc_tv_template,
3175 .count = 1
3177 .dec = {
3178 .vecs = fcrypt_pcbc_dec_tv_template,
3179 .count = 1
3183 }, {
3184 .alg = "ecb(khazad)",
3185 .test = alg_test_skcipher,
3186 .suite = {
3187 .cipher = {
3188 .enc = {
3189 .vecs = khazad_enc_tv_template,
3190 .count = KHAZAD_ENC_TEST_VECTORS
3192 .dec = {
3193 .vecs = khazad_dec_tv_template,
3194 .count = KHAZAD_DEC_TEST_VECTORS
3198 }, {
3199 .alg = "ecb(seed)",
3200 .test = alg_test_skcipher,
3201 .suite = {
3202 .cipher = {
3203 .enc = {
3204 .vecs = seed_enc_tv_template,
3205 .count = SEED_ENC_TEST_VECTORS
3207 .dec = {
3208 .vecs = seed_dec_tv_template,
3209 .count = SEED_DEC_TEST_VECTORS
3213 }, {
3214 .alg = "ecb(serpent)",
3215 .test = alg_test_skcipher,
3216 .suite = {
3217 .cipher = {
3218 .enc = {
3219 .vecs = serpent_enc_tv_template,
3220 .count = SERPENT_ENC_TEST_VECTORS
3222 .dec = {
3223 .vecs = serpent_dec_tv_template,
3224 .count = SERPENT_DEC_TEST_VECTORS
3228 }, {
3229 .alg = "ecb(tea)",
3230 .test = alg_test_skcipher,
3231 .suite = {
3232 .cipher = {
3233 .enc = {
3234 .vecs = tea_enc_tv_template,
3235 .count = TEA_ENC_TEST_VECTORS
3237 .dec = {
3238 .vecs = tea_dec_tv_template,
3239 .count = TEA_DEC_TEST_VECTORS
3243 }, {
3244 .alg = "ecb(tnepres)",
3245 .test = alg_test_skcipher,
3246 .suite = {
3247 .cipher = {
3248 .enc = {
3249 .vecs = tnepres_enc_tv_template,
3250 .count = TNEPRES_ENC_TEST_VECTORS
3252 .dec = {
3253 .vecs = tnepres_dec_tv_template,
3254 .count = TNEPRES_DEC_TEST_VECTORS
3258 }, {
3259 .alg = "ecb(twofish)",
3260 .test = alg_test_skcipher,
3261 .suite = {
3262 .cipher = {
3263 .enc = {
3264 .vecs = tf_enc_tv_template,
3265 .count = TF_ENC_TEST_VECTORS
3267 .dec = {
3268 .vecs = tf_dec_tv_template,
3269 .count = TF_DEC_TEST_VECTORS
3273 }, {
3274 .alg = "ecb(xeta)",
3275 .test = alg_test_skcipher,
3276 .suite = {
3277 .cipher = {
3278 .enc = {
3279 .vecs = xeta_enc_tv_template,
3280 .count = XETA_ENC_TEST_VECTORS
3282 .dec = {
3283 .vecs = xeta_dec_tv_template,
3284 .count = XETA_DEC_TEST_VECTORS
3288 }, {
3289 .alg = "ecb(xtea)",
3290 .test = alg_test_skcipher,
3291 .suite = {
3292 .cipher = {
3293 .enc = {
3294 .vecs = xtea_enc_tv_template,
3295 .count = XTEA_ENC_TEST_VECTORS
3297 .dec = {
3298 .vecs = xtea_dec_tv_template,
3299 .count = XTEA_DEC_TEST_VECTORS
3303 }, {
3304 .alg = "ecdh",
3305 .test = alg_test_kpp,
3306 .fips_allowed = 1,
3307 .suite = {
3308 .kpp = {
3309 .vecs = ecdh_tv_template,
3310 .count = ECDH_TEST_VECTORS
3313 }, {
3314 .alg = "gcm(aes)",
3315 .test = alg_test_aead,
3316 .fips_allowed = 1,
3317 .suite = {
3318 .aead = {
3319 .enc = {
3320 .vecs = aes_gcm_enc_tv_template,
3321 .count = AES_GCM_ENC_TEST_VECTORS
3323 .dec = {
3324 .vecs = aes_gcm_dec_tv_template,
3325 .count = AES_GCM_DEC_TEST_VECTORS
3329 }, {
3330 .alg = "ghash",
3331 .test = alg_test_hash,
3332 .fips_allowed = 1,
3333 .suite = {
3334 .hash = {
3335 .vecs = ghash_tv_template,
3336 .count = GHASH_TEST_VECTORS
3339 }, {
3340 .alg = "hmac(crc32)",
3341 .test = alg_test_hash,
3342 .suite = {
3343 .hash = {
3344 .vecs = bfin_crc_tv_template,
3345 .count = BFIN_CRC_TEST_VECTORS
3348 }, {
3349 .alg = "hmac(md5)",
3350 .test = alg_test_hash,
3351 .suite = {
3352 .hash = {
3353 .vecs = hmac_md5_tv_template,
3354 .count = HMAC_MD5_TEST_VECTORS
3357 }, {
3358 .alg = "hmac(rmd128)",
3359 .test = alg_test_hash,
3360 .suite = {
3361 .hash = {
3362 .vecs = hmac_rmd128_tv_template,
3363 .count = HMAC_RMD128_TEST_VECTORS
3366 }, {
3367 .alg = "hmac(rmd160)",
3368 .test = alg_test_hash,
3369 .suite = {
3370 .hash = {
3371 .vecs = hmac_rmd160_tv_template,
3372 .count = HMAC_RMD160_TEST_VECTORS
3375 }, {
3376 .alg = "hmac(sha1)",
3377 .test = alg_test_hash,
3378 .fips_allowed = 1,
3379 .suite = {
3380 .hash = {
3381 .vecs = hmac_sha1_tv_template,
3382 .count = HMAC_SHA1_TEST_VECTORS
3385 }, {
3386 .alg = "hmac(sha224)",
3387 .test = alg_test_hash,
3388 .fips_allowed = 1,
3389 .suite = {
3390 .hash = {
3391 .vecs = hmac_sha224_tv_template,
3392 .count = HMAC_SHA224_TEST_VECTORS
3395 }, {
3396 .alg = "hmac(sha256)",
3397 .test = alg_test_hash,
3398 .fips_allowed = 1,
3399 .suite = {
3400 .hash = {
3401 .vecs = hmac_sha256_tv_template,
3402 .count = HMAC_SHA256_TEST_VECTORS
3405 }, {
3406 .alg = "hmac(sha3-224)",
3407 .test = alg_test_hash,
3408 .fips_allowed = 1,
3409 .suite = {
3410 .hash = {
3411 .vecs = hmac_sha3_224_tv_template,
3412 .count = HMAC_SHA3_224_TEST_VECTORS
3415 }, {
3416 .alg = "hmac(sha3-256)",
3417 .test = alg_test_hash,
3418 .fips_allowed = 1,
3419 .suite = {
3420 .hash = {
3421 .vecs = hmac_sha3_256_tv_template,
3422 .count = HMAC_SHA3_256_TEST_VECTORS
3425 }, {
3426 .alg = "hmac(sha3-384)",
3427 .test = alg_test_hash,
3428 .fips_allowed = 1,
3429 .suite = {
3430 .hash = {
3431 .vecs = hmac_sha3_384_tv_template,
3432 .count = HMAC_SHA3_384_TEST_VECTORS
3435 }, {
3436 .alg = "hmac(sha3-512)",
3437 .test = alg_test_hash,
3438 .fips_allowed = 1,
3439 .suite = {
3440 .hash = {
3441 .vecs = hmac_sha3_512_tv_template,
3442 .count = HMAC_SHA3_512_TEST_VECTORS
3445 }, {
3446 .alg = "hmac(sha384)",
3447 .test = alg_test_hash,
3448 .fips_allowed = 1,
3449 .suite = {
3450 .hash = {
3451 .vecs = hmac_sha384_tv_template,
3452 .count = HMAC_SHA384_TEST_VECTORS
3455 }, {
3456 .alg = "hmac(sha512)",
3457 .test = alg_test_hash,
3458 .fips_allowed = 1,
3459 .suite = {
3460 .hash = {
3461 .vecs = hmac_sha512_tv_template,
3462 .count = HMAC_SHA512_TEST_VECTORS
3465 }, {
3466 .alg = "jitterentropy_rng",
3467 .fips_allowed = 1,
3468 .test = alg_test_null,
3469 }, {
3470 .alg = "kw(aes)",
3471 .test = alg_test_skcipher,
3472 .fips_allowed = 1,
3473 .suite = {
3474 .cipher = {
3475 .enc = {
3476 .vecs = aes_kw_enc_tv_template,
3477 .count = ARRAY_SIZE(aes_kw_enc_tv_template)
3479 .dec = {
3480 .vecs = aes_kw_dec_tv_template,
3481 .count = ARRAY_SIZE(aes_kw_dec_tv_template)
3485 }, {
3486 .alg = "lrw(aes)",
3487 .test = alg_test_skcipher,
3488 .suite = {
3489 .cipher = {
3490 .enc = {
3491 .vecs = aes_lrw_enc_tv_template,
3492 .count = AES_LRW_ENC_TEST_VECTORS
3494 .dec = {
3495 .vecs = aes_lrw_dec_tv_template,
3496 .count = AES_LRW_DEC_TEST_VECTORS
3500 }, {
3501 .alg = "lrw(camellia)",
3502 .test = alg_test_skcipher,
3503 .suite = {
3504 .cipher = {
3505 .enc = {
3506 .vecs = camellia_lrw_enc_tv_template,
3507 .count = CAMELLIA_LRW_ENC_TEST_VECTORS
3509 .dec = {
3510 .vecs = camellia_lrw_dec_tv_template,
3511 .count = CAMELLIA_LRW_DEC_TEST_VECTORS
3515 }, {
3516 .alg = "lrw(cast6)",
3517 .test = alg_test_skcipher,
3518 .suite = {
3519 .cipher = {
3520 .enc = {
3521 .vecs = cast6_lrw_enc_tv_template,
3522 .count = CAST6_LRW_ENC_TEST_VECTORS
3524 .dec = {
3525 .vecs = cast6_lrw_dec_tv_template,
3526 .count = CAST6_LRW_DEC_TEST_VECTORS
3530 }, {
3531 .alg = "lrw(serpent)",
3532 .test = alg_test_skcipher,
3533 .suite = {
3534 .cipher = {
3535 .enc = {
3536 .vecs = serpent_lrw_enc_tv_template,
3537 .count = SERPENT_LRW_ENC_TEST_VECTORS
3539 .dec = {
3540 .vecs = serpent_lrw_dec_tv_template,
3541 .count = SERPENT_LRW_DEC_TEST_VECTORS
3545 }, {
3546 .alg = "lrw(twofish)",
3547 .test = alg_test_skcipher,
3548 .suite = {
3549 .cipher = {
3550 .enc = {
3551 .vecs = tf_lrw_enc_tv_template,
3552 .count = TF_LRW_ENC_TEST_VECTORS
3554 .dec = {
3555 .vecs = tf_lrw_dec_tv_template,
3556 .count = TF_LRW_DEC_TEST_VECTORS
3560 }, {
3561 .alg = "lz4",
3562 .test = alg_test_comp,
3563 .fips_allowed = 1,
3564 .suite = {
3565 .comp = {
3566 .comp = {
3567 .vecs = lz4_comp_tv_template,
3568 .count = LZ4_COMP_TEST_VECTORS
3570 .decomp = {
3571 .vecs = lz4_decomp_tv_template,
3572 .count = LZ4_DECOMP_TEST_VECTORS
3576 }, {
3577 .alg = "lz4hc",
3578 .test = alg_test_comp,
3579 .fips_allowed = 1,
3580 .suite = {
3581 .comp = {
3582 .comp = {
3583 .vecs = lz4hc_comp_tv_template,
3584 .count = LZ4HC_COMP_TEST_VECTORS
3586 .decomp = {
3587 .vecs = lz4hc_decomp_tv_template,
3588 .count = LZ4HC_DECOMP_TEST_VECTORS
3592 }, {
3593 .alg = "lzo",
3594 .test = alg_test_comp,
3595 .fips_allowed = 1,
3596 .suite = {
3597 .comp = {
3598 .comp = {
3599 .vecs = lzo_comp_tv_template,
3600 .count = LZO_COMP_TEST_VECTORS
3602 .decomp = {
3603 .vecs = lzo_decomp_tv_template,
3604 .count = LZO_DECOMP_TEST_VECTORS
3608 }, {
3609 .alg = "md4",
3610 .test = alg_test_hash,
3611 .suite = {
3612 .hash = {
3613 .vecs = md4_tv_template,
3614 .count = MD4_TEST_VECTORS
3617 }, {
3618 .alg = "md5",
3619 .test = alg_test_hash,
3620 .suite = {
3621 .hash = {
3622 .vecs = md5_tv_template,
3623 .count = MD5_TEST_VECTORS
3626 }, {
3627 .alg = "michael_mic",
3628 .test = alg_test_hash,
3629 .suite = {
3630 .hash = {
3631 .vecs = michael_mic_tv_template,
3632 .count = MICHAEL_MIC_TEST_VECTORS
3635 }, {
3636 .alg = "ofb(aes)",
3637 .test = alg_test_skcipher,
3638 .fips_allowed = 1,
3639 .suite = {
3640 .cipher = {
3641 .enc = {
3642 .vecs = aes_ofb_enc_tv_template,
3643 .count = AES_OFB_ENC_TEST_VECTORS
3645 .dec = {
3646 .vecs = aes_ofb_dec_tv_template,
3647 .count = AES_OFB_DEC_TEST_VECTORS
3651 }, {
3652 .alg = "pcbc(fcrypt)",
3653 .test = alg_test_skcipher,
3654 .suite = {
3655 .cipher = {
3656 .enc = {
3657 .vecs = fcrypt_pcbc_enc_tv_template,
3658 .count = FCRYPT_ENC_TEST_VECTORS
3660 .dec = {
3661 .vecs = fcrypt_pcbc_dec_tv_template,
3662 .count = FCRYPT_DEC_TEST_VECTORS
3666 }, {
3667 .alg = "poly1305",
3668 .test = alg_test_hash,
3669 .suite = {
3670 .hash = {
3671 .vecs = poly1305_tv_template,
3672 .count = POLY1305_TEST_VECTORS
3675 }, {
3676 .alg = "rfc3686(ctr(aes))",
3677 .test = alg_test_skcipher,
3678 .fips_allowed = 1,
3679 .suite = {
3680 .cipher = {
3681 .enc = {
3682 .vecs = aes_ctr_rfc3686_enc_tv_template,
3683 .count = AES_CTR_3686_ENC_TEST_VECTORS
3685 .dec = {
3686 .vecs = aes_ctr_rfc3686_dec_tv_template,
3687 .count = AES_CTR_3686_DEC_TEST_VECTORS
3691 }, {
3692 .alg = "rfc4106(gcm(aes))",
3693 .test = alg_test_aead,
3694 .fips_allowed = 1,
3695 .suite = {
3696 .aead = {
3697 .enc = {
3698 .vecs = aes_gcm_rfc4106_enc_tv_template,
3699 .count = AES_GCM_4106_ENC_TEST_VECTORS
3701 .dec = {
3702 .vecs = aes_gcm_rfc4106_dec_tv_template,
3703 .count = AES_GCM_4106_DEC_TEST_VECTORS
3707 }, {
3708 .alg = "rfc4309(ccm(aes))",
3709 .test = alg_test_aead,
3710 .fips_allowed = 1,
3711 .suite = {
3712 .aead = {
3713 .enc = {
3714 .vecs = aes_ccm_rfc4309_enc_tv_template,
3715 .count = AES_CCM_4309_ENC_TEST_VECTORS
3717 .dec = {
3718 .vecs = aes_ccm_rfc4309_dec_tv_template,
3719 .count = AES_CCM_4309_DEC_TEST_VECTORS
3723 }, {
3724 .alg = "rfc4543(gcm(aes))",
3725 .test = alg_test_aead,
3726 .suite = {
3727 .aead = {
3728 .enc = {
3729 .vecs = aes_gcm_rfc4543_enc_tv_template,
3730 .count = AES_GCM_4543_ENC_TEST_VECTORS
3732 .dec = {
3733 .vecs = aes_gcm_rfc4543_dec_tv_template,
3734 .count = AES_GCM_4543_DEC_TEST_VECTORS
3738 }, {
3739 .alg = "rfc7539(chacha20,poly1305)",
3740 .test = alg_test_aead,
3741 .suite = {
3742 .aead = {
3743 .enc = {
3744 .vecs = rfc7539_enc_tv_template,
3745 .count = RFC7539_ENC_TEST_VECTORS
3747 .dec = {
3748 .vecs = rfc7539_dec_tv_template,
3749 .count = RFC7539_DEC_TEST_VECTORS
3753 }, {
3754 .alg = "rfc7539esp(chacha20,poly1305)",
3755 .test = alg_test_aead,
3756 .suite = {
3757 .aead = {
3758 .enc = {
3759 .vecs = rfc7539esp_enc_tv_template,
3760 .count = RFC7539ESP_ENC_TEST_VECTORS
3762 .dec = {
3763 .vecs = rfc7539esp_dec_tv_template,
3764 .count = RFC7539ESP_DEC_TEST_VECTORS
3768 }, {
3769 .alg = "rmd128",
3770 .test = alg_test_hash,
3771 .suite = {
3772 .hash = {
3773 .vecs = rmd128_tv_template,
3774 .count = RMD128_TEST_VECTORS
3777 }, {
3778 .alg = "rmd160",
3779 .test = alg_test_hash,
3780 .suite = {
3781 .hash = {
3782 .vecs = rmd160_tv_template,
3783 .count = RMD160_TEST_VECTORS
3786 }, {
3787 .alg = "rmd256",
3788 .test = alg_test_hash,
3789 .suite = {
3790 .hash = {
3791 .vecs = rmd256_tv_template,
3792 .count = RMD256_TEST_VECTORS
3795 }, {
3796 .alg = "rmd320",
3797 .test = alg_test_hash,
3798 .suite = {
3799 .hash = {
3800 .vecs = rmd320_tv_template,
3801 .count = RMD320_TEST_VECTORS
3804 }, {
3805 .alg = "rsa",
3806 .test = alg_test_akcipher,
3807 .fips_allowed = 1,
3808 .suite = {
3809 .akcipher = {
3810 .vecs = rsa_tv_template,
3811 .count = RSA_TEST_VECTORS
3814 }, {
3815 .alg = "salsa20",
3816 .test = alg_test_skcipher,
3817 .suite = {
3818 .cipher = {
3819 .enc = {
3820 .vecs = salsa20_stream_enc_tv_template,
3821 .count = SALSA20_STREAM_ENC_TEST_VECTORS
3825 }, {
3826 .alg = "sha1",
3827 .test = alg_test_hash,
3828 .fips_allowed = 1,
3829 .suite = {
3830 .hash = {
3831 .vecs = sha1_tv_template,
3832 .count = SHA1_TEST_VECTORS
3835 }, {
3836 .alg = "sha224",
3837 .test = alg_test_hash,
3838 .fips_allowed = 1,
3839 .suite = {
3840 .hash = {
3841 .vecs = sha224_tv_template,
3842 .count = SHA224_TEST_VECTORS
3845 }, {
3846 .alg = "sha256",
3847 .test = alg_test_hash,
3848 .fips_allowed = 1,
3849 .suite = {
3850 .hash = {
3851 .vecs = sha256_tv_template,
3852 .count = SHA256_TEST_VECTORS
3855 }, {
3856 .alg = "sha3-224",
3857 .test = alg_test_hash,
3858 .fips_allowed = 1,
3859 .suite = {
3860 .hash = {
3861 .vecs = sha3_224_tv_template,
3862 .count = SHA3_224_TEST_VECTORS
3865 }, {
3866 .alg = "sha3-256",
3867 .test = alg_test_hash,
3868 .fips_allowed = 1,
3869 .suite = {
3870 .hash = {
3871 .vecs = sha3_256_tv_template,
3872 .count = SHA3_256_TEST_VECTORS
3875 }, {
3876 .alg = "sha3-384",
3877 .test = alg_test_hash,
3878 .fips_allowed = 1,
3879 .suite = {
3880 .hash = {
3881 .vecs = sha3_384_tv_template,
3882 .count = SHA3_384_TEST_VECTORS
3885 }, {
3886 .alg = "sha3-512",
3887 .test = alg_test_hash,
3888 .fips_allowed = 1,
3889 .suite = {
3890 .hash = {
3891 .vecs = sha3_512_tv_template,
3892 .count = SHA3_512_TEST_VECTORS
3895 }, {
3896 .alg = "sha384",
3897 .test = alg_test_hash,
3898 .fips_allowed = 1,
3899 .suite = {
3900 .hash = {
3901 .vecs = sha384_tv_template,
3902 .count = SHA384_TEST_VECTORS
3905 }, {
3906 .alg = "sha512",
3907 .test = alg_test_hash,
3908 .fips_allowed = 1,
3909 .suite = {
3910 .hash = {
3911 .vecs = sha512_tv_template,
3912 .count = SHA512_TEST_VECTORS
3915 }, {
3916 .alg = "tgr128",
3917 .test = alg_test_hash,
3918 .suite = {
3919 .hash = {
3920 .vecs = tgr128_tv_template,
3921 .count = TGR128_TEST_VECTORS
3924 }, {
3925 .alg = "tgr160",
3926 .test = alg_test_hash,
3927 .suite = {
3928 .hash = {
3929 .vecs = tgr160_tv_template,
3930 .count = TGR160_TEST_VECTORS
3933 }, {
3934 .alg = "tgr192",
3935 .test = alg_test_hash,
3936 .suite = {
3937 .hash = {
3938 .vecs = tgr192_tv_template,
3939 .count = TGR192_TEST_VECTORS
3942 }, {
3943 .alg = "vmac(aes)",
3944 .test = alg_test_hash,
3945 .suite = {
3946 .hash = {
3947 .vecs = aes_vmac128_tv_template,
3948 .count = VMAC_AES_TEST_VECTORS
3951 }, {
3952 .alg = "wp256",
3953 .test = alg_test_hash,
3954 .suite = {
3955 .hash = {
3956 .vecs = wp256_tv_template,
3957 .count = WP256_TEST_VECTORS
3960 }, {
3961 .alg = "wp384",
3962 .test = alg_test_hash,
3963 .suite = {
3964 .hash = {
3965 .vecs = wp384_tv_template,
3966 .count = WP384_TEST_VECTORS
3969 }, {
3970 .alg = "wp512",
3971 .test = alg_test_hash,
3972 .suite = {
3973 .hash = {
3974 .vecs = wp512_tv_template,
3975 .count = WP512_TEST_VECTORS
3978 }, {
3979 .alg = "xcbc(aes)",
3980 .test = alg_test_hash,
3981 .suite = {
3982 .hash = {
3983 .vecs = aes_xcbc128_tv_template,
3984 .count = XCBC_AES_TEST_VECTORS
3987 }, {
3988 .alg = "xts(aes)",
3989 .test = alg_test_skcipher,
3990 .fips_allowed = 1,
3991 .suite = {
3992 .cipher = {
3993 .enc = {
3994 .vecs = aes_xts_enc_tv_template,
3995 .count = AES_XTS_ENC_TEST_VECTORS
3997 .dec = {
3998 .vecs = aes_xts_dec_tv_template,
3999 .count = AES_XTS_DEC_TEST_VECTORS
4003 }, {
4004 .alg = "xts(camellia)",
4005 .test = alg_test_skcipher,
4006 .suite = {
4007 .cipher = {
4008 .enc = {
4009 .vecs = camellia_xts_enc_tv_template,
4010 .count = CAMELLIA_XTS_ENC_TEST_VECTORS
4012 .dec = {
4013 .vecs = camellia_xts_dec_tv_template,
4014 .count = CAMELLIA_XTS_DEC_TEST_VECTORS
4018 }, {
4019 .alg = "xts(cast6)",
4020 .test = alg_test_skcipher,
4021 .suite = {
4022 .cipher = {
4023 .enc = {
4024 .vecs = cast6_xts_enc_tv_template,
4025 .count = CAST6_XTS_ENC_TEST_VECTORS
4027 .dec = {
4028 .vecs = cast6_xts_dec_tv_template,
4029 .count = CAST6_XTS_DEC_TEST_VECTORS
4033 }, {
4034 .alg = "xts(serpent)",
4035 .test = alg_test_skcipher,
4036 .suite = {
4037 .cipher = {
4038 .enc = {
4039 .vecs = serpent_xts_enc_tv_template,
4040 .count = SERPENT_XTS_ENC_TEST_VECTORS
4042 .dec = {
4043 .vecs = serpent_xts_dec_tv_template,
4044 .count = SERPENT_XTS_DEC_TEST_VECTORS
4048 }, {
4049 .alg = "xts(twofish)",
4050 .test = alg_test_skcipher,
4051 .suite = {
4052 .cipher = {
4053 .enc = {
4054 .vecs = tf_xts_enc_tv_template,
4055 .count = TF_XTS_ENC_TEST_VECTORS
4057 .dec = {
4058 .vecs = tf_xts_dec_tv_template,
4059 .count = TF_XTS_DEC_TEST_VECTORS
4060 }
4061 }
4062 }
4063 }
4064 };
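/*
 * A minimal sketch of the shape shared by the entries above, using
 * placeholder names (example_tv_template / EXAMPLE_TEST_VECTORS are not
 * real symbols from testmgr.h):
 *
 *	}, {
 *		.alg = "example",
 *		.test = alg_test_hash,
 *		.suite = {
 *			.hash = {
 *				.vecs = example_tv_template,
 *				.count = EXAMPLE_TEST_VECTORS
 *			}
 *		}
 *	}
 *
 * New entries must keep the array sorted by .alg and should set
 * .fips_allowed only for algorithms permitted in FIPS mode.
 */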
4066 static bool alg_test_descs_checked;
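/*
 * alg_test_descs[] must stay sorted by .alg, since alg_find_test()
 * binary-searches it.  The check below runs once, on the first call to
 * alg_test(), and warns about out-of-order or duplicate entries.
 */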
4068 static void alg_test_descs_check_order(void)
4069 {
4070 int i;
4072 /* only check once */
4073 if (alg_test_descs_checked)
4074 return;
4076 alg_test_descs_checked = true;
4078 for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
4079 int diff = strcmp(alg_test_descs[i - 1].alg,
4080 alg_test_descs[i].alg);
4082 if (WARN_ON(diff > 0)) {
4083 pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
4084 alg_test_descs[i - 1].alg,
4085 alg_test_descs[i].alg);
4086 }
4088 if (WARN_ON(diff == 0)) {
4089 pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
4090 alg_test_descs[i].alg);
4091 }
4092 }
4093 }
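/* Binary search in the sorted table; returns the index or -1 if absent. */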
4095 static int alg_find_test(const char *alg)
4096 {
4097 int start = 0;
4098 int end = ARRAY_SIZE(alg_test_descs);
4100 while (start < end) {
4101 int i = (start + end) / 2;
4102 int diff = strcmp(alg_test_descs[i].alg, alg);
4104 if (diff > 0) {
4105 end = i;
4106 continue;
4107 }
4109 if (diff < 0) {
4110 start = i + 1;
4111 continue;
4112 }
4114 return i;
4115 }
4117 return -1;
4118 }
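/*
 * alg_test() is the entry point used by the crypto manager when a new
 * algorithm instance is registered: it runs whatever self-tests are
 * registered for the algorithm and/or driver name and returns 0 only if
 * all of them pass (or none exist).
 */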
4120 int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
4121 {
4122 int i;
4123 int j;
4124 int rc;
4126 if (!fips_enabled && notests) {
4127 printk_once(KERN_INFO "alg: self-tests disabled\n");
4128 return 0;
4129 }
4131 alg_test_descs_check_order();
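/*
 * Bare single-block ciphers (CRYPTO_ALG_TYPE_CIPHER) have no entries of
 * their own: they are looked up as "ecb(<alg>)" -- e.g. "aes" is tested
 * via the "ecb(aes)" entry -- and run through alg_test_cipher().
 */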
4133 if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
4134 char nalg[CRYPTO_MAX_ALG_NAME];
4136 if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
4137 sizeof(nalg))
4138 return -ENAMETOOLONG;
4140 i = alg_find_test(nalg);
4141 if (i < 0)
4142 goto notest;
4144 if (fips_enabled && !alg_test_descs[i].fips_allowed)
4145 goto non_fips_alg;
4147 rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
4148 goto test_done;
4149 }
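/*
 * Everything else is looked up both by algorithm name and by driver
 * name; this is why driver-specific names such as
 * "cryptd(__driver-gcm-aes-aesni)" have their own (null) entries above.
 */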
4151 i = alg_find_test(alg);
4152 j = alg_find_test(driver);
4153 if (i < 0 && j < 0)
4154 goto notest;
4156 if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
4157 (j >= 0 && !alg_test_descs[j].fips_allowed)))
4158 goto non_fips_alg;
4160 rc = 0;
4161 if (i >= 0)
4162 rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
4163 type, mask);
4164 if (j >= 0 && j != i)
4165 rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
4166 type, mask);
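/*
 * In FIPS mode any self-test failure is fatal (panic) and a pass is
 * logged; otherwise the combined result is simply returned.
 */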
4168 test_done:
4169 if (fips_enabled && rc)
4170 panic("%s: %s alg self test failed in fips mode!\n", driver, alg);
4172 if (fips_enabled && !rc)
4173 pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);
4175 return rc;
4177 notest:
4178 printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
4179 return 0;
4180 non_fips_alg:
4181 return -EINVAL;
4182 }
4184 #endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */
4186 EXPORT_SYMBOL_GPL(alg_test);