// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Algorithm testing framework and tests.
 *
 * Copyright (c) 2002 James Morris <jmorris@intercode.com.au>
 * Copyright (c) 2002 Jean-Francois Dive <jef@linuxbe.org>
 * Copyright (c) 2007 Nokia Siemens Networks
 * Copyright (c) 2008 Herbert Xu <herbert@gondor.apana.org.au>
 * Copyright (c) 2019 Google LLC
 *
 * Updated RFC4106 AES-GCM testing.
 *    Authors: Aidan O'Mahony (aidan.o.mahony@intel.com)
 *             Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <crypto/aead.h>
#include <crypto/hash.h>
#include <crypto/skcipher.h>
#include <linux/err.h>
#include <linux/fips.h>
#include <linux/module.h>
#include <linux/once.h>
#include <linux/random.h>
#include <linux/scatterlist.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <crypto/rng.h>
#include <crypto/drbg.h>
#include <crypto/akcipher.h>
#include <crypto/kpp.h>
#include <crypto/acompress.h>
#include <crypto/internal/simd.h>
static bool notests;
module_param(notests, bool, 0644);
MODULE_PARM_DESC(notests, "disable crypto self-tests");

static bool panic_on_fail;
module_param(panic_on_fail, bool, 0444);

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
static bool noextratests;
module_param(noextratests, bool, 0644);
MODULE_PARM_DESC(noextratests, "disable expensive crypto self-tests");

static unsigned int fuzz_iterations = 100;
module_param(fuzz_iterations, uint, 0644);
MODULE_PARM_DESC(fuzz_iterations, "number of fuzz test iterations");

DEFINE_PER_CPU(bool, crypto_simd_disabled_for_test);
EXPORT_PER_CPU_SYMBOL_GPL(crypto_simd_disabled_for_test);
#endif
#ifdef CONFIG_CRYPTO_MANAGER_DISABLE_TESTS

/* a perfect nop */
int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	return 0;
}

#else
/*
 * Need slab memory for testing (size in number of pages).
 */
#define XBUFSIZE	8

/*
 * Used by test_cipher()
 */
#define ENCRYPT 1
#define DECRYPT 0
struct aead_test_suite {
	const struct aead_testvec *vecs;
	unsigned int count;

	/*
	 * Set if trying to decrypt an inauthentic ciphertext with this
	 * algorithm might result in EINVAL rather than EBADMSG, due to other
	 * validation the algorithm does on the inputs such as length checks.
	 */
	unsigned int einval_allowed : 1;

	/*
	 * Set if the algorithm intentionally ignores the last 8 bytes of the
	 * AAD buffer during decryption.
	 */
	unsigned int esp_aad : 1;
};

struct cipher_test_suite {
	const struct cipher_testvec *vecs;
	unsigned int count;
};

struct comp_test_suite {
	struct {
		const struct comp_testvec *vecs;
		unsigned int count;
	} comp, decomp;
};

struct hash_test_suite {
	const struct hash_testvec *vecs;
	unsigned int count;
};

struct cprng_test_suite {
	const struct cprng_testvec *vecs;
	unsigned int count;
};

struct drbg_test_suite {
	const struct drbg_testvec *vecs;
	unsigned int count;
};

struct akcipher_test_suite {
	const struct akcipher_testvec *vecs;
	unsigned int count;
};

struct kpp_test_suite {
	const struct kpp_testvec *vecs;
	unsigned int count;
};

struct alg_test_desc {
	const char *alg;
	const char *generic_driver;
	int (*test)(const struct alg_test_desc *desc, const char *driver,
		    u32 type, u32 mask);
	int fips_allowed;	/* set if alg is allowed in fips mode */

	union {
		struct aead_test_suite aead;
		struct cipher_test_suite cipher;
		struct comp_test_suite comp;
		struct hash_test_suite hash;
		struct cprng_test_suite cprng;
		struct drbg_test_suite drbg;
		struct akcipher_test_suite akcipher;
		struct kpp_test_suite kpp;
	} suite;
};
static void hexdump(unsigned char *buf, unsigned int len)
{
	print_hex_dump(KERN_CONT, "", DUMP_PREFIX_OFFSET,
			16, 1,
			buf, len, false);
}
static int __testmgr_alloc_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++) {
		buf[i] = (char *)__get_free_pages(GFP_KERNEL, order);
		if (!buf[i])
			goto err_free_buf;
	}

	return 0;

err_free_buf:
	while (i-- > 0)
		free_pages((unsigned long)buf[i], order);

	return -ENOMEM;
}

static int testmgr_alloc_buf(char *buf[XBUFSIZE])
{
	return __testmgr_alloc_buf(buf, 0);
}

static void __testmgr_free_buf(char *buf[XBUFSIZE], int order)
{
	int i;

	for (i = 0; i < XBUFSIZE; i++)
		free_pages((unsigned long)buf[i], order);
}

static void testmgr_free_buf(char *buf[XBUFSIZE])
{
	__testmgr_free_buf(buf, 0);
}
#define TESTMGR_POISON_BYTE	0xfe
#define TESTMGR_POISON_LEN	16

static inline void testmgr_poison(void *addr, size_t len)
{
	memset(addr, TESTMGR_POISON_BYTE, len);
}

/* Is the memory region still fully poisoned? */
static inline bool testmgr_is_poison(const void *addr, size_t len)
{
	return memchr_inv(addr, TESTMGR_POISON_BYTE, len) == NULL;
}
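/*
 * Note (summary of how the helpers above are used): the tests poison the bytes
 * just past each output or state buffer, then call testmgr_is_poison()
 * afterwards to verify that those bytes were left untouched, i.e. that the
 * algorithm did not overrun its buffers.
 */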
/* flush type for hash algorithms */
enum flush_type {
	/* merge with update of previous buffer(s) */
	FLUSH_TYPE_NONE = 0,

	/* update with previous buffer(s) before doing this one */
	FLUSH_TYPE_FLUSH,

	/* likewise, but also export and re-import the intermediate state */
	FLUSH_TYPE_REIMPORT,
};

/* finalization function for hash algorithms */
enum finalization_type {
	FINALIZATION_TYPE_FINAL,	/* use final() */
	FINALIZATION_TYPE_FINUP,	/* use finup() */
	FINALIZATION_TYPE_DIGEST,	/* use digest() */
};
#define TEST_SG_TOTAL	10000

/*
 * struct test_sg_division - description of a scatterlist entry
 *
 * This struct describes one entry of a scatterlist being constructed to check a
 * crypto test vector.
 *
 * @proportion_of_total: length of this chunk relative to the total length,
 *			 given as a proportion out of TEST_SG_TOTAL so that it
 *			 scales to fit any test vector
 * @offset: byte offset into a 2-page buffer at which this chunk will start
 * @offset_relative_to_alignmask: if true, add the algorithm's alignmask to the
 *				  @offset
 * @flush_type: for hashes, whether an update() should be done now vs.
 *		continuing to accumulate data
 * @nosimd: if doing the pending update(), do it with SIMD disabled?
 */
struct test_sg_division {
	unsigned int proportion_of_total;
	unsigned int offset;
	bool offset_relative_to_alignmask;
	enum flush_type flush_type;
	bool nosimd;
};
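/*
 * Illustrative example: with TEST_SG_TOTAL == 10000, a division with
 * .proportion_of_total = 2500 resolves to roughly 25% of the test vector's
 * total length, since build_test_sglist() computes each entry's byte length as
 * (total_len * proportion_of_total + TEST_SG_TOTAL / 2) / TEST_SG_TOTAL.
 */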
/*
 * struct testvec_config - configuration for testing a crypto test vector
 *
 * This struct describes the data layout and other parameters with which each
 * crypto test vector can be tested.
 *
 * @name: name of this config, logged for debugging purposes if a test fails
 * @inplace: operate on the data in-place, if applicable for the algorithm type?
 * @req_flags: extra request_flags, e.g. CRYPTO_TFM_REQ_MAY_SLEEP
 * @src_divs: description of how to arrange the source scatterlist
 * @dst_divs: description of how to arrange the dst scatterlist, if applicable
 *	      for the algorithm type.  Defaults to @src_divs if unset.
 * @iv_offset: misalignment of the IV in the range [0..MAX_ALGAPI_ALIGNMASK+1],
 *	       where 0 is aligned to a 2*(MAX_ALGAPI_ALIGNMASK+1) byte boundary
 * @iv_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				     the @iv_offset
 * @key_offset: misalignment of the key, where 0 is default alignment
 * @key_offset_relative_to_alignmask: if true, add the algorithm's alignmask to
 *				      the @key_offset
 * @finalization_type: what finalization function to use for hashes
 * @nosimd: execute with SIMD disabled?  Requires !CRYPTO_TFM_REQ_MAY_SLEEP.
 */
struct testvec_config {
	const char *name;
	bool inplace;
	u32 req_flags;
	struct test_sg_division src_divs[XBUFSIZE];
	struct test_sg_division dst_divs[XBUFSIZE];
	unsigned int iv_offset;
	unsigned int key_offset;
	bool iv_offset_relative_to_alignmask;
	bool key_offset_relative_to_alignmask;
	enum finalization_type finalization_type;
	bool nosimd;
};

#define TESTVEC_CONFIG_NAMELEN	192
/*
 * The following are the lists of testvec_configs to test for each algorithm
 * type when the basic crypto self-tests are enabled, i.e. when
 * CONFIG_CRYPTO_MANAGER_DISABLE_TESTS is unset.  They aim to provide good test
 * coverage, while keeping the test time much shorter than the full fuzz tests
 * so that the basic tests can be enabled in a wider range of circumstances.
 */

/* Configs for skciphers and aeads */
static const struct testvec_config default_cipher_testvec_configs[] = {
	{
		.name = "in-place",
		.inplace = true,
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "out-of-place",
		.src_divs = { { .proportion_of_total = 10000 } },
	}, {
		.name = "unaligned buffer, offset=1",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.iv_offset = 1,
		.key_offset = 1,
	}, {
		.name = "buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.iv_offset = 1,
		.iv_offset_relative_to_alignmask = true,
		.key_offset = 1,
		.key_offset_relative_to_alignmask = true,
	}, {
		.name = "two even aligned splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{ .proportion_of_total = 5000 },
		},
	}, {
		.name = "uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7  },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
	}, {
		.name = "misaligned splits crossing pages, inplace",
		.inplace = true,
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7
			},
		},
	}
};
/* Configs for hashes */
static const struct testvec_config default_hash_testvec_configs[] = {
	{
		.name = "init+update+final aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "init+finup aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_FINUP,
	}, {
		.name = "digest aligned buffer",
		.src_divs = { { .proportion_of_total = 10000 } },
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "init+update+final misaligned buffer",
		.src_divs = { { .proportion_of_total = 10000, .offset = 1 } },
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "digest buffer aligned only to alignmask",
		.src_divs = {
			{
				.proportion_of_total = 10000,
				.offset = 1,
				.offset_relative_to_alignmask = true,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
		.key_offset = 1,
		.key_offset_relative_to_alignmask = true,
	}, {
		.name = "init+update+update+final two even splits",
		.src_divs = {
			{ .proportion_of_total = 5000 },
			{
				.proportion_of_total = 5000,
				.flush_type = FLUSH_TYPE_FLUSH,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}, {
		.name = "digest uneven misaligned splits, may sleep",
		.req_flags = CRYPTO_TFM_REQ_MAY_SLEEP,
		.src_divs = {
			{ .proportion_of_total = 1900, .offset = 33 },
			{ .proportion_of_total = 3300, .offset = 7  },
			{ .proportion_of_total = 4800, .offset = 18 },
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "digest misaligned splits crossing pages",
		.src_divs = {
			{
				.proportion_of_total = 7500,
				.offset = PAGE_SIZE - 32,
			}, {
				.proportion_of_total = 2500,
				.offset = PAGE_SIZE - 7,
			},
		},
		.finalization_type = FINALIZATION_TYPE_DIGEST,
	}, {
		.name = "import/export",
		.src_divs = {
			{
				.proportion_of_total = 6500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			}, {
				.proportion_of_total = 3500,
				.flush_type = FLUSH_TYPE_REIMPORT,
			},
		},
		.finalization_type = FINALIZATION_TYPE_FINAL,
	}
};
static unsigned int count_test_sg_divisions(const struct test_sg_division *divs)
{
	unsigned int remaining = TEST_SG_TOTAL;
	unsigned int ndivs = 0;

	do {
		remaining -= divs[ndivs++].proportion_of_total;
	} while (remaining);

	return ndivs;
}
#define SGDIVS_HAVE_FLUSHES	BIT(0)
#define SGDIVS_HAVE_NOSIMD	BIT(1)

static bool valid_sg_divisions(const struct test_sg_division *divs,
			       unsigned int count, int *flags_ret)
{
	unsigned int total = 0;
	unsigned int i;

	for (i = 0; i < count && total != TEST_SG_TOTAL; i++) {
		if (divs[i].proportion_of_total <= 0 ||
		    divs[i].proportion_of_total > TEST_SG_TOTAL - total)
			return false;
		total += divs[i].proportion_of_total;
		if (divs[i].flush_type != FLUSH_TYPE_NONE)
			*flags_ret |= SGDIVS_HAVE_FLUSHES;
		if (divs[i].nosimd)
			*flags_ret |= SGDIVS_HAVE_NOSIMD;
	}
	return total == TEST_SG_TOTAL &&
		memchr_inv(&divs[i], 0, (count - i) * sizeof(divs[0])) == NULL;
}
/*
 * Check whether the given testvec_config is valid.  This isn't strictly needed
 * since every testvec_config should be valid, but check anyway so that people
 * don't unknowingly add broken configs that don't do what they wanted.
 */
static bool valid_testvec_config(const struct testvec_config *cfg)
{
	int flags = 0;

	if (cfg->name == NULL)
		return false;

	if (!valid_sg_divisions(cfg->src_divs, ARRAY_SIZE(cfg->src_divs),
				&flags))
		return false;

	if (cfg->dst_divs[0].proportion_of_total) {
		if (!valid_sg_divisions(cfg->dst_divs,
					ARRAY_SIZE(cfg->dst_divs), &flags))
			return false;
	} else {
		if (memchr_inv(cfg->dst_divs, 0, sizeof(cfg->dst_divs)))
			return false;
		/* defaults to dst_divs=src_divs */
	}

	if (cfg->iv_offset +
	    (cfg->iv_offset_relative_to_alignmask ? MAX_ALGAPI_ALIGNMASK : 0) >
	    MAX_ALGAPI_ALIGNMASK + 1)
		return false;

	if ((flags & (SGDIVS_HAVE_FLUSHES | SGDIVS_HAVE_NOSIMD)) &&
	    cfg->finalization_type == FINALIZATION_TYPE_DIGEST)
		return false;

	if ((cfg->nosimd || (flags & SGDIVS_HAVE_NOSIMD)) &&
	    (cfg->req_flags & CRYPTO_TFM_REQ_MAY_SLEEP))
		return false;

	return true;
}
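/*
 * Note: besides checking the static config tables, valid_testvec_config() is
 * also used as a sanity check on the randomly generated configs; see the
 * WARN_ON_ONCE() at the end of generate_random_testvec_config() below.
 */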
struct test_sglist {
	char *bufs[XBUFSIZE];
	struct scatterlist sgl[XBUFSIZE];
	struct scatterlist sgl_saved[XBUFSIZE];
	struct scatterlist *sgl_ptr;
	unsigned int nents;
};

static int init_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_alloc_buf(tsgl->bufs, 1 /* two pages per buffer */);
}

static void destroy_test_sglist(struct test_sglist *tsgl)
{
	return __testmgr_free_buf(tsgl->bufs, 1 /* two pages per buffer */);
}
/*
 * build_test_sglist() - build a scatterlist for a crypto test
 *
 * @tsgl: the scatterlist to build.  @tsgl->bufs[] contains an array of 2-page
 *	  buffers which the scatterlist @tsgl->sgl[] will be made to point into.
 * @divs: the layout specification on which the scatterlist will be based
 * @alignmask: the algorithm's alignmask
 * @total_len: the total length of the scatterlist to build in bytes
 * @data: if non-NULL, the buffers will be filled with this data until it ends.
 *	  Otherwise the buffers will be poisoned.  In both cases, some bytes
 *	  past the end of each buffer will be poisoned to help detect overruns.
 * @out_divs: if non-NULL, the test_sg_division to which each scatterlist entry
 *	      corresponds will be returned here.  This will match @divs except
 *	      that divisions resolving to a length of 0 are omitted as they are
 *	      not included in the scatterlist.
 *
 * Return: 0 or a -errno value
 */
static int build_test_sglist(struct test_sglist *tsgl,
			     const struct test_sg_division *divs,
			     const unsigned int alignmask,
			     const unsigned int total_len,
			     struct iov_iter *data,
			     const struct test_sg_division *out_divs[XBUFSIZE])
{
	struct {
		const struct test_sg_division *div;
		size_t length;
	} partitions[XBUFSIZE];
	const unsigned int ndivs = count_test_sg_divisions(divs);
	unsigned int len_remaining = total_len;
	unsigned int i;

	BUILD_BUG_ON(ARRAY_SIZE(partitions) != ARRAY_SIZE(tsgl->sgl));
	if (WARN_ON(ndivs > ARRAY_SIZE(partitions)))
		return -EINVAL;

	/* Calculate the (div, length) pairs */
	tsgl->nents = 0;
	for (i = 0; i < ndivs; i++) {
		unsigned int len_this_sg =
			min(len_remaining,
			    (total_len * divs[i].proportion_of_total +
			     TEST_SG_TOTAL / 2) / TEST_SG_TOTAL);

		if (len_this_sg != 0) {
			partitions[tsgl->nents].div = &divs[i];
			partitions[tsgl->nents].length = len_this_sg;
			tsgl->nents++;
			len_remaining -= len_this_sg;
		}
	}
	if (tsgl->nents == 0) {
		partitions[tsgl->nents].div = &divs[0];
		partitions[tsgl->nents].length = 0;
		tsgl->nents++;
	}
	partitions[tsgl->nents - 1].length += len_remaining;

	/* Set up the sgl entries and fill the data or poison */
	sg_init_table(tsgl->sgl, tsgl->nents);
	for (i = 0; i < tsgl->nents; i++) {
		unsigned int offset = partitions[i].div->offset;
		void *addr;

		if (partitions[i].div->offset_relative_to_alignmask)
			offset += alignmask;

		while (offset + partitions[i].length + TESTMGR_POISON_LEN >
		       2 * PAGE_SIZE) {
			if (WARN_ON(offset <= 0))
				return -EINVAL;
			offset /= 2;
		}

		addr = &tsgl->bufs[i][offset];
		sg_set_buf(&tsgl->sgl[i], addr, partitions[i].length);

		if (out_divs)
			out_divs[i] = partitions[i].div;

		if (data) {
			size_t copy_len, copied;

			copy_len = min(partitions[i].length, data->count);
			copied = copy_from_iter(addr, copy_len, data);
			if (WARN_ON(copied != copy_len))
				return -EINVAL;
			testmgr_poison(addr + copy_len, partitions[i].length +
				       TESTMGR_POISON_LEN - copy_len);
		} else {
			testmgr_poison(addr, partitions[i].length +
				       TESTMGR_POISON_LEN);
		}
	}

	sg_mark_end(&tsgl->sgl[tsgl->nents - 1]);
	tsgl->sgl_ptr = tsgl->sgl;
	memcpy(tsgl->sgl_saved, tsgl->sgl, tsgl->nents * sizeof(tsgl->sgl[0]));
	return 0;
}
/*
 * Verify that a scatterlist crypto operation produced the correct output.
 *
 * @tsgl: scatterlist containing the actual output
 * @expected_output: buffer containing the expected output
 * @len_to_check: length of @expected_output in bytes
 * @unchecked_prefix_len: number of ignored bytes in @tsgl prior to real result
 * @check_poison: verify that the poison bytes after each chunk are intact?
 *
 * Return: 0 if correct, -EINVAL if incorrect, -EOVERFLOW if buffer overrun.
 */
static int verify_correct_output(const struct test_sglist *tsgl,
				 const char *expected_output,
				 unsigned int len_to_check,
				 unsigned int unchecked_prefix_len,
				 bool check_poison)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		struct scatterlist *sg = &tsgl->sgl_ptr[i];
		unsigned int len = sg->length;
		unsigned int offset = sg->offset;
		const char *actual_output;

		if (unchecked_prefix_len) {
			if (unchecked_prefix_len >= len) {
				unchecked_prefix_len -= len;
				continue;
			}
			offset += unchecked_prefix_len;
			len -= unchecked_prefix_len;
			unchecked_prefix_len = 0;
		}
		len = min(len, len_to_check);
		actual_output = page_address(sg_page(sg)) + offset;
		if (memcmp(expected_output, actual_output, len) != 0)
			return -EINVAL;
		if (check_poison &&
		    !testmgr_is_poison(actual_output + len, TESTMGR_POISON_LEN))
			return -EOVERFLOW;
		len_to_check -= len;
		expected_output += len;
	}
	if (WARN_ON(len_to_check != 0))
		return -EINVAL;
	return 0;
}
static bool is_test_sglist_corrupted(const struct test_sglist *tsgl)
{
	unsigned int i;

	for (i = 0; i < tsgl->nents; i++) {
		if (tsgl->sgl[i].page_link != tsgl->sgl_saved[i].page_link)
			return true;
		if (tsgl->sgl[i].offset != tsgl->sgl_saved[i].offset)
			return true;
		if (tsgl->sgl[i].length != tsgl->sgl_saved[i].length)
			return true;
	}
	return false;
}
struct cipher_test_sglists {
	struct test_sglist src;
	struct test_sglist dst;
};

static struct cipher_test_sglists *alloc_cipher_test_sglists(void)
{
	struct cipher_test_sglists *tsgls;

	tsgls = kmalloc(sizeof(*tsgls), GFP_KERNEL);
	if (!tsgls)
		return NULL;

	if (init_test_sglist(&tsgls->src) != 0)
		goto fail_kfree;
	if (init_test_sglist(&tsgls->dst) != 0)
		goto fail_destroy_src;

	return tsgls;

fail_destroy_src:
	destroy_test_sglist(&tsgls->src);
fail_kfree:
	kfree(tsgls);
	return NULL;
}

static void free_cipher_test_sglists(struct cipher_test_sglists *tsgls)
{
	if (tsgls) {
		destroy_test_sglist(&tsgls->src);
		destroy_test_sglist(&tsgls->dst);
		kfree(tsgls);
	}
}
/* Build the src and dst scatterlists for an skcipher or AEAD test */
static int build_cipher_test_sglists(struct cipher_test_sglists *tsgls,
				     const struct testvec_config *cfg,
				     unsigned int alignmask,
				     unsigned int src_total_len,
				     unsigned int dst_total_len,
				     const struct kvec *inputs,
				     unsigned int nr_inputs)
{
	struct iov_iter input;
	int err;

	iov_iter_kvec(&input, WRITE, inputs, nr_inputs, src_total_len);
	err = build_test_sglist(&tsgls->src, cfg->src_divs, alignmask,
				cfg->inplace ?
					max(dst_total_len, src_total_len) :
					src_total_len,
				&input, NULL);
	if (err)
		return err;

	if (cfg->inplace) {
		tsgls->dst.sgl_ptr = tsgls->src.sgl;
		tsgls->dst.nents = tsgls->src.nents;
		return 0;
	}
	return build_test_sglist(&tsgls->dst,
				 cfg->dst_divs[0].proportion_of_total ?
					cfg->dst_divs : cfg->src_divs,
				 alignmask, dst_total_len, NULL, NULL);
}
/*
 * Support for testing passing a misaligned key to setkey():
 *
 * If cfg->key_offset is set, copy the key into a new buffer at that offset,
 * optionally adding alignmask.  Else, just use the key directly.
 */
static int prepare_keybuf(const u8 *key, unsigned int ksize,
			  const struct testvec_config *cfg,
			  unsigned int alignmask,
			  const u8 **keybuf_ret, const u8 **keyptr_ret)
{
	unsigned int key_offset = cfg->key_offset;
	u8 *keybuf = NULL, *keyptr = (u8 *)key;

	if (key_offset != 0) {
		if (cfg->key_offset_relative_to_alignmask)
			key_offset += alignmask;
		keybuf = kmalloc(key_offset + ksize, GFP_KERNEL);
		if (!keybuf)
			return -ENOMEM;
		keyptr = keybuf + key_offset;
		memcpy(keyptr, key, ksize);
	}
	*keybuf_ret = keybuf;
	*keyptr_ret = keyptr;
	return 0;
}

/* Like setkey_f(tfm, key, ksize), but sometimes misalign the key */
#define do_setkey(setkey_f, tfm, key, ksize, cfg, alignmask)		\
({									\
	const u8 *keybuf, *keyptr;					\
	int err;							\
									\
	err = prepare_keybuf((key), (ksize), (cfg), (alignmask),	\
			     &keybuf, &keyptr);				\
	if (err == 0) {							\
		err = setkey_f((tfm), keyptr, (ksize));			\
		kfree(keybuf);						\
	}								\
	err;								\
})
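/*
 * Illustrative use of do_setkey(), matching the shash/ahash/aead setkey paths
 * later in this file:
 *
 *	err = do_setkey(crypto_shash_setkey, tfm, vec->key, vec->ksize,
 *			cfg, alignmask);
 */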
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS

/* Generate a random length in range [0, max_len], but prefer smaller values */
static unsigned int generate_random_length(unsigned int max_len)
{
	unsigned int len = prandom_u32() % (max_len + 1);

	switch (prandom_u32() % 4) {
	case 0:
		return len % 64;
	case 1:
		return len % 256;
	case 2:
		return len % 1024;
	default:
		return len;
	}
}
/* Flip a random bit in the given nonempty data buffer */
static void flip_random_bit(u8 *buf, size_t size)
{
	size_t bitpos;

	bitpos = prandom_u32() % (size * 8);
	buf[bitpos / 8] ^= 1 << (bitpos % 8);
}

/* Flip a random byte in the given nonempty data buffer */
static void flip_random_byte(u8 *buf, size_t size)
{
	buf[prandom_u32() % size] ^= 0xff;
}
/* Sometimes make some random changes to the given nonempty data buffer */
static void mutate_buffer(u8 *buf, size_t size)
{
	size_t num_flips;
	size_t i;

	/* Sometimes flip some bits */
	if (prandom_u32() % 4 == 0) {
		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), size * 8);
		for (i = 0; i < num_flips; i++)
			flip_random_bit(buf, size);
	}

	/* Sometimes flip some bytes */
	if (prandom_u32() % 4 == 0) {
		num_flips = min_t(size_t, 1 << (prandom_u32() % 8), size);
		for (i = 0; i < num_flips; i++)
			flip_random_byte(buf, size);
	}
}
/* Randomly generate 'count' bytes, but sometimes make them "interesting" */
static void generate_random_bytes(u8 *buf, size_t count)
{
	u8 b;
	u8 increment;
	size_t i;

	if (count == 0)
		return;

	switch (prandom_u32() % 8) { /* Choose a generation strategy */
	case 0:
	case 1:
		/* All the same byte, plus optional mutations */
		switch (prandom_u32() % 4) {
		case 0:
			b = 0x00;
			break;
		case 1:
			b = 0xff;
			break;
		default:
			b = (u8)prandom_u32();
			break;
		}
		memset(buf, b, count);
		mutate_buffer(buf, count);
		break;
	case 2:
		/* Ascending or descending bytes, plus optional mutations */
		increment = (u8)prandom_u32();
		b = (u8)prandom_u32();
		for (i = 0; i < count; i++, b += increment)
			buf[i] = b;
		mutate_buffer(buf, count);
		break;
	default:
		/* Fully random bytes */
		for (i = 0; i < count; i++)
			buf[i] = (u8)prandom_u32();
	}
}
909 static char *generate_random_sgl_divisions(struct test_sg_division
*divs
,
910 size_t max_divs
, char *p
, char *end
,
911 bool gen_flushes
, u32 req_flags
)
913 struct test_sg_division
*div
= divs
;
914 unsigned int remaining
= TEST_SG_TOTAL
;
917 unsigned int this_len
;
918 const char *flushtype_str
;
920 if (div
== &divs
[max_divs
- 1] || prandom_u32() % 2 == 0)
921 this_len
= remaining
;
923 this_len
= 1 + (prandom_u32() % remaining
);
924 div
->proportion_of_total
= this_len
;
926 if (prandom_u32() % 4 == 0)
927 div
->offset
= (PAGE_SIZE
- 128) + (prandom_u32() % 128);
928 else if (prandom_u32() % 2 == 0)
929 div
->offset
= prandom_u32() % 32;
931 div
->offset
= prandom_u32() % PAGE_SIZE
;
932 if (prandom_u32() % 8 == 0)
933 div
->offset_relative_to_alignmask
= true;
935 div
->flush_type
= FLUSH_TYPE_NONE
;
937 switch (prandom_u32() % 4) {
939 div
->flush_type
= FLUSH_TYPE_REIMPORT
;
942 div
->flush_type
= FLUSH_TYPE_FLUSH
;
947 if (div
->flush_type
!= FLUSH_TYPE_NONE
&&
948 !(req_flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) &&
949 prandom_u32() % 2 == 0)
952 switch (div
->flush_type
) {
953 case FLUSH_TYPE_FLUSH
:
955 flushtype_str
= "<flush,nosimd>";
957 flushtype_str
= "<flush>";
959 case FLUSH_TYPE_REIMPORT
:
961 flushtype_str
= "<reimport,nosimd>";
963 flushtype_str
= "<reimport>";
970 BUILD_BUG_ON(TEST_SG_TOTAL
!= 10000); /* for "%u.%u%%" */
971 p
+= scnprintf(p
, end
- p
, "%s%u.%u%%@%s+%u%s", flushtype_str
,
972 this_len
/ 100, this_len
% 100,
973 div
->offset_relative_to_alignmask
?
975 div
->offset
, this_len
== remaining
? "" : ", ");
976 remaining
-= this_len
;
983 /* Generate a random testvec_config for fuzz testing */
984 static void generate_random_testvec_config(struct testvec_config
*cfg
,
985 char *name
, size_t max_namelen
)
988 char * const end
= name
+ max_namelen
;
990 memset(cfg
, 0, sizeof(*cfg
));
994 p
+= scnprintf(p
, end
- p
, "random:");
996 if (prandom_u32() % 2 == 0) {
998 p
+= scnprintf(p
, end
- p
, " inplace");
1001 if (prandom_u32() % 2 == 0) {
1002 cfg
->req_flags
|= CRYPTO_TFM_REQ_MAY_SLEEP
;
1003 p
+= scnprintf(p
, end
- p
, " may_sleep");
1006 switch (prandom_u32() % 4) {
1008 cfg
->finalization_type
= FINALIZATION_TYPE_FINAL
;
1009 p
+= scnprintf(p
, end
- p
, " use_final");
1012 cfg
->finalization_type
= FINALIZATION_TYPE_FINUP
;
1013 p
+= scnprintf(p
, end
- p
, " use_finup");
1016 cfg
->finalization_type
= FINALIZATION_TYPE_DIGEST
;
1017 p
+= scnprintf(p
, end
- p
, " use_digest");
1021 if (!(cfg
->req_flags
& CRYPTO_TFM_REQ_MAY_SLEEP
) &&
1022 prandom_u32() % 2 == 0) {
1024 p
+= scnprintf(p
, end
- p
, " nosimd");
1027 p
+= scnprintf(p
, end
- p
, " src_divs=[");
1028 p
= generate_random_sgl_divisions(cfg
->src_divs
,
1029 ARRAY_SIZE(cfg
->src_divs
), p
, end
,
1030 (cfg
->finalization_type
!=
1031 FINALIZATION_TYPE_DIGEST
),
1033 p
+= scnprintf(p
, end
- p
, "]");
1035 if (!cfg
->inplace
&& prandom_u32() % 2 == 0) {
1036 p
+= scnprintf(p
, end
- p
, " dst_divs=[");
1037 p
= generate_random_sgl_divisions(cfg
->dst_divs
,
1038 ARRAY_SIZE(cfg
->dst_divs
),
1041 p
+= scnprintf(p
, end
- p
, "]");
1044 if (prandom_u32() % 2 == 0) {
1045 cfg
->iv_offset
= 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK
);
1046 p
+= scnprintf(p
, end
- p
, " iv_offset=%u", cfg
->iv_offset
);
1049 if (prandom_u32() % 2 == 0) {
1050 cfg
->key_offset
= 1 + (prandom_u32() % MAX_ALGAPI_ALIGNMASK
);
1051 p
+= scnprintf(p
, end
- p
, " key_offset=%u", cfg
->key_offset
);
1054 WARN_ON_ONCE(!valid_testvec_config(cfg
));
static void crypto_disable_simd_for_test(void)
{
	preempt_disable();
	__this_cpu_write(crypto_simd_disabled_for_test, true);
}

static void crypto_reenable_simd_for_test(void)
{
	__this_cpu_write(crypto_simd_disabled_for_test, false);
	preempt_enable();
}

/*
 * Given an algorithm name, build the name of the generic implementation of that
 * algorithm, assuming the usual naming convention.  Specifically, this appends
 * "-generic" to every part of the name that is not a template name.  Examples:
 *
 *	aes => aes-generic
 *	cbc(aes) => cbc(aes-generic)
 *	cts(cbc(aes)) => cts(cbc(aes-generic))
 *	rfc7539(chacha20,poly1305) => rfc7539(chacha20-generic,poly1305-generic)
 *
 * Return: 0 on success, or -ENAMETOOLONG if the generic name would be too long
 */
static int build_generic_driver_name(const char *algname,
				     char driver_name[CRYPTO_MAX_ALG_NAME])
{
	const char *in = algname;
	char *out = driver_name;
	size_t len = strlen(algname);

	if (len >= CRYPTO_MAX_ALG_NAME)
		goto too_long;
	do {
		const char *in_saved = in;

		while (*in && *in != '(' && *in != ')' && *in != ',')
			*out++ = *in++;
		if (*in != '(' && in > in_saved) {
			len += 8;
			if (len >= CRYPTO_MAX_ALG_NAME)
				goto too_long;
			memcpy(out, "-generic", 8);
			out += 8;
		}
	} while ((*out++ = *in++) != '\0');
	return 0;

too_long:
	pr_err("alg: generic driver name for \"%s\" would be too long\n",
	       algname);
	return -ENAMETOOLONG;
}

#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */

static void crypto_disable_simd_for_test(void)
{
}

static void crypto_reenable_simd_for_test(void)
{
}

#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1120 static int build_hash_sglist(struct test_sglist
*tsgl
,
1121 const struct hash_testvec
*vec
,
1122 const struct testvec_config
*cfg
,
1123 unsigned int alignmask
,
1124 const struct test_sg_division
*divs
[XBUFSIZE
])
1127 struct iov_iter input
;
1129 kv
.iov_base
= (void *)vec
->plaintext
;
1130 kv
.iov_len
= vec
->psize
;
1131 iov_iter_kvec(&input
, WRITE
, &kv
, 1, vec
->psize
);
1132 return build_test_sglist(tsgl
, cfg
->src_divs
, alignmask
, vec
->psize
,
1136 static int check_hash_result(const char *type
,
1137 const u8
*result
, unsigned int digestsize
,
1138 const struct hash_testvec
*vec
,
1139 const char *vec_name
,
1141 const struct testvec_config
*cfg
)
1143 if (memcmp(result
, vec
->digest
, digestsize
) != 0) {
1144 pr_err("alg: %s: %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
1145 type
, driver
, vec_name
, cfg
->name
);
1148 if (!testmgr_is_poison(&result
[digestsize
], TESTMGR_POISON_LEN
)) {
1149 pr_err("alg: %s: %s overran result buffer on test vector %s, cfg=\"%s\"\n",
1150 type
, driver
, vec_name
, cfg
->name
);
1156 static inline int check_shash_op(const char *op
, int err
,
1157 const char *driver
, const char *vec_name
,
1158 const struct testvec_config
*cfg
)
1161 pr_err("alg: shash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1162 driver
, op
, err
, vec_name
, cfg
->name
);
1166 static inline const void *sg_data(struct scatterlist
*sg
)
1168 return page_address(sg_page(sg
)) + sg
->offset
;
1171 /* Test one hash test vector in one configuration, using the shash API */
1172 static int test_shash_vec_cfg(const char *driver
,
1173 const struct hash_testvec
*vec
,
1174 const char *vec_name
,
1175 const struct testvec_config
*cfg
,
1176 struct shash_desc
*desc
,
1177 struct test_sglist
*tsgl
,
1180 struct crypto_shash
*tfm
= desc
->tfm
;
1181 const unsigned int alignmask
= crypto_shash_alignmask(tfm
);
1182 const unsigned int digestsize
= crypto_shash_digestsize(tfm
);
1183 const unsigned int statesize
= crypto_shash_statesize(tfm
);
1184 const struct test_sg_division
*divs
[XBUFSIZE
];
1186 u8 result
[HASH_MAX_DIGESTSIZE
+ TESTMGR_POISON_LEN
];
1189 /* Set the key, if specified */
1191 err
= do_setkey(crypto_shash_setkey
, tfm
, vec
->key
, vec
->ksize
,
1194 if (err
== vec
->setkey_error
)
1196 pr_err("alg: shash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1197 driver
, vec_name
, vec
->setkey_error
, err
,
1198 crypto_shash_get_flags(tfm
));
1201 if (vec
->setkey_error
) {
1202 pr_err("alg: shash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1203 driver
, vec_name
, vec
->setkey_error
);
1208 /* Build the scatterlist for the source data */
1209 err
= build_hash_sglist(tsgl
, vec
, cfg
, alignmask
, divs
);
1211 pr_err("alg: shash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
1212 driver
, vec_name
, cfg
->name
);
1216 /* Do the actual hashing */
1218 testmgr_poison(desc
->__ctx
, crypto_shash_descsize(tfm
));
1219 testmgr_poison(result
, digestsize
+ TESTMGR_POISON_LEN
);
1221 if (cfg
->finalization_type
== FINALIZATION_TYPE_DIGEST
||
1222 vec
->digest_error
) {
1223 /* Just using digest() */
1224 if (tsgl
->nents
!= 1)
1227 crypto_disable_simd_for_test();
1228 err
= crypto_shash_digest(desc
, sg_data(&tsgl
->sgl
[0]),
1229 tsgl
->sgl
[0].length
, result
);
1231 crypto_reenable_simd_for_test();
1233 if (err
== vec
->digest_error
)
1235 pr_err("alg: shash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
1236 driver
, vec_name
, vec
->digest_error
, err
,
1240 if (vec
->digest_error
) {
1241 pr_err("alg: shash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
1242 driver
, vec_name
, vec
->digest_error
, cfg
->name
);
1248 /* Using init(), zero or more update(), then final() or finup() */
1251 crypto_disable_simd_for_test();
1252 err
= crypto_shash_init(desc
);
1254 crypto_reenable_simd_for_test();
1255 err
= check_shash_op("init", err
, driver
, vec_name
, cfg
);
1259 for (i
= 0; i
< tsgl
->nents
; i
++) {
1260 if (i
+ 1 == tsgl
->nents
&&
1261 cfg
->finalization_type
== FINALIZATION_TYPE_FINUP
) {
1262 if (divs
[i
]->nosimd
)
1263 crypto_disable_simd_for_test();
1264 err
= crypto_shash_finup(desc
, sg_data(&tsgl
->sgl
[i
]),
1265 tsgl
->sgl
[i
].length
, result
);
1266 if (divs
[i
]->nosimd
)
1267 crypto_reenable_simd_for_test();
1268 err
= check_shash_op("finup", err
, driver
, vec_name
,
1274 if (divs
[i
]->nosimd
)
1275 crypto_disable_simd_for_test();
1276 err
= crypto_shash_update(desc
, sg_data(&tsgl
->sgl
[i
]),
1277 tsgl
->sgl
[i
].length
);
1278 if (divs
[i
]->nosimd
)
1279 crypto_reenable_simd_for_test();
1280 err
= check_shash_op("update", err
, driver
, vec_name
, cfg
);
1283 if (divs
[i
]->flush_type
== FLUSH_TYPE_REIMPORT
) {
1284 /* Test ->export() and ->import() */
1285 testmgr_poison(hashstate
+ statesize
,
1286 TESTMGR_POISON_LEN
);
1287 err
= crypto_shash_export(desc
, hashstate
);
1288 err
= check_shash_op("export", err
, driver
, vec_name
,
1292 if (!testmgr_is_poison(hashstate
+ statesize
,
1293 TESTMGR_POISON_LEN
)) {
1294 pr_err("alg: shash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
1295 driver
, vec_name
, cfg
->name
);
1298 testmgr_poison(desc
->__ctx
, crypto_shash_descsize(tfm
));
1299 err
= crypto_shash_import(desc
, hashstate
);
1300 err
= check_shash_op("import", err
, driver
, vec_name
,
1308 crypto_disable_simd_for_test();
1309 err
= crypto_shash_final(desc
, result
);
1311 crypto_reenable_simd_for_test();
1312 err
= check_shash_op("final", err
, driver
, vec_name
, cfg
);
1316 return check_hash_result("shash", result
, digestsize
, vec
, vec_name
,
1320 static int do_ahash_op(int (*op
)(struct ahash_request
*req
),
1321 struct ahash_request
*req
,
1322 struct crypto_wait
*wait
, bool nosimd
)
1327 crypto_disable_simd_for_test();
1332 crypto_reenable_simd_for_test();
1334 return crypto_wait_req(err
, wait
);
1337 static int check_nonfinal_ahash_op(const char *op
, int err
,
1338 u8
*result
, unsigned int digestsize
,
1339 const char *driver
, const char *vec_name
,
1340 const struct testvec_config
*cfg
)
1343 pr_err("alg: ahash: %s %s() failed with err %d on test vector %s, cfg=\"%s\"\n",
1344 driver
, op
, err
, vec_name
, cfg
->name
);
1347 if (!testmgr_is_poison(result
, digestsize
)) {
1348 pr_err("alg: ahash: %s %s() used result buffer on test vector %s, cfg=\"%s\"\n",
1349 driver
, op
, vec_name
, cfg
->name
);
1355 /* Test one hash test vector in one configuration, using the ahash API */
1356 static int test_ahash_vec_cfg(const char *driver
,
1357 const struct hash_testvec
*vec
,
1358 const char *vec_name
,
1359 const struct testvec_config
*cfg
,
1360 struct ahash_request
*req
,
1361 struct test_sglist
*tsgl
,
1364 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1365 const unsigned int alignmask
= crypto_ahash_alignmask(tfm
);
1366 const unsigned int digestsize
= crypto_ahash_digestsize(tfm
);
1367 const unsigned int statesize
= crypto_ahash_statesize(tfm
);
1368 const u32 req_flags
= CRYPTO_TFM_REQ_MAY_BACKLOG
| cfg
->req_flags
;
1369 const struct test_sg_division
*divs
[XBUFSIZE
];
1370 DECLARE_CRYPTO_WAIT(wait
);
1372 struct scatterlist
*pending_sgl
;
1373 unsigned int pending_len
;
1374 u8 result
[HASH_MAX_DIGESTSIZE
+ TESTMGR_POISON_LEN
];
1377 /* Set the key, if specified */
1379 err
= do_setkey(crypto_ahash_setkey
, tfm
, vec
->key
, vec
->ksize
,
1382 if (err
== vec
->setkey_error
)
1384 pr_err("alg: ahash: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1385 driver
, vec_name
, vec
->setkey_error
, err
,
1386 crypto_ahash_get_flags(tfm
));
1389 if (vec
->setkey_error
) {
1390 pr_err("alg: ahash: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1391 driver
, vec_name
, vec
->setkey_error
);
1396 /* Build the scatterlist for the source data */
1397 err
= build_hash_sglist(tsgl
, vec
, cfg
, alignmask
, divs
);
1399 pr_err("alg: ahash: %s: error preparing scatterlist for test vector %s, cfg=\"%s\"\n",
1400 driver
, vec_name
, cfg
->name
);
1404 /* Do the actual hashing */
1406 testmgr_poison(req
->__ctx
, crypto_ahash_reqsize(tfm
));
1407 testmgr_poison(result
, digestsize
+ TESTMGR_POISON_LEN
);
1409 if (cfg
->finalization_type
== FINALIZATION_TYPE_DIGEST
||
1410 vec
->digest_error
) {
1411 /* Just using digest() */
1412 ahash_request_set_callback(req
, req_flags
, crypto_req_done
,
1414 ahash_request_set_crypt(req
, tsgl
->sgl
, result
, vec
->psize
);
1415 err
= do_ahash_op(crypto_ahash_digest
, req
, &wait
, cfg
->nosimd
);
1417 if (err
== vec
->digest_error
)
1419 pr_err("alg: ahash: %s digest() failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
1420 driver
, vec_name
, vec
->digest_error
, err
,
1424 if (vec
->digest_error
) {
1425 pr_err("alg: ahash: %s digest() unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
1426 driver
, vec_name
, vec
->digest_error
, cfg
->name
);
1432 /* Using init(), zero or more update(), then final() or finup() */
1434 ahash_request_set_callback(req
, req_flags
, crypto_req_done
, &wait
);
1435 ahash_request_set_crypt(req
, NULL
, result
, 0);
1436 err
= do_ahash_op(crypto_ahash_init
, req
, &wait
, cfg
->nosimd
);
1437 err
= check_nonfinal_ahash_op("init", err
, result
, digestsize
,
1438 driver
, vec_name
, cfg
);
1444 for (i
= 0; i
< tsgl
->nents
; i
++) {
1445 if (divs
[i
]->flush_type
!= FLUSH_TYPE_NONE
&&
1446 pending_sgl
!= NULL
) {
1447 /* update() with the pending data */
1448 ahash_request_set_callback(req
, req_flags
,
1449 crypto_req_done
, &wait
);
1450 ahash_request_set_crypt(req
, pending_sgl
, result
,
1452 err
= do_ahash_op(crypto_ahash_update
, req
, &wait
,
1454 err
= check_nonfinal_ahash_op("update", err
,
1456 driver
, vec_name
, cfg
);
1462 if (divs
[i
]->flush_type
== FLUSH_TYPE_REIMPORT
) {
1463 /* Test ->export() and ->import() */
1464 testmgr_poison(hashstate
+ statesize
,
1465 TESTMGR_POISON_LEN
);
1466 err
= crypto_ahash_export(req
, hashstate
);
1467 err
= check_nonfinal_ahash_op("export", err
,
1469 driver
, vec_name
, cfg
);
1472 if (!testmgr_is_poison(hashstate
+ statesize
,
1473 TESTMGR_POISON_LEN
)) {
1474 pr_err("alg: ahash: %s export() overran state buffer on test vector %s, cfg=\"%s\"\n",
1475 driver
, vec_name
, cfg
->name
);
1479 testmgr_poison(req
->__ctx
, crypto_ahash_reqsize(tfm
));
1480 err
= crypto_ahash_import(req
, hashstate
);
1481 err
= check_nonfinal_ahash_op("import", err
,
1483 driver
, vec_name
, cfg
);
1487 if (pending_sgl
== NULL
)
1488 pending_sgl
= &tsgl
->sgl
[i
];
1489 pending_len
+= tsgl
->sgl
[i
].length
;
1492 ahash_request_set_callback(req
, req_flags
, crypto_req_done
, &wait
);
1493 ahash_request_set_crypt(req
, pending_sgl
, result
, pending_len
);
1494 if (cfg
->finalization_type
== FINALIZATION_TYPE_FINAL
) {
1495 /* finish with update() and final() */
1496 err
= do_ahash_op(crypto_ahash_update
, req
, &wait
, cfg
->nosimd
);
1497 err
= check_nonfinal_ahash_op("update", err
, result
, digestsize
,
1498 driver
, vec_name
, cfg
);
1501 err
= do_ahash_op(crypto_ahash_final
, req
, &wait
, cfg
->nosimd
);
1503 pr_err("alg: ahash: %s final() failed with err %d on test vector %s, cfg=\"%s\"\n",
1504 driver
, err
, vec_name
, cfg
->name
);
1508 /* finish with finup() */
1509 err
= do_ahash_op(crypto_ahash_finup
, req
, &wait
, cfg
->nosimd
);
1511 pr_err("alg: ahash: %s finup() failed with err %d on test vector %s, cfg=\"%s\"\n",
1512 driver
, err
, vec_name
, cfg
->name
);
1518 return check_hash_result("ahash", result
, digestsize
, vec
, vec_name
,
1522 static int test_hash_vec_cfg(const char *driver
,
1523 const struct hash_testvec
*vec
,
1524 const char *vec_name
,
1525 const struct testvec_config
*cfg
,
1526 struct ahash_request
*req
,
1527 struct shash_desc
*desc
,
1528 struct test_sglist
*tsgl
,
1534 * For algorithms implemented as "shash", most bugs will be detected by
1535 * both the shash and ahash tests. Test the shash API first so that the
1536 * failures involve less indirection, so are easier to debug.
1540 err
= test_shash_vec_cfg(driver
, vec
, vec_name
, cfg
, desc
, tsgl
,
1546 return test_ahash_vec_cfg(driver
, vec
, vec_name
, cfg
, req
, tsgl
,
1550 static int test_hash_vec(const char *driver
, const struct hash_testvec
*vec
,
1551 unsigned int vec_num
, struct ahash_request
*req
,
1552 struct shash_desc
*desc
, struct test_sglist
*tsgl
,
1559 sprintf(vec_name
, "%u", vec_num
);
1561 for (i
= 0; i
< ARRAY_SIZE(default_hash_testvec_configs
); i
++) {
1562 err
= test_hash_vec_cfg(driver
, vec
, vec_name
,
1563 &default_hash_testvec_configs
[i
],
1564 req
, desc
, tsgl
, hashstate
);
1569 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1570 if (!noextratests
) {
1571 struct testvec_config cfg
;
1572 char cfgname
[TESTVEC_CONFIG_NAMELEN
];
1574 for (i
= 0; i
< fuzz_iterations
; i
++) {
1575 generate_random_testvec_config(&cfg
, cfgname
,
1577 err
= test_hash_vec_cfg(driver
, vec
, vec_name
, &cfg
,
1578 req
, desc
, tsgl
, hashstate
);
1588 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
1590 * Generate a hash test vector from the given implementation.
1591 * Assumes the buffers in 'vec' were already allocated.
1593 static void generate_random_hash_testvec(struct shash_desc
*desc
,
1594 struct hash_testvec
*vec
,
1595 unsigned int maxkeysize
,
1596 unsigned int maxdatasize
,
1597 char *name
, size_t max_namelen
)
1600 vec
->psize
= generate_random_length(maxdatasize
);
1601 generate_random_bytes((u8
*)vec
->plaintext
, vec
->psize
);
1604 * Key: length in range [1, maxkeysize], but usually choose maxkeysize.
1605 * If algorithm is unkeyed, then maxkeysize == 0 and set ksize = 0.
1607 vec
->setkey_error
= 0;
1610 vec
->ksize
= maxkeysize
;
1611 if (prandom_u32() % 4 == 0)
1612 vec
->ksize
= 1 + (prandom_u32() % maxkeysize
);
1613 generate_random_bytes((u8
*)vec
->key
, vec
->ksize
);
1615 vec
->setkey_error
= crypto_shash_setkey(desc
->tfm
, vec
->key
,
1617 /* If the key couldn't be set, no need to continue to digest. */
1618 if (vec
->setkey_error
)
1623 vec
->digest_error
= crypto_shash_digest(desc
, vec
->plaintext
,
1624 vec
->psize
, (u8
*)vec
->digest
);
1626 snprintf(name
, max_namelen
, "\"random: psize=%u ksize=%u\"",
1627 vec
->psize
, vec
->ksize
);
1631 * Test the hash algorithm represented by @req against the corresponding generic
1632 * implementation, if one is available.
1634 static int test_hash_vs_generic_impl(const char *driver
,
1635 const char *generic_driver
,
1636 unsigned int maxkeysize
,
1637 struct ahash_request
*req
,
1638 struct shash_desc
*desc
,
1639 struct test_sglist
*tsgl
,
1642 struct crypto_ahash
*tfm
= crypto_ahash_reqtfm(req
);
1643 const unsigned int digestsize
= crypto_ahash_digestsize(tfm
);
1644 const unsigned int blocksize
= crypto_ahash_blocksize(tfm
);
1645 const unsigned int maxdatasize
= (2 * PAGE_SIZE
) - TESTMGR_POISON_LEN
;
1646 const char *algname
= crypto_hash_alg_common(tfm
)->base
.cra_name
;
1647 char _generic_driver
[CRYPTO_MAX_ALG_NAME
];
1648 struct crypto_shash
*generic_tfm
= NULL
;
1649 struct shash_desc
*generic_desc
= NULL
;
1651 struct hash_testvec vec
= { 0 };
1653 struct testvec_config
*cfg
;
1654 char cfgname
[TESTVEC_CONFIG_NAMELEN
];
1660 if (!generic_driver
) { /* Use default naming convention? */
1661 err
= build_generic_driver_name(algname
, _generic_driver
);
1664 generic_driver
= _generic_driver
;
1667 if (strcmp(generic_driver
, driver
) == 0) /* Already the generic impl? */
1670 generic_tfm
= crypto_alloc_shash(generic_driver
, 0, 0);
1671 if (IS_ERR(generic_tfm
)) {
1672 err
= PTR_ERR(generic_tfm
);
1673 if (err
== -ENOENT
) {
1674 pr_warn("alg: hash: skipping comparison tests for %s because %s is unavailable\n",
1675 driver
, generic_driver
);
1678 pr_err("alg: hash: error allocating %s (generic impl of %s): %d\n",
1679 generic_driver
, algname
, err
);
1683 cfg
= kzalloc(sizeof(*cfg
), GFP_KERNEL
);
1689 generic_desc
= kzalloc(sizeof(*desc
) +
1690 crypto_shash_descsize(generic_tfm
), GFP_KERNEL
);
1691 if (!generic_desc
) {
1695 generic_desc
->tfm
= generic_tfm
;
1697 /* Check the algorithm properties for consistency. */
1699 if (digestsize
!= crypto_shash_digestsize(generic_tfm
)) {
1700 pr_err("alg: hash: digestsize for %s (%u) doesn't match generic impl (%u)\n",
1702 crypto_shash_digestsize(generic_tfm
));
1707 if (blocksize
!= crypto_shash_blocksize(generic_tfm
)) {
1708 pr_err("alg: hash: blocksize for %s (%u) doesn't match generic impl (%u)\n",
1709 driver
, blocksize
, crypto_shash_blocksize(generic_tfm
));
1715 * Now generate test vectors using the generic implementation, and test
1716 * the other implementation against them.
1719 vec
.key
= kmalloc(maxkeysize
, GFP_KERNEL
);
1720 vec
.plaintext
= kmalloc(maxdatasize
, GFP_KERNEL
);
1721 vec
.digest
= kmalloc(digestsize
, GFP_KERNEL
);
1722 if (!vec
.key
|| !vec
.plaintext
|| !vec
.digest
) {
1727 for (i
= 0; i
< fuzz_iterations
* 8; i
++) {
1728 generate_random_hash_testvec(generic_desc
, &vec
,
1729 maxkeysize
, maxdatasize
,
1730 vec_name
, sizeof(vec_name
));
1731 generate_random_testvec_config(cfg
, cfgname
, sizeof(cfgname
));
1733 err
= test_hash_vec_cfg(driver
, &vec
, vec_name
, cfg
,
1734 req
, desc
, tsgl
, hashstate
);
1743 kfree(vec
.plaintext
);
1745 crypto_free_shash(generic_tfm
);
1746 kzfree(generic_desc
);
1749 #else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1750 static int test_hash_vs_generic_impl(const char *driver
,
1751 const char *generic_driver
,
1752 unsigned int maxkeysize
,
1753 struct ahash_request
*req
,
1754 struct shash_desc
*desc
,
1755 struct test_sglist
*tsgl
,
1760 #endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
1762 static int alloc_shash(const char *driver
, u32 type
, u32 mask
,
1763 struct crypto_shash
**tfm_ret
,
1764 struct shash_desc
**desc_ret
)
1766 struct crypto_shash
*tfm
;
1767 struct shash_desc
*desc
;
1769 tfm
= crypto_alloc_shash(driver
, type
, mask
);
1771 if (PTR_ERR(tfm
) == -ENOENT
) {
1773 * This algorithm is only available through the ahash
1774 * API, not the shash API, so skip the shash tests.
1778 pr_err("alg: hash: failed to allocate shash transform for %s: %ld\n",
1779 driver
, PTR_ERR(tfm
));
1780 return PTR_ERR(tfm
);
1783 desc
= kmalloc(sizeof(*desc
) + crypto_shash_descsize(tfm
), GFP_KERNEL
);
1785 crypto_free_shash(tfm
);
1795 static int __alg_test_hash(const struct hash_testvec
*vecs
,
1796 unsigned int num_vecs
, const char *driver
,
1798 const char *generic_driver
, unsigned int maxkeysize
)
1800 struct crypto_ahash
*atfm
= NULL
;
1801 struct ahash_request
*req
= NULL
;
1802 struct crypto_shash
*stfm
= NULL
;
1803 struct shash_desc
*desc
= NULL
;
1804 struct test_sglist
*tsgl
= NULL
;
1805 u8
*hashstate
= NULL
;
1806 unsigned int statesize
;
1811 * Always test the ahash API. This works regardless of whether the
1812 * algorithm is implemented as ahash or shash.
1815 atfm
= crypto_alloc_ahash(driver
, type
, mask
);
1817 pr_err("alg: hash: failed to allocate transform for %s: %ld\n",
1818 driver
, PTR_ERR(atfm
));
1819 return PTR_ERR(atfm
);
1822 req
= ahash_request_alloc(atfm
, GFP_KERNEL
);
1824 pr_err("alg: hash: failed to allocate request for %s\n",
1831 * If available also test the shash API, to cover corner cases that may
1832 * be missed by testing the ahash API only.
1834 err
= alloc_shash(driver
, type
, mask
, &stfm
, &desc
);
1838 tsgl
= kmalloc(sizeof(*tsgl
), GFP_KERNEL
);
1839 if (!tsgl
|| init_test_sglist(tsgl
) != 0) {
1840 pr_err("alg: hash: failed to allocate test buffers for %s\n",
1848 statesize
= crypto_ahash_statesize(atfm
);
1850 statesize
= max(statesize
, crypto_shash_statesize(stfm
));
1851 hashstate
= kmalloc(statesize
+ TESTMGR_POISON_LEN
, GFP_KERNEL
);
1853 pr_err("alg: hash: failed to allocate hash state buffer for %s\n",
1859 for (i
= 0; i
< num_vecs
; i
++) {
1860 err
= test_hash_vec(driver
, &vecs
[i
], i
, req
, desc
, tsgl
,
1866 err
= test_hash_vs_generic_impl(driver
, generic_driver
, maxkeysize
, req
,
1867 desc
, tsgl
, hashstate
);
1871 destroy_test_sglist(tsgl
);
1875 crypto_free_shash(stfm
);
1876 ahash_request_free(req
);
1877 crypto_free_ahash(atfm
);
1881 static int alg_test_hash(const struct alg_test_desc
*desc
, const char *driver
,
1884 const struct hash_testvec
*template = desc
->suite
.hash
.vecs
;
1885 unsigned int tcount
= desc
->suite
.hash
.count
;
1886 unsigned int nr_unkeyed
, nr_keyed
;
1887 unsigned int maxkeysize
= 0;
1891 * For OPTIONAL_KEY algorithms, we have to do all the unkeyed tests
1892 * first, before setting a key on the tfm. To make this easier, we
1893 * require that the unkeyed test vectors (if any) are listed first.
1896 for (nr_unkeyed
= 0; nr_unkeyed
< tcount
; nr_unkeyed
++) {
1897 if (template[nr_unkeyed
].ksize
)
1900 for (nr_keyed
= 0; nr_unkeyed
+ nr_keyed
< tcount
; nr_keyed
++) {
1901 if (!template[nr_unkeyed
+ nr_keyed
].ksize
) {
1902 pr_err("alg: hash: test vectors for %s out of order, "
1903 "unkeyed ones must come first\n", desc
->alg
);
1906 maxkeysize
= max_t(unsigned int, maxkeysize
,
1907 template[nr_unkeyed
+ nr_keyed
].ksize
);
1912 err
= __alg_test_hash(template, nr_unkeyed
, driver
, type
, mask
,
1913 desc
->generic_driver
, maxkeysize
);
1914 template += nr_unkeyed
;
1917 if (!err
&& nr_keyed
)
1918 err
= __alg_test_hash(template, nr_keyed
, driver
, type
, mask
,
1919 desc
->generic_driver
, maxkeysize
);
1924 static int test_aead_vec_cfg(const char *driver
, int enc
,
1925 const struct aead_testvec
*vec
,
1926 const char *vec_name
,
1927 const struct testvec_config
*cfg
,
1928 struct aead_request
*req
,
1929 struct cipher_test_sglists
*tsgls
)
1931 struct crypto_aead
*tfm
= crypto_aead_reqtfm(req
);
1932 const unsigned int alignmask
= crypto_aead_alignmask(tfm
);
1933 const unsigned int ivsize
= crypto_aead_ivsize(tfm
);
1934 const unsigned int authsize
= vec
->clen
- vec
->plen
;
1935 const u32 req_flags
= CRYPTO_TFM_REQ_MAY_BACKLOG
| cfg
->req_flags
;
1936 const char *op
= enc
? "encryption" : "decryption";
1937 DECLARE_CRYPTO_WAIT(wait
);
1938 u8 _iv
[3 * (MAX_ALGAPI_ALIGNMASK
+ 1) + MAX_IVLEN
];
1939 u8
*iv
= PTR_ALIGN(&_iv
[0], 2 * (MAX_ALGAPI_ALIGNMASK
+ 1)) +
1941 (cfg
->iv_offset_relative_to_alignmask
? alignmask
: 0);
1942 struct kvec input
[2];
1947 crypto_aead_set_flags(tfm
, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
1949 crypto_aead_clear_flags(tfm
, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS
);
1951 err
= do_setkey(crypto_aead_setkey
, tfm
, vec
->key
, vec
->klen
,
1953 if (err
&& err
!= vec
->setkey_error
) {
1954 pr_err("alg: aead: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
1955 driver
, vec_name
, vec
->setkey_error
, err
,
1956 crypto_aead_get_flags(tfm
));
1959 if (!err
&& vec
->setkey_error
) {
1960 pr_err("alg: aead: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
1961 driver
, vec_name
, vec
->setkey_error
);
1965 /* Set the authentication tag size */
1966 err
= crypto_aead_setauthsize(tfm
, authsize
);
1967 if (err
&& err
!= vec
->setauthsize_error
) {
1968 pr_err("alg: aead: %s setauthsize failed on test vector %s; expected_error=%d, actual_error=%d\n",
1969 driver
, vec_name
, vec
->setauthsize_error
, err
);
1972 if (!err
&& vec
->setauthsize_error
) {
1973 pr_err("alg: aead: %s setauthsize unexpectedly succeeded on test vector %s; expected_error=%d\n",
1974 driver
, vec_name
, vec
->setauthsize_error
);
1978 if (vec
->setkey_error
|| vec
->setauthsize_error
)
1981 /* The IV must be copied to a buffer, as the algorithm may modify it */
1982 if (WARN_ON(ivsize
> MAX_IVLEN
))
1985 memcpy(iv
, vec
->iv
, ivsize
);
1987 memset(iv
, 0, ivsize
);
1989 /* Build the src/dst scatterlists */
1990 input
[0].iov_base
= (void *)vec
->assoc
;
1991 input
[0].iov_len
= vec
->alen
;
1992 input
[1].iov_base
= enc
? (void *)vec
->ptext
: (void *)vec
->ctext
;
1993 input
[1].iov_len
= enc
? vec
->plen
: vec
->clen
;
1994 err
= build_cipher_test_sglists(tsgls
, cfg
, alignmask
,
1995 vec
->alen
+ (enc
? vec
->plen
:
1997 vec
->alen
+ (enc
? vec
->clen
:
2001 pr_err("alg: aead: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
2002 driver
, op
, vec_name
, cfg
->name
);
2006 /* Do the actual encryption or decryption */
2007 testmgr_poison(req
->__ctx
, crypto_aead_reqsize(tfm
));
2008 aead_request_set_callback(req
, req_flags
, crypto_req_done
, &wait
);
2009 aead_request_set_crypt(req
, tsgls
->src
.sgl_ptr
, tsgls
->dst
.sgl_ptr
,
2010 enc
? vec
->plen
: vec
->clen
, iv
);
2011 aead_request_set_ad(req
, vec
->alen
);
2013 crypto_disable_simd_for_test();
2014 err
= enc
? crypto_aead_encrypt(req
) : crypto_aead_decrypt(req
);
2016 crypto_reenable_simd_for_test();
2017 err
= crypto_wait_req(err
, &wait
);
2019 /* Check that the algorithm didn't overwrite things it shouldn't have */
2020 if (req
->cryptlen
!= (enc
? vec
->plen
: vec
->clen
) ||
2021 req
->assoclen
!= vec
->alen
||
2023 req
->src
!= tsgls
->src
.sgl_ptr
||
2024 req
->dst
!= tsgls
->dst
.sgl_ptr
||
2025 crypto_aead_reqtfm(req
) != tfm
||
2026 req
->base
.complete
!= crypto_req_done
||
2027 req
->base
.flags
!= req_flags
||
2028 req
->base
.data
!= &wait
) {
2029 pr_err("alg: aead: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
2030 driver
, op
, vec_name
, cfg
->name
);
2031 if (req
->cryptlen
!= (enc
? vec
->plen
: vec
->clen
))
2032 pr_err("alg: aead: changed 'req->cryptlen'\n");
2033 if (req
->assoclen
!= vec
->alen
)
2034 pr_err("alg: aead: changed 'req->assoclen'\n");
2036 pr_err("alg: aead: changed 'req->iv'\n");
2037 if (req
->src
!= tsgls
->src
.sgl_ptr
)
2038 pr_err("alg: aead: changed 'req->src'\n");
2039 if (req
->dst
!= tsgls
->dst
.sgl_ptr
)
2040 pr_err("alg: aead: changed 'req->dst'\n");
2041 if (crypto_aead_reqtfm(req
) != tfm
)
2042 pr_err("alg: aead: changed 'req->base.tfm'\n");
2043 if (req
->base
.complete
!= crypto_req_done
)
2044 pr_err("alg: aead: changed 'req->base.complete'\n");
2045 if (req
->base
.flags
!= req_flags
)
2046 pr_err("alg: aead: changed 'req->base.flags'\n");
2047 if (req
->base
.data
!= &wait
)
2048 pr_err("alg: aead: changed 'req->base.data'\n");
2051 if (is_test_sglist_corrupted(&tsgls
->src
)) {
2052 pr_err("alg: aead: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
2053 driver
, op
, vec_name
, cfg
->name
);
2056 if (tsgls
->dst
.sgl_ptr
!= tsgls
->src
.sgl
&&
2057 is_test_sglist_corrupted(&tsgls
->dst
)) {
2058 pr_err("alg: aead: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
2059 driver
, op
, vec_name
, cfg
->name
);
2063 /* Check for unexpected success or failure, or wrong error code */
2064 if ((err
== 0 && vec
->novrfy
) ||
2065 (err
!= vec
->crypt_error
&& !(err
== -EBADMSG
&& vec
->novrfy
))) {
2066 char expected_error
[32];
2069 vec
->crypt_error
!= 0 && vec
->crypt_error
!= -EBADMSG
)
2070 sprintf(expected_error
, "-EBADMSG or %d",
2072 else if (vec
->novrfy
)
2073 sprintf(expected_error
, "-EBADMSG");
2075 sprintf(expected_error
, "%d", vec
->crypt_error
);
2077 pr_err("alg: aead: %s %s failed on test vector %s; expected_error=%s, actual_error=%d, cfg=\"%s\"\n",
2078 driver
, op
, vec_name
, expected_error
, err
,
2082 pr_err("alg: aead: %s %s unexpectedly succeeded on test vector %s; expected_error=%s, cfg=\"%s\"\n",
2083 driver
, op
, vec_name
, expected_error
, cfg
->name
);
2086 if (err
) /* Expectedly failed. */
2089 /* Check for the correct output (ciphertext or plaintext) */
2090 err
= verify_correct_output(&tsgls
->dst
, enc
? vec
->ctext
: vec
->ptext
,
2091 enc
? vec
->clen
: vec
->plen
,
2092 vec
->alen
, enc
|| !cfg
->inplace
);
2093 if (err
== -EOVERFLOW
) {
2094 pr_err("alg: aead: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
2095 driver
, op
, vec_name
, cfg
->name
);
2099 pr_err("alg: aead: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
2100 driver
, op
, vec_name
, cfg
->name
);
2107 static int test_aead_vec(const char *driver
, int enc
,
2108 const struct aead_testvec
*vec
, unsigned int vec_num
,
2109 struct aead_request
*req
,
2110 struct cipher_test_sglists
*tsgls
)
2116 if (enc
&& vec
->novrfy
)
2119 sprintf(vec_name
, "%u", vec_num
);
2121 for (i
= 0; i
< ARRAY_SIZE(default_cipher_testvec_configs
); i
++) {
2122 err
= test_aead_vec_cfg(driver
, enc
, vec
, vec_name
,
2123 &default_cipher_testvec_configs
[i
],
2129 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2130 if (!noextratests
) {
2131 struct testvec_config cfg
;
2132 char cfgname
[TESTVEC_CONFIG_NAMELEN
];
2134 for (i
= 0; i
< fuzz_iterations
; i
++) {
2135 generate_random_testvec_config(&cfg
, cfgname
,
2137 err
= test_aead_vec_cfg(driver
, enc
, vec
, vec_name
,
2148 #ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
2150 struct aead_extra_tests_ctx
{
2151 struct aead_request
*req
;
2152 struct crypto_aead
*tfm
;
2154 const struct alg_test_desc
*test_desc
;
2155 struct cipher_test_sglists
*tsgls
;
2156 unsigned int maxdatasize
;
2157 unsigned int maxkeysize
;
2159 struct aead_testvec vec
;
2161 char cfgname
[TESTVEC_CONFIG_NAMELEN
];
2162 struct testvec_config cfg
;
/*
 * Make at least one random change to a (ciphertext, AAD) pair.  "Ciphertext"
 * here means the full ciphertext including the authentication tag.  The
 * authentication tag (and hence also the ciphertext) is assumed to be nonempty.
 */
static void mutate_aead_message(struct aead_testvec *vec, bool esp_aad)
{
	const unsigned int aad_tail_size = esp_aad ? 8 : 0;
	const unsigned int authsize = vec->clen - vec->plen;

	if (prandom_u32() % 2 == 0 && vec->alen > aad_tail_size) {
		/* Mutate the AAD */
		flip_random_bit((u8 *)vec->assoc, vec->alen - aad_tail_size);
		if (prandom_u32() % 2 == 0)
			return;
	}
	if (prandom_u32() % 2 == 0) {
		/* Mutate auth tag (assuming it's at the end of ciphertext) */
		flip_random_bit((u8 *)vec->ctext + vec->plen, authsize);
	} else {
		/* Mutate any part of the ciphertext */
		flip_random_bit((u8 *)vec->ctext, vec->clen);
	}
}
/*
 * Minimum authentication tag size in bytes at which we assume that we can
 * reliably generate inauthentic messages, i.e. not generate an authentic
 * message by chance.
 */
#define MIN_COLLISION_FREE_AUTHSIZE 8

static void generate_aead_message(struct aead_request *req,
				  const struct aead_test_suite *suite,
				  struct aead_testvec *vec,
				  bool prefer_inauthentic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	const unsigned int authsize = vec->clen - vec->plen;
	const bool inauthentic = (authsize >= MIN_COLLISION_FREE_AUTHSIZE) &&
				 (prefer_inauthentic || prandom_u32() % 4 == 0);

	/* Generate the AAD. */
	generate_random_bytes((u8 *)vec->assoc, vec->alen);

	if (inauthentic && prandom_u32() % 2 == 0) {
		/* Generate a random ciphertext. */
		generate_random_bytes((u8 *)vec->ctext, vec->clen);
	} else {
		int i = 0;
		struct scatterlist src[2], dst;
		u8 iv[MAX_IVLEN];
		DECLARE_CRYPTO_WAIT(wait);

		/* Generate a random plaintext and encrypt it. */
		sg_init_table(src, 2);
		if (vec->alen)
			sg_set_buf(&src[i++], vec->assoc, vec->alen);
		if (vec->plen) {
			generate_random_bytes((u8 *)vec->ptext, vec->plen);
			sg_set_buf(&src[i++], vec->ptext, vec->plen);
		}
		sg_init_one(&dst, vec->ctext, vec->alen + vec->clen);
		memcpy(iv, vec->iv, ivsize);
		aead_request_set_callback(req, 0, crypto_req_done, &wait);
		aead_request_set_crypt(req, src, &dst, vec->plen, iv);
		aead_request_set_ad(req, vec->alen);
		vec->crypt_error = crypto_wait_req(crypto_aead_encrypt(req),
						   &wait);
		/* If encryption failed, we're done. */
		if (vec->crypt_error != 0)
			return;
		memmove((u8 *)vec->ctext, vec->ctext + vec->alen, vec->clen);
		if (!inauthentic)
			return;
		/*
		 * Mutate the authentic (ciphertext, AAD) pair to get an
		 * inauthentic one.
		 */
		mutate_aead_message(vec, suite->esp_aad);
	}
	vec->novrfy = 1;
	if (suite->einval_allowed)
		vec->crypt_error = -EINVAL;
}
/*
 * Generate an AEAD test vector 'vec' using the implementation specified by
 * 'req'.  The buffers in 'vec' must already be allocated.
 *
 * If 'prefer_inauthentic' is true, then this function will generate inauthentic
 * test vectors (i.e. vectors with 'vec->novrfy=1') more often.
 */
static void generate_random_aead_testvec(struct aead_request *req,
					 struct aead_testvec *vec,
					 const struct aead_test_suite *suite,
					 unsigned int maxkeysize,
					 unsigned int maxdatasize,
					 char *name, size_t max_namelen,
					 bool prefer_inauthentic)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	const unsigned int ivsize = crypto_aead_ivsize(tfm);
	const unsigned int maxauthsize = crypto_aead_maxauthsize(tfm);
	unsigned int authsize;
	unsigned int total_len;

	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
	vec->klen = maxkeysize;
	if (prandom_u32() % 4 == 0)
		vec->klen = prandom_u32() % (maxkeysize + 1);
	generate_random_bytes((u8 *)vec->key, vec->klen);
	vec->setkey_error = crypto_aead_setkey(tfm, vec->key, vec->klen);

	/* IV */
	generate_random_bytes((u8 *)vec->iv, ivsize);

	/* Tag length: in [0, maxauthsize], but usually choose maxauthsize */
	authsize = maxauthsize;
	if (prandom_u32() % 4 == 0)
		authsize = prandom_u32() % (maxauthsize + 1);
	if (prefer_inauthentic && authsize < MIN_COLLISION_FREE_AUTHSIZE)
		authsize = MIN_COLLISION_FREE_AUTHSIZE;
	if (WARN_ON(authsize > maxdatasize))
		authsize = maxdatasize;
	maxdatasize -= authsize;
	vec->setauthsize_error = crypto_aead_setauthsize(tfm, authsize);

	/* AAD, plaintext, and ciphertext lengths */
	total_len = generate_random_length(maxdatasize);
	if (prandom_u32() % 4 == 0)
		vec->alen = 0;
	else
		vec->alen = generate_random_length(total_len);
	vec->plen = total_len - vec->alen;
	vec->clen = vec->plen + authsize;

	/*
	 * Generate the AAD, plaintext, and ciphertext.  Not applicable if the
	 * key or the authentication tag size couldn't be set.
	 */
	vec->novrfy = 0;
	vec->crypt_error = 0;
	if (vec->setkey_error == 0 && vec->setauthsize_error == 0)
		generate_aead_message(req, suite, vec, prefer_inauthentic);
	snprintf(name, max_namelen,
		 "\"random: alen=%u plen=%u authsize=%u klen=%u novrfy=%d\"",
		 vec->alen, vec->plen, authsize, vec->klen, vec->novrfy);
}
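
/*
 * Try to generate an inauthentic test vector, i.e. one with vec->novrfy=1.
 * Give up after a bounded number of attempts so that an implementation which
 * keeps rejecting the generated keys can't stall the self-tests.
 */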
static void try_to_generate_inauthentic_testvec(
					struct aead_extra_tests_ctx *ctx)
{
	int i;

	for (i = 0; i < 10; i++) {
		generate_random_aead_testvec(ctx->req, &ctx->vec,
					     &ctx->test_desc->suite.aead,
					     ctx->maxkeysize, ctx->maxdatasize,
					     ctx->vec_name,
					     sizeof(ctx->vec_name), true);
		if (ctx->vec.novrfy)
			return;
	}
}
/*
 * Generate inauthentic test vectors (i.e. ciphertext, AAD pairs that aren't the
 * result of an encryption with the key) and verify that decryption fails.
 */
static int test_aead_inauthentic_inputs(struct aead_extra_tests_ctx *ctx)
{
	unsigned int i;
	int err;

	for (i = 0; i < fuzz_iterations * 8; i++) {
		/*
		 * Since this part of the tests isn't comparing the
		 * implementation to another, there's no point in testing any
		 * test vectors other than inauthentic ones (vec.novrfy=1) here.
		 *
		 * If we're having trouble generating such a test vector, e.g.
		 * if the algorithm keeps rejecting the generated keys, don't
		 * retry forever; just continue on.
		 */
		try_to_generate_inauthentic_testvec(ctx);
		if (ctx->vec.novrfy) {
			generate_random_testvec_config(&ctx->cfg, ctx->cfgname,
						       sizeof(ctx->cfgname));
			err = test_aead_vec_cfg(ctx->driver, DECRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				return err;
		}
	}
	return 0;
}
/*
 * Test the AEAD algorithm against the corresponding generic implementation, if
 * one is available.
 */
static int test_aead_vs_generic_impl(struct aead_extra_tests_ctx *ctx)
{
	struct crypto_aead *tfm = ctx->tfm;
	const char *algname = crypto_aead_alg(tfm)->base.cra_name;
	const char *driver = ctx->driver;
	const char *generic_driver = ctx->test_desc->generic_driver;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_aead *generic_tfm = NULL;
	struct aead_request *generic_req = NULL;
	unsigned int i;
	int err;

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_aead(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		if (err == -ENOENT) {
			pr_warn("alg: aead: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: aead: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	generic_req = aead_request_alloc(generic_tfm, GFP_KERNEL);
	if (!generic_req) {
		err = -ENOMEM;
		goto out;
	}

	/* Check the algorithm properties for consistency. */

	if (crypto_aead_maxauthsize(tfm) !=
	    crypto_aead_maxauthsize(generic_tfm)) {
		pr_err("alg: aead: maxauthsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_maxauthsize(tfm),
		       crypto_aead_maxauthsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (crypto_aead_ivsize(tfm) != crypto_aead_ivsize(generic_tfm)) {
		pr_err("alg: aead: ivsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_ivsize(tfm),
		       crypto_aead_ivsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (crypto_aead_blocksize(tfm) != crypto_aead_blocksize(generic_tfm)) {
		pr_err("alg: aead: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_aead_blocksize(tfm),
		       crypto_aead_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */
	for (i = 0; i < fuzz_iterations * 8; i++) {
		generate_random_aead_testvec(generic_req, &ctx->vec,
					     &ctx->test_desc->suite.aead,
					     ctx->maxkeysize, ctx->maxdatasize,
					     ctx->vec_name,
					     sizeof(ctx->vec_name), false);
		generate_random_testvec_config(&ctx->cfg, ctx->cfgname,
					       sizeof(ctx->cfgname));
		if (!ctx->vec.novrfy) {
			err = test_aead_vec_cfg(driver, ENCRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				goto out;
		}
		if (ctx->vec.crypt_error == 0 || ctx->vec.novrfy) {
			err = test_aead_vec_cfg(driver, DECRYPT, &ctx->vec,
						ctx->vec_name, &ctx->cfg,
						ctx->req, ctx->tsgls);
			if (err)
				goto out;
		}
	}
	err = 0;
out:
	crypto_free_aead(generic_tfm);
	aead_request_free(generic_req);
	return err;
}
static int test_aead_extra(const char *driver,
			   const struct alg_test_desc *test_desc,
			   struct aead_request *req,
			   struct cipher_test_sglists *tsgls)
{
	struct aead_extra_tests_ctx *ctx;
	unsigned int i;
	int err;

	if (noextratests)
		return 0;

	ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);
	if (!ctx)
		return -ENOMEM;
	ctx->req = req;
	ctx->tfm = crypto_aead_reqtfm(req);
	ctx->driver = driver;
	ctx->test_desc = test_desc;
	ctx->tsgls = tsgls;
	ctx->maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
	ctx->maxkeysize = 0;
	for (i = 0; i < test_desc->suite.aead.count; i++)
		ctx->maxkeysize = max_t(unsigned int, ctx->maxkeysize,
					test_desc->suite.aead.vecs[i].klen);

	ctx->vec.key = kmalloc(ctx->maxkeysize, GFP_KERNEL);
	ctx->vec.iv = kmalloc(crypto_aead_ivsize(ctx->tfm), GFP_KERNEL);
	ctx->vec.assoc = kmalloc(ctx->maxdatasize, GFP_KERNEL);
	ctx->vec.ptext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
	ctx->vec.ctext = kmalloc(ctx->maxdatasize, GFP_KERNEL);
	if (!ctx->vec.key || !ctx->vec.iv || !ctx->vec.assoc ||
	    !ctx->vec.ptext || !ctx->vec.ctext) {
		err = -ENOMEM;
		goto out;
	}

	err = test_aead_inauthentic_inputs(ctx);
	if (err)
		goto out;

	err = test_aead_vs_generic_impl(ctx);
out:
	kfree(ctx->vec.key);
	kfree(ctx->vec.iv);
	kfree(ctx->vec.assoc);
	kfree(ctx->vec.ptext);
	kfree(ctx->vec.ctext);
	kfree(ctx);
	return err;
}
#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
static int test_aead_extra(const char *driver,
			   const struct alg_test_desc *test_desc,
			   struct aead_request *req,
			   struct cipher_test_sglists *tsgls)
{
	return 0;
}
#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
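
/*
 * Run all test vectors in the given AEAD test suite in one direction
 * (ENCRYPT or DECRYPT).
 */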
static int test_aead(const char *driver, int enc,
		     const struct aead_test_suite *suite,
		     struct aead_request *req,
		     struct cipher_test_sglists *tsgls)
{
	unsigned int i;
	int err;

	for (i = 0; i < suite->count; i++) {
		err = test_aead_vec(driver, enc, &suite->vecs[i], i, req,
				    tsgls);
		if (err)
			return err;
	}
	return 0;
}
static int alg_test_aead(const struct alg_test_desc *desc, const char *driver,
			 u32 type, u32 mask)
{
	const struct aead_test_suite *suite = &desc->suite.aead;
	struct crypto_aead *tfm;
	struct aead_request *req = NULL;
	struct cipher_test_sglists *tsgls = NULL;
	int err;

	if (suite->count <= 0) {
		pr_err("alg: aead: empty test suite for %s\n", driver);
		return -EINVAL;
	}

	tfm = crypto_alloc_aead(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: aead: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	req = aead_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: aead: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgls = alloc_cipher_test_sglists();
	if (!tsgls) {
		pr_err("alg: aead: failed to allocate test buffers for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	err = test_aead(driver, ENCRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_aead(driver, DECRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_aead_extra(driver, desc, req, tsgls);
out:
	free_cipher_test_sglists(tsgls);
	aead_request_free(req);
	crypto_free_aead(tfm);
	return err;
}
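
/*
 * Test a single-block cipher through the legacy crypto_cipher interface,
 * processing each test vector one block at a time.
 */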
static int test_cipher(struct crypto_cipher *tfm, int enc,
		       const struct cipher_testvec *template,
		       unsigned int tcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_cipher_tfm(tfm));
	unsigned int i, j, k;
	char *q;
	const char *e;
	const char *input, *result;
	void *data;
	char *xbuf[XBUFSIZE];
	int ret = -ENOMEM;

	if (testmgr_alloc_buf(xbuf))
		goto out_nobuf;

	if (enc == ENCRYPT)
		e = "encryption";
	else
		e = "decryption";

	j = 0;
	for (i = 0; i < tcount; i++) {

		if (fips_enabled && template[i].fips_skip)
			continue;

		input  = enc ? template[i].ptext : template[i].ctext;
		result = enc ? template[i].ctext : template[i].ptext;
		j++;

		ret = -EINVAL;
		if (WARN_ON(template[i].len > PAGE_SIZE))
			goto out;

		data = xbuf[0];
		memcpy(data, input, template[i].len);

		crypto_cipher_clear_flags(tfm, ~0);
		if (template[i].wk)
			crypto_cipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);

		ret = crypto_cipher_setkey(tfm, template[i].key,
					   template[i].klen);
		if (ret) {
			if (ret == template[i].setkey_error)
				continue;
			pr_err("alg: cipher: %s setkey failed on test vector %u; expected_error=%d, actual_error=%d, flags=%#x\n",
			       algo, j, template[i].setkey_error, ret,
			       crypto_cipher_get_flags(tfm));
			goto out;
		}
		if (template[i].setkey_error) {
			pr_err("alg: cipher: %s setkey unexpectedly succeeded on test vector %u; expected_error=%d\n",
			       algo, j, template[i].setkey_error);
			ret = -EINVAL;
			goto out;
		}

		for (k = 0; k < template[i].len;
		     k += crypto_cipher_blocksize(tfm)) {
			if (enc)
				crypto_cipher_encrypt_one(tfm, data + k,
							  data + k);
			else
				crypto_cipher_decrypt_one(tfm, data + k,
							  data + k);
		}

		q = data;
		if (memcmp(q, result, template[i].len)) {
			printk(KERN_ERR "alg: cipher: Test %d failed "
			       "on %s for %s\n", j, e, algo);
			hexdump(q, template[i].len);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	testmgr_free_buf(xbuf);
out_nobuf:
	return ret;
}
static int test_skcipher_vec_cfg(const char *driver, int enc,
				 const struct cipher_testvec *vec,
				 const char *vec_name,
				 const struct testvec_config *cfg,
				 struct skcipher_request *req,
				 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int alignmask = crypto_skcipher_alignmask(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const u32 req_flags = CRYPTO_TFM_REQ_MAY_BACKLOG | cfg->req_flags;
	const char *op = enc ? "encryption" : "decryption";
	DECLARE_CRYPTO_WAIT(wait);
	u8 _iv[3 * (MAX_ALGAPI_ALIGNMASK + 1) + MAX_IVLEN];
	u8 *iv = PTR_ALIGN(&_iv[0], 2 * (MAX_ALGAPI_ALIGNMASK + 1)) +
		 cfg->iv_offset +
		 (cfg->iv_offset_relative_to_alignmask ? alignmask : 0);
	struct kvec input;
	int err;

	/* Set the key */
	if (vec->wk)
		crypto_skcipher_set_flags(tfm, CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	else
		crypto_skcipher_clear_flags(tfm,
					    CRYPTO_TFM_REQ_FORBID_WEAK_KEYS);
	err = do_setkey(crypto_skcipher_setkey, tfm, vec->key, vec->klen,
			cfg, alignmask);
	if (err) {
		if (err == vec->setkey_error)
			return 0;
		pr_err("alg: skcipher: %s setkey failed on test vector %s; expected_error=%d, actual_error=%d, flags=%#x\n",
		       driver, vec_name, vec->setkey_error, err,
		       crypto_skcipher_get_flags(tfm));
		return err;
	}
	if (vec->setkey_error) {
		pr_err("alg: skcipher: %s setkey unexpectedly succeeded on test vector %s; expected_error=%d\n",
		       driver, vec_name, vec->setkey_error);
		return -EINVAL;
	}

	/* The IV must be copied to a buffer, as the algorithm may modify it */
	if (ivsize) {
		if (WARN_ON(ivsize > MAX_IVLEN))
			return -EINVAL;
		if (vec->generates_iv && !enc)
			memcpy(iv, vec->iv_out, ivsize);
		else if (vec->iv)
			memcpy(iv, vec->iv, ivsize);
		else
			memset(iv, 0, ivsize);
	} else {
		if (vec->generates_iv) {
			pr_err("alg: skcipher: %s has ivsize=0 but test vector %s generates IV!\n",
			       driver, vec_name);
			return -EINVAL;
		}
		iv = NULL;
	}

	/* Build the src/dst scatterlists */
	input.iov_base = enc ? (void *)vec->ptext : (void *)vec->ctext;
	input.iov_len = vec->len;
	err = build_cipher_test_sglists(tsgls, cfg, alignmask,
					vec->len, vec->len, &input, 1);
	if (err) {
		pr_err("alg: skcipher: %s %s: error preparing scatterlists for test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* Do the actual encryption or decryption */
	testmgr_poison(req->__ctx, crypto_skcipher_reqsize(tfm));
	skcipher_request_set_callback(req, req_flags, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, tsgls->src.sgl_ptr, tsgls->dst.sgl_ptr,
				   vec->len, iv);
	if (cfg->nosimd)
		crypto_disable_simd_for_test();
	err = enc ? crypto_skcipher_encrypt(req) : crypto_skcipher_decrypt(req);
	if (cfg->nosimd)
		crypto_reenable_simd_for_test();
	err = crypto_wait_req(err, &wait);

	/* Check that the algorithm didn't overwrite things it shouldn't have */
	if (req->cryptlen != vec->len ||
	    req->iv != iv ||
	    req->src != tsgls->src.sgl_ptr ||
	    req->dst != tsgls->dst.sgl_ptr ||
	    crypto_skcipher_reqtfm(req) != tfm ||
	    req->base.complete != crypto_req_done ||
	    req->base.flags != req_flags ||
	    req->base.data != &wait) {
		pr_err("alg: skcipher: %s %s corrupted request struct on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		if (req->cryptlen != vec->len)
			pr_err("alg: skcipher: changed 'req->cryptlen'\n");
		if (req->iv != iv)
			pr_err("alg: skcipher: changed 'req->iv'\n");
		if (req->src != tsgls->src.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->src'\n");
		if (req->dst != tsgls->dst.sgl_ptr)
			pr_err("alg: skcipher: changed 'req->dst'\n");
		if (crypto_skcipher_reqtfm(req) != tfm)
			pr_err("alg: skcipher: changed 'req->base.tfm'\n");
		if (req->base.complete != crypto_req_done)
			pr_err("alg: skcipher: changed 'req->base.complete'\n");
		if (req->base.flags != req_flags)
			pr_err("alg: skcipher: changed 'req->base.flags'\n");
		if (req->base.data != &wait)
			pr_err("alg: skcipher: changed 'req->base.data'\n");
		return -EINVAL;
	}
	if (is_test_sglist_corrupted(&tsgls->src)) {
		pr_err("alg: skcipher: %s %s corrupted src sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}
	if (tsgls->dst.sgl_ptr != tsgls->src.sgl &&
	    is_test_sglist_corrupted(&tsgls->dst)) {
		pr_err("alg: skcipher: %s %s corrupted dst sgl on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return -EINVAL;
	}

	/* Check for success or failure */
	if (err) {
		if (err == vec->crypt_error)
			return 0;
		pr_err("alg: skcipher: %s %s failed on test vector %s; expected_error=%d, actual_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, err, cfg->name);
		return err;
	}
	if (vec->crypt_error) {
		pr_err("alg: skcipher: %s %s unexpectedly succeeded on test vector %s; expected_error=%d, cfg=\"%s\"\n",
		       driver, op, vec_name, vec->crypt_error, cfg->name);
		return -EINVAL;
	}

	/* Check for the correct output (ciphertext or plaintext) */
	err = verify_correct_output(&tsgls->dst, enc ? vec->ctext : vec->ptext,
				    vec->len, 0, enc);
	if (err == -EOVERFLOW) {
		pr_err("alg: skcipher: %s %s overran dst buffer on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}
	if (err) {
		pr_err("alg: skcipher: %s %s test failed (wrong result) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		return err;
	}

	/* If applicable, check that the algorithm generated the correct IV */
	if (vec->iv_out && memcmp(iv, vec->iv_out, ivsize) != 0) {
		pr_err("alg: skcipher: %s %s test failed (wrong output IV) on test vector %s, cfg=\"%s\"\n",
		       driver, op, vec_name, cfg->name);
		hexdump(iv, ivsize);
		return -EINVAL;
	}

	return 0;
}
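
/*
 * Test one skcipher test vector in each of the default configurations, then
 * (if the extra self-tests are enabled) in random configurations as well.
 */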
static int test_skcipher_vec(const char *driver, int enc,
			     const struct cipher_testvec *vec,
			     unsigned int vec_num,
			     struct skcipher_request *req,
			     struct cipher_test_sglists *tsgls)
{
	char vec_name[16];
	unsigned int i;
	int err;

	if (fips_enabled && vec->fips_skip)
		return 0;

	sprintf(vec_name, "%u", vec_num);

	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++) {
		err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
					    &default_cipher_testvec_configs[i],
					    req, tsgls);
		if (err)
			return err;
	}

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	if (!noextratests) {
		struct testvec_config cfg;
		char cfgname[TESTVEC_CONFIG_NAMELEN];

		for (i = 0; i < fuzz_iterations; i++) {
			generate_random_testvec_config(&cfg, cfgname,
						       sizeof(cfgname));
			err = test_skcipher_vec_cfg(driver, enc, vec, vec_name,
						    &cfg, req, tsgls);
			if (err)
				return err;
		}
	}
#endif
	return 0;
}
#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
/*
 * Generate a symmetric cipher test vector from the given implementation.
 * Assumes the buffers in 'vec' were already allocated.
 */
static void generate_random_cipher_testvec(struct skcipher_request *req,
					   struct cipher_testvec *vec,
					   unsigned int maxdatasize,
					   char *name, size_t max_namelen)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct scatterlist src, dst;
	u8 iv[MAX_IVLEN];
	DECLARE_CRYPTO_WAIT(wait);

	/* Key: length in [0, maxkeysize], but usually choose maxkeysize */
	vec->klen = maxkeysize;
	if (prandom_u32() % 4 == 0)
		vec->klen = prandom_u32() % (maxkeysize + 1);
	generate_random_bytes((u8 *)vec->key, vec->klen);
	vec->setkey_error = crypto_skcipher_setkey(tfm, vec->key, vec->klen);

	/* IV */
	generate_random_bytes((u8 *)vec->iv, ivsize);

	/* Plaintext */
	vec->len = generate_random_length(maxdatasize);
	generate_random_bytes((u8 *)vec->ptext, vec->len);

	/* If the key couldn't be set, no need to continue to encrypt. */
	if (vec->setkey_error)
		goto done;

	/* Ciphertext */
	sg_init_one(&src, vec->ptext, vec->len);
	sg_init_one(&dst, vec->ctext, vec->len);
	memcpy(iv, vec->iv, ivsize);
	skcipher_request_set_callback(req, 0, crypto_req_done, &wait);
	skcipher_request_set_crypt(req, &src, &dst, vec->len, iv);
	vec->crypt_error = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);
	if (vec->crypt_error != 0) {
		/*
		 * The only acceptable error here is for an invalid length, so
		 * skcipher decryption should fail with the same error too.
		 * We'll test for this.  But to keep the API usage well-defined,
		 * explicitly initialize the ciphertext buffer too.
		 */
		memset((u8 *)vec->ctext, 0, vec->len);
	}
done:
	snprintf(name, max_namelen, "\"random: len=%u klen=%u\"",
		 vec->len, vec->klen);
}
/*
 * Test the skcipher algorithm represented by @req against the corresponding
 * generic implementation, if one is available.
 */
static int test_skcipher_vs_generic_impl(const char *driver,
					 const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	const unsigned int maxkeysize = crypto_skcipher_max_keysize(tfm);
	const unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	const unsigned int blocksize = crypto_skcipher_blocksize(tfm);
	const unsigned int maxdatasize = (2 * PAGE_SIZE) - TESTMGR_POISON_LEN;
	const char *algname = crypto_skcipher_alg(tfm)->base.cra_name;
	char _generic_driver[CRYPTO_MAX_ALG_NAME];
	struct crypto_skcipher *generic_tfm = NULL;
	struct skcipher_request *generic_req = NULL;
	unsigned int i;
	struct cipher_testvec vec = { 0 };
	char vec_name[64];
	struct testvec_config *cfg;
	char cfgname[TESTVEC_CONFIG_NAMELEN];
	int err;

	if (noextratests)
		return 0;

	/* Keywrap isn't supported here yet as it handles its IV differently. */
	if (strncmp(algname, "kw(", 3) == 0)
		return 0;

	if (!generic_driver) { /* Use default naming convention? */
		err = build_generic_driver_name(algname, _generic_driver);
		if (err)
			return err;
		generic_driver = _generic_driver;
	}

	if (strcmp(generic_driver, driver) == 0) /* Already the generic impl? */
		return 0;

	generic_tfm = crypto_alloc_skcipher(generic_driver, 0, 0);
	if (IS_ERR(generic_tfm)) {
		err = PTR_ERR(generic_tfm);
		if (err == -ENOENT) {
			pr_warn("alg: skcipher: skipping comparison tests for %s because %s is unavailable\n",
				driver, generic_driver);
			return 0;
		}
		pr_err("alg: skcipher: error allocating %s (generic impl of %s): %d\n",
		       generic_driver, algname, err);
		return err;
	}

	cfg = kzalloc(sizeof(*cfg), GFP_KERNEL);
	if (!cfg) {
		err = -ENOMEM;
		goto out;
	}

	generic_req = skcipher_request_alloc(generic_tfm, GFP_KERNEL);
	if (!generic_req) {
		err = -ENOMEM;
		goto out;
	}

	/* Check the algorithm properties for consistency. */

	if (crypto_skcipher_min_keysize(tfm) !=
	    crypto_skcipher_min_keysize(generic_tfm)) {
		pr_err("alg: skcipher: min keysize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, crypto_skcipher_min_keysize(tfm),
		       crypto_skcipher_min_keysize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (maxkeysize != crypto_skcipher_max_keysize(generic_tfm)) {
		pr_err("alg: skcipher: max keysize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, maxkeysize,
		       crypto_skcipher_max_keysize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (ivsize != crypto_skcipher_ivsize(generic_tfm)) {
		pr_err("alg: skcipher: ivsize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, ivsize, crypto_skcipher_ivsize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	if (blocksize != crypto_skcipher_blocksize(generic_tfm)) {
		pr_err("alg: skcipher: blocksize for %s (%u) doesn't match generic impl (%u)\n",
		       driver, blocksize,
		       crypto_skcipher_blocksize(generic_tfm));
		err = -EINVAL;
		goto out;
	}

	/*
	 * Now generate test vectors using the generic implementation, and test
	 * the other implementation against them.
	 */
	vec.key = kmalloc(maxkeysize, GFP_KERNEL);
	vec.iv = kmalloc(ivsize, GFP_KERNEL);
	vec.ptext = kmalloc(maxdatasize, GFP_KERNEL);
	vec.ctext = kmalloc(maxdatasize, GFP_KERNEL);
	if (!vec.key || !vec.iv || !vec.ptext || !vec.ctext) {
		err = -ENOMEM;
		goto out;
	}

	for (i = 0; i < fuzz_iterations * 8; i++) {
		generate_random_cipher_testvec(generic_req, &vec, maxdatasize,
					       vec_name, sizeof(vec_name));
		generate_random_testvec_config(cfg, cfgname, sizeof(cfgname));

		err = test_skcipher_vec_cfg(driver, ENCRYPT, &vec, vec_name,
					    cfg, req, tsgls);
		if (err)
			goto out;
		err = test_skcipher_vec_cfg(driver, DECRYPT, &vec, vec_name,
					    cfg, req, tsgls);
		if (err)
			goto out;
	}
	err = 0;
out:
	kfree(cfg);
	kfree(vec.key);
	kfree(vec.iv);
	kfree(vec.ptext);
	kfree(vec.ctext);
	crypto_free_skcipher(generic_tfm);
	skcipher_request_free(generic_req);
	return err;
}
#else /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
static int test_skcipher_vs_generic_impl(const char *driver,
					 const char *generic_driver,
					 struct skcipher_request *req,
					 struct cipher_test_sglists *tsgls)
{
	return 0;
}
#endif /* !CONFIG_CRYPTO_MANAGER_EXTRA_TESTS */
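
/*
 * Run all test vectors in the given cipher test suite in one direction
 * (ENCRYPT or DECRYPT).
 */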
static int test_skcipher(const char *driver, int enc,
			 const struct cipher_test_suite *suite,
			 struct skcipher_request *req,
			 struct cipher_test_sglists *tsgls)
{
	unsigned int i;
	int err;

	for (i = 0; i < suite->count; i++) {
		err = test_skcipher_vec(driver, enc, &suite->vecs[i], i, req,
					tsgls);
		if (err)
			return err;
	}
	return 0;
}
static int alg_test_skcipher(const struct alg_test_desc *desc,
			     const char *driver, u32 type, u32 mask)
{
	const struct cipher_test_suite *suite = &desc->suite.cipher;
	struct crypto_skcipher *tfm;
	struct skcipher_request *req = NULL;
	struct cipher_test_sglists *tsgls = NULL;
	int err;

	if (suite->count <= 0) {
		pr_err("alg: skcipher: empty test suite for %s\n", driver);
		return -EINVAL;
	}

	tfm = crypto_alloc_skcipher(driver, type, mask);
	if (IS_ERR(tfm)) {
		pr_err("alg: skcipher: failed to allocate transform for %s: %ld\n",
		       driver, PTR_ERR(tfm));
		return PTR_ERR(tfm);
	}

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		pr_err("alg: skcipher: failed to allocate request for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	tsgls = alloc_cipher_test_sglists();
	if (!tsgls) {
		pr_err("alg: skcipher: failed to allocate test buffers for %s\n",
		       driver);
		err = -ENOMEM;
		goto out;
	}

	err = test_skcipher(driver, ENCRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher(driver, DECRYPT, suite, req, tsgls);
	if (err)
		goto out;

	err = test_skcipher_vs_generic_impl(driver, desc->generic_driver, req,
					    tsgls);
out:
	free_cipher_test_sglists(tsgls);
	skcipher_request_free(req);
	crypto_free_skcipher(tfm);
	return err;
}
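
/*
 * Test the legacy synchronous compression interface: compress each vector and
 * check that it decompresses back to the original input, then run the
 * decompression-only vectors.
 */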
static int test_comp(struct crypto_comp *tfm,
		     const struct comp_testvec *ctemplate,
		     const struct comp_testvec *dtemplate,
		     int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_comp_tfm(tfm));
	char *output, *decomp_output;
	unsigned int i;
	int ret;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_output) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(output, 0, COMP_BUF_SIZE);
		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = ctemplate[i].inlen;
		ret = crypto_comp_compress(tfm, ctemplate[i].input,
					   ilen, output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: compression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		ilen = dlen;
		dlen = COMP_BUF_SIZE;
		ret = crypto_comp_decompress(tfm, output,
					     ilen, decomp_output, &dlen);
		if (ret) {
			pr_err("alg: comp: compression failed: decompress: on test %d for %s failed: ret=%d\n",
			       i + 1, algo, -ret);
			goto out;
		}

		if (dlen != ctemplate[i].inlen) {
			printk(KERN_ERR "alg: comp: Compression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, ctemplate[i].input,
			   ctemplate[i].inlen)) {
			pr_err("alg: comp: compression failed: output differs: on test %d for %s\n",
			       i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	for (i = 0; i < dtcount; i++) {
		int ilen;
		unsigned int dlen = COMP_BUF_SIZE;

		memset(decomp_output, 0, COMP_BUF_SIZE);

		ilen = dtemplate[i].inlen;
		ret = crypto_comp_decompress(tfm, dtemplate[i].input,
					     ilen, decomp_output, &dlen);
		if (ret) {
			printk(KERN_ERR "alg: comp: decompression failed "
			       "on test %d for %s: ret=%d\n", i + 1, algo,
			       -ret);
			goto out;
		}

		if (dlen != dtemplate[i].outlen) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s: output len = %d\n", i + 1, algo,
			       dlen);
			ret = -EINVAL;
			goto out;
		}

		if (memcmp(decomp_output, dtemplate[i].output, dlen)) {
			printk(KERN_ERR "alg: comp: Decompression test %d "
			       "failed for %s\n", i + 1, algo);
			hexdump(decomp_output, dlen);
			ret = -EINVAL;
			goto out;
		}
	}

	ret = 0;

out:
	kfree(decomp_output);
	kfree(output);
	return ret;
}
static int test_acomp(struct crypto_acomp *tfm,
		      const struct comp_testvec *ctemplate,
		      const struct comp_testvec *dtemplate,
		      int ctcount, int dtcount)
{
	const char *algo = crypto_tfm_alg_driver_name(crypto_acomp_tfm(tfm));
	unsigned int i;
	char *output, *decomp_out;
	int ret;
	struct scatterlist src, dst;
	struct acomp_req *req;
	struct crypto_wait wait;

	output = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!output)
		return -ENOMEM;

	decomp_out = kmalloc(COMP_BUF_SIZE, GFP_KERNEL);
	if (!decomp_out) {
		kfree(output);
		return -ENOMEM;
	}

	for (i = 0; i < ctcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = ctemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(ctemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_compress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		ilen = req->dlen;
		dlen = COMP_BUF_SIZE;
		sg_init_one(&src, output, ilen);
		sg_init_one(&dst, decomp_out, dlen);
		crypto_init_wait(&wait);
		acomp_request_set_params(req, &src, &dst, ilen, dlen);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: compression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != ctemplate[i].inlen) {
			pr_err("alg: acomp: Compression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(input_vec, decomp_out, req->dlen)) {
			pr_err("alg: acomp: Compression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	for (i = 0; i < dtcount; i++) {
		unsigned int dlen = COMP_BUF_SIZE;
		int ilen = dtemplate[i].inlen;
		void *input_vec;

		input_vec = kmemdup(dtemplate[i].input, ilen, GFP_KERNEL);
		if (!input_vec) {
			ret = -ENOMEM;
			goto out;
		}

		memset(output, 0, dlen);
		crypto_init_wait(&wait);
		sg_init_one(&src, input_vec, ilen);
		sg_init_one(&dst, output, dlen);

		req = acomp_request_alloc(tfm);
		if (!req) {
			pr_err("alg: acomp: request alloc failed for %s\n",
			       algo);
			kfree(input_vec);
			ret = -ENOMEM;
			goto out;
		}

		acomp_request_set_params(req, &src, &dst, ilen, dlen);
		acomp_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					   crypto_req_done, &wait);

		ret = crypto_wait_req(crypto_acomp_decompress(req), &wait);
		if (ret) {
			pr_err("alg: acomp: decompression failed on test %d for %s: ret=%d\n",
			       i + 1, algo, -ret);
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (req->dlen != dtemplate[i].outlen) {
			pr_err("alg: acomp: Decompression test %d failed for %s: output len = %d\n",
			       i + 1, algo, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		if (memcmp(output, dtemplate[i].output, req->dlen)) {
			pr_err("alg: acomp: Decompression test %d failed for %s\n",
			       i + 1, algo);
			hexdump(output, req->dlen);
			ret = -EINVAL;
			kfree(input_vec);
			acomp_request_free(req);
			goto out;
		}

		kfree(input_vec);
		acomp_request_free(req);
	}

	ret = 0;

out:
	kfree(decomp_out);
	kfree(output);
	return ret;
}
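
/*
 * Test a deterministic RNG: seed it from the test vector's V, key, and DT
 * values, then generate output in a loop and compare against the expected
 * result.
 */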
3452 static int test_cprng(struct crypto_rng
*tfm
,
3453 const struct cprng_testvec
*template,
3454 unsigned int tcount
)
3456 const char *algo
= crypto_tfm_alg_driver_name(crypto_rng_tfm(tfm
));
3457 int err
= 0, i
, j
, seedsize
;
3461 seedsize
= crypto_rng_seedsize(tfm
);
3463 seed
= kmalloc(seedsize
, GFP_KERNEL
);
3465 printk(KERN_ERR
"alg: cprng: Failed to allocate seed space "
3470 for (i
= 0; i
< tcount
; i
++) {
3471 memset(result
, 0, 32);
3473 memcpy(seed
, template[i
].v
, template[i
].vlen
);
3474 memcpy(seed
+ template[i
].vlen
, template[i
].key
,
3476 memcpy(seed
+ template[i
].vlen
+ template[i
].klen
,
3477 template[i
].dt
, template[i
].dtlen
);
3479 err
= crypto_rng_reset(tfm
, seed
, seedsize
);
3481 printk(KERN_ERR
"alg: cprng: Failed to reset rng "
3486 for (j
= 0; j
< template[i
].loops
; j
++) {
3487 err
= crypto_rng_get_bytes(tfm
, result
,
3490 printk(KERN_ERR
"alg: cprng: Failed to obtain "
3491 "the correct amount of random data for "
3492 "%s (requested %d)\n", algo
,
3498 err
= memcmp(result
, template[i
].result
,
3501 printk(KERN_ERR
"alg: cprng: Test %d failed for %s\n",
3503 hexdump(result
, template[i
].rlen
);
3514 static int alg_test_cipher(const struct alg_test_desc
*desc
,
3515 const char *driver
, u32 type
, u32 mask
)
3517 const struct cipher_test_suite
*suite
= &desc
->suite
.cipher
;
3518 struct crypto_cipher
*tfm
;
3521 tfm
= crypto_alloc_cipher(driver
, type
, mask
);
3523 printk(KERN_ERR
"alg: cipher: Failed to load transform for "
3524 "%s: %ld\n", driver
, PTR_ERR(tfm
));
3525 return PTR_ERR(tfm
);
3528 err
= test_cipher(tfm
, ENCRYPT
, suite
->vecs
, suite
->count
);
3530 err
= test_cipher(tfm
, DECRYPT
, suite
->vecs
, suite
->count
);
3532 crypto_free_cipher(tfm
);
3536 static int alg_test_comp(const struct alg_test_desc
*desc
, const char *driver
,
3539 struct crypto_comp
*comp
;
3540 struct crypto_acomp
*acomp
;
3542 u32 algo_type
= type
& CRYPTO_ALG_TYPE_ACOMPRESS_MASK
;
3544 if (algo_type
== CRYPTO_ALG_TYPE_ACOMPRESS
) {
3545 acomp
= crypto_alloc_acomp(driver
, type
, mask
);
3546 if (IS_ERR(acomp
)) {
3547 pr_err("alg: acomp: Failed to load transform for %s: %ld\n",
3548 driver
, PTR_ERR(acomp
));
3549 return PTR_ERR(acomp
);
3551 err
= test_acomp(acomp
, desc
->suite
.comp
.comp
.vecs
,
3552 desc
->suite
.comp
.decomp
.vecs
,
3553 desc
->suite
.comp
.comp
.count
,
3554 desc
->suite
.comp
.decomp
.count
);
3555 crypto_free_acomp(acomp
);
3557 comp
= crypto_alloc_comp(driver
, type
, mask
);
3559 pr_err("alg: comp: Failed to load transform for %s: %ld\n",
3560 driver
, PTR_ERR(comp
));
3561 return PTR_ERR(comp
);
3564 err
= test_comp(comp
, desc
->suite
.comp
.comp
.vecs
,
3565 desc
->suite
.comp
.decomp
.vecs
,
3566 desc
->suite
.comp
.comp
.count
,
3567 desc
->suite
.comp
.decomp
.count
);
3569 crypto_free_comp(comp
);
3574 static int alg_test_crc32c(const struct alg_test_desc
*desc
,
3575 const char *driver
, u32 type
, u32 mask
)
3577 struct crypto_shash
*tfm
;
3581 err
= alg_test_hash(desc
, driver
, type
, mask
);
3585 tfm
= crypto_alloc_shash(driver
, type
, mask
);
3587 if (PTR_ERR(tfm
) == -ENOENT
) {
3589 * This crc32c implementation is only available through
3590 * ahash API, not the shash API, so the remaining part
3591 * of the test is not applicable to it.
3595 printk(KERN_ERR
"alg: crc32c: Failed to load transform for %s: "
3596 "%ld\n", driver
, PTR_ERR(tfm
));
3597 return PTR_ERR(tfm
);
3601 SHASH_DESC_ON_STACK(shash
, tfm
);
3602 u32
*ctx
= (u32
*)shash_desc_ctx(shash
);
3607 err
= crypto_shash_final(shash
, (u8
*)&val
);
3609 printk(KERN_ERR
"alg: crc32c: Operation failed for "
3610 "%s: %d\n", driver
, err
);
3614 if (val
!= cpu_to_le32(~420553207)) {
3615 pr_err("alg: crc32c: Test failed for %s: %u\n",
3616 driver
, le32_to_cpu(val
));
3621 crypto_free_shash(tfm
);
3626 static int alg_test_cprng(const struct alg_test_desc
*desc
, const char *driver
,
3629 struct crypto_rng
*rng
;
3632 rng
= crypto_alloc_rng(driver
, type
, mask
);
3634 printk(KERN_ERR
"alg: cprng: Failed to load transform for %s: "
3635 "%ld\n", driver
, PTR_ERR(rng
));
3636 return PTR_ERR(rng
);
3639 err
= test_cprng(rng
, desc
->suite
.cprng
.vecs
, desc
->suite
.cprng
.count
);
3641 crypto_free_rng(rng
);
3647 static int drbg_cavs_test(const struct drbg_testvec
*test
, int pr
,
3648 const char *driver
, u32 type
, u32 mask
)
3651 struct crypto_rng
*drng
;
3652 struct drbg_test_data test_data
;
3653 struct drbg_string addtl
, pers
, testentropy
;
3654 unsigned char *buf
= kzalloc(test
->expectedlen
, GFP_KERNEL
);
3659 drng
= crypto_alloc_rng(driver
, type
, mask
);
3661 printk(KERN_ERR
"alg: drbg: could not allocate DRNG handle for "
3667 test_data
.testentropy
= &testentropy
;
3668 drbg_string_fill(&testentropy
, test
->entropy
, test
->entropylen
);
3669 drbg_string_fill(&pers
, test
->pers
, test
->perslen
);
3670 ret
= crypto_drbg_reset_test(drng
, &pers
, &test_data
);
3672 printk(KERN_ERR
"alg: drbg: Failed to reset rng\n");
3676 drbg_string_fill(&addtl
, test
->addtla
, test
->addtllen
);
3678 drbg_string_fill(&testentropy
, test
->entpra
, test
->entprlen
);
3679 ret
= crypto_drbg_get_bytes_addtl_test(drng
,
3680 buf
, test
->expectedlen
, &addtl
, &test_data
);
3682 ret
= crypto_drbg_get_bytes_addtl(drng
,
3683 buf
, test
->expectedlen
, &addtl
);
3686 printk(KERN_ERR
"alg: drbg: could not obtain random data for "
3687 "driver %s\n", driver
);
3691 drbg_string_fill(&addtl
, test
->addtlb
, test
->addtllen
);
3693 drbg_string_fill(&testentropy
, test
->entprb
, test
->entprlen
);
3694 ret
= crypto_drbg_get_bytes_addtl_test(drng
,
3695 buf
, test
->expectedlen
, &addtl
, &test_data
);
3697 ret
= crypto_drbg_get_bytes_addtl(drng
,
3698 buf
, test
->expectedlen
, &addtl
);
3701 printk(KERN_ERR
"alg: drbg: could not obtain random data for "
3702 "driver %s\n", driver
);
3706 ret
= memcmp(test
->expected
, buf
, test
->expectedlen
);
3709 crypto_free_rng(drng
);
3715 static int alg_test_drbg(const struct alg_test_desc
*desc
, const char *driver
,
3721 const struct drbg_testvec
*template = desc
->suite
.drbg
.vecs
;
3722 unsigned int tcount
= desc
->suite
.drbg
.count
;
3724 if (0 == memcmp(driver
, "drbg_pr_", 8))
3727 for (i
= 0; i
< tcount
; i
++) {
3728 err
= drbg_cavs_test(&template[i
], pr
, driver
, type
, mask
);
3730 printk(KERN_ERR
"alg: drbg: Test %d failed for %s\n",
3740 static int do_test_kpp(struct crypto_kpp
*tfm
, const struct kpp_testvec
*vec
,
3743 struct kpp_request
*req
;
3744 void *input_buf
= NULL
;
3745 void *output_buf
= NULL
;
3746 void *a_public
= NULL
;
3748 void *shared_secret
= NULL
;
3749 struct crypto_wait wait
;
3750 unsigned int out_len_max
;
3752 struct scatterlist src
, dst
;
3754 req
= kpp_request_alloc(tfm
, GFP_KERNEL
);
3758 crypto_init_wait(&wait
);
3760 err
= crypto_kpp_set_secret(tfm
, vec
->secret
, vec
->secret_size
);
3764 out_len_max
= crypto_kpp_maxsize(tfm
);
3765 output_buf
= kzalloc(out_len_max
, GFP_KERNEL
);
3771 /* Use appropriate parameter as base */
3772 kpp_request_set_input(req
, NULL
, 0);
3773 sg_init_one(&dst
, output_buf
, out_len_max
);
3774 kpp_request_set_output(req
, &dst
, out_len_max
);
3775 kpp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
3776 crypto_req_done
, &wait
);
3778 /* Compute party A's public key */
3779 err
= crypto_wait_req(crypto_kpp_generate_public_key(req
), &wait
);
3781 pr_err("alg: %s: Party A: generate public key test failed. err %d\n",
3787 /* Save party A's public key */
3788 a_public
= kmemdup(sg_virt(req
->dst
), out_len_max
, GFP_KERNEL
);
3794 /* Verify calculated public key */
3795 if (memcmp(vec
->expected_a_public
, sg_virt(req
->dst
),
3796 vec
->expected_a_public_size
)) {
3797 pr_err("alg: %s: Party A: generate public key test failed. Invalid output\n",
3804 /* Calculate shared secret key by using counter part (b) public key. */
3805 input_buf
= kmemdup(vec
->b_public
, vec
->b_public_size
, GFP_KERNEL
);
3811 sg_init_one(&src
, input_buf
, vec
->b_public_size
);
3812 sg_init_one(&dst
, output_buf
, out_len_max
);
3813 kpp_request_set_input(req
, &src
, vec
->b_public_size
);
3814 kpp_request_set_output(req
, &dst
, out_len_max
);
3815 kpp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
3816 crypto_req_done
, &wait
);
3817 err
= crypto_wait_req(crypto_kpp_compute_shared_secret(req
), &wait
);
3819 pr_err("alg: %s: Party A: compute shared secret test failed. err %d\n",
3825 /* Save the shared secret obtained by party A */
3826 a_ss
= kmemdup(sg_virt(req
->dst
), vec
->expected_ss_size
, GFP_KERNEL
);
3833 * Calculate party B's shared secret by using party A's
3836 err
= crypto_kpp_set_secret(tfm
, vec
->b_secret
,
3837 vec
->b_secret_size
);
3841 sg_init_one(&src
, a_public
, vec
->expected_a_public_size
);
3842 sg_init_one(&dst
, output_buf
, out_len_max
);
3843 kpp_request_set_input(req
, &src
, vec
->expected_a_public_size
);
3844 kpp_request_set_output(req
, &dst
, out_len_max
);
3845 kpp_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
3846 crypto_req_done
, &wait
);
3847 err
= crypto_wait_req(crypto_kpp_compute_shared_secret(req
),
3850 pr_err("alg: %s: Party B: compute shared secret failed. err %d\n",
3855 shared_secret
= a_ss
;
3857 shared_secret
= (void *)vec
->expected_ss
;
3861 * verify shared secret from which the user will derive
3862 * secret key by executing whatever hash it has chosen
3864 if (memcmp(shared_secret
, sg_virt(req
->dst
),
3865 vec
->expected_ss_size
)) {
3866 pr_err("alg: %s: compute shared secret test failed. Invalid output\n",
3878 kpp_request_free(req
);
3882 static int test_kpp(struct crypto_kpp
*tfm
, const char *alg
,
3883 const struct kpp_testvec
*vecs
, unsigned int tcount
)
3887 for (i
= 0; i
< tcount
; i
++) {
3888 ret
= do_test_kpp(tfm
, vecs
++, alg
);
3890 pr_err("alg: %s: test failed on vector %d, err=%d\n",
3898 static int alg_test_kpp(const struct alg_test_desc
*desc
, const char *driver
,
3901 struct crypto_kpp
*tfm
;
3904 tfm
= crypto_alloc_kpp(driver
, type
, mask
);
3906 pr_err("alg: kpp: Failed to load tfm for %s: %ld\n",
3907 driver
, PTR_ERR(tfm
));
3908 return PTR_ERR(tfm
);
3910 if (desc
->suite
.kpp
.vecs
)
3911 err
= test_kpp(tfm
, desc
->alg
, desc
->suite
.kpp
.vecs
,
3912 desc
->suite
.kpp
.count
);
3914 crypto_free_kpp(tfm
);
3918 static u8
*test_pack_u32(u8
*dst
, u32 val
)
3920 memcpy(dst
, &val
, sizeof(val
));
3921 return dst
+ sizeof(val
);
3924 static int test_akcipher_one(struct crypto_akcipher
*tfm
,
3925 const struct akcipher_testvec
*vecs
)
3927 char *xbuf
[XBUFSIZE
];
3928 struct akcipher_request
*req
;
3929 void *outbuf_enc
= NULL
;
3930 void *outbuf_dec
= NULL
;
3931 struct crypto_wait wait
;
3932 unsigned int out_len_max
, out_len
= 0;
3934 struct scatterlist src
, dst
, src_tab
[3];
3936 unsigned int m_size
, c_size
;
3940 if (testmgr_alloc_buf(xbuf
))
3943 req
= akcipher_request_alloc(tfm
, GFP_KERNEL
);
3947 crypto_init_wait(&wait
);
3949 key
= kmalloc(vecs
->key_len
+ sizeof(u32
) * 2 + vecs
->param_len
,
3953 memcpy(key
, vecs
->key
, vecs
->key_len
);
3954 ptr
= key
+ vecs
->key_len
;
3955 ptr
= test_pack_u32(ptr
, vecs
->algo
);
3956 ptr
= test_pack_u32(ptr
, vecs
->param_len
);
3957 memcpy(ptr
, vecs
->params
, vecs
->param_len
);
3959 if (vecs
->public_key_vec
)
3960 err
= crypto_akcipher_set_pub_key(tfm
, key
, vecs
->key_len
);
3962 err
= crypto_akcipher_set_priv_key(tfm
, key
, vecs
->key_len
);
3967 * First run test which do not require a private key, such as
3968 * encrypt or verify.
3971 out_len_max
= crypto_akcipher_maxsize(tfm
);
3972 outbuf_enc
= kzalloc(out_len_max
, GFP_KERNEL
);
3976 if (!vecs
->siggen_sigver_test
) {
3978 m_size
= vecs
->m_size
;
3980 c_size
= vecs
->c_size
;
3983 /* Swap args so we could keep plaintext (digest)
3984 * in vecs->m, and cooked signature in vecs->c.
3986 m
= vecs
->c
; /* signature */
3987 m_size
= vecs
->c_size
;
3988 c
= vecs
->m
; /* digest */
3989 c_size
= vecs
->m_size
;
3993 if (WARN_ON(m_size
> PAGE_SIZE
))
3995 memcpy(xbuf
[0], m
, m_size
);
3997 sg_init_table(src_tab
, 3);
3998 sg_set_buf(&src_tab
[0], xbuf
[0], 8);
3999 sg_set_buf(&src_tab
[1], xbuf
[0] + 8, m_size
- 8);
4000 if (vecs
->siggen_sigver_test
) {
4001 if (WARN_ON(c_size
> PAGE_SIZE
))
4003 memcpy(xbuf
[1], c
, c_size
);
4004 sg_set_buf(&src_tab
[2], xbuf
[1], c_size
);
4005 akcipher_request_set_crypt(req
, src_tab
, NULL
, m_size
, c_size
);
4007 sg_init_one(&dst
, outbuf_enc
, out_len_max
);
4008 akcipher_request_set_crypt(req
, src_tab
, &dst
, m_size
,
4011 akcipher_request_set_callback(req
, CRYPTO_TFM_REQ_MAY_BACKLOG
,
4012 crypto_req_done
, &wait
);
4014 err
= crypto_wait_req(vecs
->siggen_sigver_test
?
4015 /* Run asymmetric signature verification */
4016 crypto_akcipher_verify(req
) :
4017 /* Run asymmetric encrypt */
4018 crypto_akcipher_encrypt(req
), &wait
);
4020 pr_err("alg: akcipher: %s test failed. err %d\n", op
, err
);
4023 if (!vecs
->siggen_sigver_test
) {
4024 if (req
->dst_len
!= c_size
) {
4025 pr_err("alg: akcipher: %s test failed. Invalid output len\n",
4030 /* verify that encrypted message is equal to expected */
4031 if (memcmp(c
, outbuf_enc
, c_size
) != 0) {
4032 pr_err("alg: akcipher: %s test failed. Invalid output\n",
4034 hexdump(outbuf_enc
, c_size
);
4041 * Don't invoke (decrypt or sign) test which require a private key
4042 * for vectors with only a public key.
4044 if (vecs
->public_key_vec
) {
4048 outbuf_dec
= kzalloc(out_len_max
, GFP_KERNEL
);
4054 op
= vecs
->siggen_sigver_test
? "sign" : "decrypt";
4055 if (WARN_ON(c_size
> PAGE_SIZE
))
4057 memcpy(xbuf
[0], c
, c_size
);
4059 sg_init_one(&src
, xbuf
[0], c_size
);
4060 sg_init_one(&dst
, outbuf_dec
, out_len_max
);
4061 crypto_init_wait(&wait
);
4062 akcipher_request_set_crypt(req
, &src
, &dst
, c_size
, out_len_max
);
4064 err
= crypto_wait_req(vecs
->siggen_sigver_test
?
4065 /* Run asymmetric signature generation */
4066 crypto_akcipher_sign(req
) :
4067 /* Run asymmetric decrypt */
4068 crypto_akcipher_decrypt(req
), &wait
);
4070 pr_err("alg: akcipher: %s test failed. err %d\n", op
, err
);
4073 out_len
= req
->dst_len
;
4074 if (out_len
< m_size
) {
4075 pr_err("alg: akcipher: %s test failed. Invalid output len %u\n",
4080 /* verify that decrypted message is equal to the original msg */
4081 if (memchr_inv(outbuf_dec
, 0, out_len
- m_size
) ||
4082 memcmp(m
, outbuf_dec
+ out_len
- m_size
, m_size
)) {
4083 pr_err("alg: akcipher: %s test failed. Invalid output\n", op
);
4084 hexdump(outbuf_dec
, out_len
);
4091 akcipher_request_free(req
);
4094 testmgr_free_buf(xbuf
);
4098 static int test_akcipher(struct crypto_akcipher
*tfm
, const char *alg
,
4099 const struct akcipher_testvec
*vecs
,
4100 unsigned int tcount
)
4103 crypto_tfm_alg_driver_name(crypto_akcipher_tfm(tfm
));
4106 for (i
= 0; i
< tcount
; i
++) {
4107 ret
= test_akcipher_one(tfm
, vecs
++);
4111 pr_err("alg: akcipher: test %d failed for %s, err=%d\n",
4118 static int alg_test_akcipher(const struct alg_test_desc
*desc
,
4119 const char *driver
, u32 type
, u32 mask
)
4121 struct crypto_akcipher
*tfm
;
4124 tfm
= crypto_alloc_akcipher(driver
, type
, mask
);
4126 pr_err("alg: akcipher: Failed to load tfm for %s: %ld\n",
4127 driver
, PTR_ERR(tfm
));
4128 return PTR_ERR(tfm
);
4130 if (desc
->suite
.akcipher
.vecs
)
4131 err
= test_akcipher(tfm
, desc
->alg
, desc
->suite
.akcipher
.vecs
,
4132 desc
->suite
.akcipher
.count
);
4134 crypto_free_akcipher(tfm
);
4138 static int alg_test_null(const struct alg_test_desc
*desc
,
4139 const char *driver
, u32 type
, u32 mask
)
4144 #define ____VECS(tv) .vecs = tv, .count = ARRAY_SIZE(tv)
4145 #define __VECS(tv) { ____VECS(tv) }
4147 /* Please keep this list sorted by algorithm name. */
4148 static const struct alg_test_desc alg_test_descs
[] = {
4150 .alg
= "adiantum(xchacha12,aes)",
4151 .generic_driver
= "adiantum(xchacha12-generic,aes-generic,nhpoly1305-generic)",
4152 .test
= alg_test_skcipher
,
4154 .cipher
= __VECS(adiantum_xchacha12_aes_tv_template
)
4157 .alg
= "adiantum(xchacha20,aes)",
4158 .generic_driver
= "adiantum(xchacha20-generic,aes-generic,nhpoly1305-generic)",
4159 .test
= alg_test_skcipher
,
4161 .cipher
= __VECS(adiantum_xchacha20_aes_tv_template
)
4165 .test
= alg_test_aead
,
4167 .aead
= __VECS(aegis128_tv_template
)
4170 .alg
= "ansi_cprng",
4171 .test
= alg_test_cprng
,
4173 .cprng
= __VECS(ansi_cprng_aes_tv_template
)
4176 .alg
= "authenc(hmac(md5),ecb(cipher_null))",
4177 .test
= alg_test_aead
,
4179 .aead
= __VECS(hmac_md5_ecb_cipher_null_tv_template
)
4182 .alg
= "authenc(hmac(sha1),cbc(aes))",
4183 .test
= alg_test_aead
,
4186 .aead
= __VECS(hmac_sha1_aes_cbc_tv_temp
)
4189 .alg
= "authenc(hmac(sha1),cbc(des))",
4190 .test
= alg_test_aead
,
4192 .aead
= __VECS(hmac_sha1_des_cbc_tv_temp
)
4195 .alg
= "authenc(hmac(sha1),cbc(des3_ede))",
4196 .test
= alg_test_aead
,
4199 .aead
= __VECS(hmac_sha1_des3_ede_cbc_tv_temp
)
4202 .alg
= "authenc(hmac(sha1),ctr(aes))",
4203 .test
= alg_test_null
,
4206 .alg
= "authenc(hmac(sha1),ecb(cipher_null))",
4207 .test
= alg_test_aead
,
4209 .aead
= __VECS(hmac_sha1_ecb_cipher_null_tv_temp
)
4212 .alg
= "authenc(hmac(sha1),rfc3686(ctr(aes)))",
4213 .test
= alg_test_null
,
4216 .alg
= "authenc(hmac(sha224),cbc(des))",
4217 .test
= alg_test_aead
,
4219 .aead
= __VECS(hmac_sha224_des_cbc_tv_temp
)
4222 .alg
= "authenc(hmac(sha224),cbc(des3_ede))",
4223 .test
= alg_test_aead
,
4226 .aead
= __VECS(hmac_sha224_des3_ede_cbc_tv_temp
)
4229 .alg
= "authenc(hmac(sha256),cbc(aes))",
4230 .test
= alg_test_aead
,
4233 .aead
= __VECS(hmac_sha256_aes_cbc_tv_temp
)
4236 .alg
= "authenc(hmac(sha256),cbc(des))",
4237 .test
= alg_test_aead
,
4239 .aead
= __VECS(hmac_sha256_des_cbc_tv_temp
)
4242 .alg
= "authenc(hmac(sha256),cbc(des3_ede))",
4243 .test
= alg_test_aead
,
4246 .aead
= __VECS(hmac_sha256_des3_ede_cbc_tv_temp
)
4249 .alg
= "authenc(hmac(sha256),ctr(aes))",
4250 .test
= alg_test_null
,
4253 .alg
= "authenc(hmac(sha256),rfc3686(ctr(aes)))",
4254 .test
= alg_test_null
,
4257 .alg
= "authenc(hmac(sha384),cbc(des))",
4258 .test
= alg_test_aead
,
4260 .aead
= __VECS(hmac_sha384_des_cbc_tv_temp
)
4263 .alg
= "authenc(hmac(sha384),cbc(des3_ede))",
4264 .test
= alg_test_aead
,
4267 .aead
= __VECS(hmac_sha384_des3_ede_cbc_tv_temp
)
4270 .alg
= "authenc(hmac(sha384),ctr(aes))",
4271 .test
= alg_test_null
,
4274 .alg
= "authenc(hmac(sha384),rfc3686(ctr(aes)))",
4275 .test
= alg_test_null
,
4278 .alg
= "authenc(hmac(sha512),cbc(aes))",
4280 .test
= alg_test_aead
,
4282 .aead
= __VECS(hmac_sha512_aes_cbc_tv_temp
)
4285 .alg
= "authenc(hmac(sha512),cbc(des))",
4286 .test
= alg_test_aead
,
4288 .aead
= __VECS(hmac_sha512_des_cbc_tv_temp
)
4291 .alg
= "authenc(hmac(sha512),cbc(des3_ede))",
4292 .test
= alg_test_aead
,
4295 .aead
= __VECS(hmac_sha512_des3_ede_cbc_tv_temp
)
4298 .alg
= "authenc(hmac(sha512),ctr(aes))",
4299 .test
= alg_test_null
,
4302 .alg
= "authenc(hmac(sha512),rfc3686(ctr(aes)))",
4303 .test
= alg_test_null
,
4306 .alg
= "blake2b-160",
4307 .test
= alg_test_hash
,
4310 .hash
= __VECS(blake2b_160_tv_template
)
4313 .alg
= "blake2b-256",
4314 .test
= alg_test_hash
,
4317 .hash
= __VECS(blake2b_256_tv_template
)
4320 .alg
= "blake2b-384",
4321 .test
= alg_test_hash
,
4324 .hash
= __VECS(blake2b_384_tv_template
)
4327 .alg
= "blake2b-512",
4328 .test
= alg_test_hash
,
4331 .hash
= __VECS(blake2b_512_tv_template
)
4334 .alg
= "blake2s-128",
4335 .test
= alg_test_hash
,
4337 .hash
= __VECS(blakes2s_128_tv_template
)
4340 .alg
= "blake2s-160",
4341 .test
= alg_test_hash
,
4343 .hash
= __VECS(blakes2s_160_tv_template
)
4346 .alg
= "blake2s-224",
4347 .test
= alg_test_hash
,
4349 .hash
= __VECS(blakes2s_224_tv_template
)
4352 .alg
= "blake2s-256",
4353 .test
= alg_test_hash
,
4355 .hash
= __VECS(blakes2s_256_tv_template
)
4359 .test
= alg_test_skcipher
,
4362 .cipher
= __VECS(aes_cbc_tv_template
)
4365 .alg
= "cbc(anubis)",
4366 .test
= alg_test_skcipher
,
4368 .cipher
= __VECS(anubis_cbc_tv_template
)
4371 .alg
= "cbc(blowfish)",
4372 .test
= alg_test_skcipher
,
4374 .cipher
= __VECS(bf_cbc_tv_template
)
4377 .alg
= "cbc(camellia)",
4378 .test
= alg_test_skcipher
,
4380 .cipher
= __VECS(camellia_cbc_tv_template
)
4383 .alg
= "cbc(cast5)",
4384 .test
= alg_test_skcipher
,
4386 .cipher
= __VECS(cast5_cbc_tv_template
)
4389 .alg
= "cbc(cast6)",
4390 .test
= alg_test_skcipher
,
4392 .cipher
= __VECS(cast6_cbc_tv_template
)
4396 .test
= alg_test_skcipher
,
4398 .cipher
= __VECS(des_cbc_tv_template
)
4401 .alg
= "cbc(des3_ede)",
4402 .test
= alg_test_skcipher
,
4405 .cipher
= __VECS(des3_ede_cbc_tv_template
)
4408 /* Same as cbc(aes) except the key is stored in
4409 * hardware secure memory which we reference by index
4412 .test
= alg_test_null
,
4415 /* Same as cbc(sm4) except the key is stored in
4416 * hardware secure memory which we reference by index
4419 .test
= alg_test_null
,
4421 .alg
= "cbc(serpent)",
4422 .test
= alg_test_skcipher
,
4424 .cipher
= __VECS(serpent_cbc_tv_template
)
4428 .test
= alg_test_skcipher
,
4430 .cipher
= __VECS(sm4_cbc_tv_template
)
4433 .alg
= "cbc(twofish)",
4434 .test
= alg_test_skcipher
,
4436 .cipher
= __VECS(tf_cbc_tv_template
)
4439 .alg
= "cbcmac(aes)",
4441 .test
= alg_test_hash
,
4443 .hash
= __VECS(aes_cbcmac_tv_template
)
4447 .generic_driver
= "ccm_base(ctr(aes-generic),cbcmac(aes-generic))",
4448 .test
= alg_test_aead
,
4452 ____VECS(aes_ccm_tv_template
),
4453 .einval_allowed
= 1,
4458 .test
= alg_test_skcipher
,
4461 .cipher
= __VECS(aes_cfb_tv_template
)
4465 .test
= alg_test_skcipher
,
4467 .cipher
= __VECS(sm4_cfb_tv_template
)
4471 .test
= alg_test_skcipher
,
4473 .cipher
= __VECS(chacha20_tv_template
)
4478 .test
= alg_test_hash
,
4480 .hash
= __VECS(aes_cmac128_tv_template
)
4483 .alg
= "cmac(des3_ede)",
4485 .test
= alg_test_hash
,
4487 .hash
= __VECS(des3_ede_cmac64_tv_template
)
4490 .alg
= "compress_null",
4491 .test
= alg_test_null
,
4494 .test
= alg_test_hash
,
4497 .hash
= __VECS(crc32_tv_template
)
4501 .test
= alg_test_crc32c
,
4504 .hash
= __VECS(crc32c_tv_template
)
4508 .test
= alg_test_hash
,
4511 .hash
= __VECS(crct10dif_tv_template
)
4515 .test
= alg_test_skcipher
,
4518 .cipher
= __VECS(aes_ctr_tv_template
)
4521 .alg
= "ctr(blowfish)",
4522 .test
= alg_test_skcipher
,
4524 .cipher
= __VECS(bf_ctr_tv_template
)
4527 .alg
= "ctr(camellia)",
4528 .test
= alg_test_skcipher
,
4530 .cipher
= __VECS(camellia_ctr_tv_template
)
4533 .alg
= "ctr(cast5)",
4534 .test
= alg_test_skcipher
,
4536 .cipher
= __VECS(cast5_ctr_tv_template
)
4539 .alg
= "ctr(cast6)",
4540 .test
= alg_test_skcipher
,
4542 .cipher
= __VECS(cast6_ctr_tv_template
)
4546 .test
= alg_test_skcipher
,
4548 .cipher
= __VECS(des_ctr_tv_template
)
4551 .alg
= "ctr(des3_ede)",
4552 .test
= alg_test_skcipher
,
4555 .cipher
= __VECS(des3_ede_ctr_tv_template
)
4558 /* Same as ctr(aes) except the key is stored in
4559 * hardware secure memory which we reference by index
4562 .test
= alg_test_null
,
4566 /* Same as ctr(sm4) except the key is stored in
4567 * hardware secure memory which we reference by index
4570 .test
= alg_test_null
,
4572 .alg
= "ctr(serpent)",
4573 .test
= alg_test_skcipher
,
4575 .cipher
= __VECS(serpent_ctr_tv_template
)
4579 .test
= alg_test_skcipher
,
4581 .cipher
= __VECS(sm4_ctr_tv_template
)
4584 .alg
= "ctr(twofish)",
4585 .test
= alg_test_skcipher
,
4587 .cipher
= __VECS(tf_ctr_tv_template
)
4590 .alg
= "cts(cbc(aes))",
4591 .test
= alg_test_skcipher
,
4594 .cipher
= __VECS(cts_mode_tv_template
)
4597 /* Same as cts(cbc((aes)) except the key is stored in
4598 * hardware secure memory which we reference by index
4600 .alg
= "cts(cbc(paes))",
4601 .test
= alg_test_null
,
		.alg = "curve25519",
		.test = alg_test_kpp,
		.suite = {
			.kpp = __VECS(curve25519_tv_template)
		}
	}, {
		.alg = "deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(deflate_comp_tv_template),
				.decomp = __VECS(deflate_decomp_tv_template)
			}
		}
	}, {
		.alg = "dh",
		.test = alg_test_kpp,
		.suite = {
			.kpp = __VECS(dh_tv_template)
		}
	}, {
		.alg = "digest_null",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_ctr_aes128",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_nopr_ctr_aes128_tv_template)
		}
	}, {
		.alg = "drbg_nopr_ctr_aes192",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_nopr_ctr_aes192_tv_template)
		}
	}, {
		.alg = "drbg_nopr_ctr_aes256",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_nopr_ctr_aes256_tv_template)
		}
	}, {
		/*
		 * There is no need to specifically test the DRBG with every
		 * backend cipher -- covered by drbg_nopr_hmac_sha256 test
		 */
		.alg = "drbg_nopr_hmac_sha1",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_hmac_sha256",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_nopr_hmac_sha256_tv_template)
		}
	}, {
		/* covered by drbg_nopr_hmac_sha256 test */
		.alg = "drbg_nopr_hmac_sha384",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_hmac_sha512",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha1",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha256",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_nopr_sha256_tv_template)
		}
	}, {
		/* covered by drbg_nopr_sha256 test */
		.alg = "drbg_nopr_sha384",
		.test = alg_test_null,
	}, {
		.alg = "drbg_nopr_sha512",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_ctr_aes128",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_pr_ctr_aes128_tv_template)
		}
	}, {
		/* covered by drbg_pr_ctr_aes128 test */
		.alg = "drbg_pr_ctr_aes192",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_ctr_aes256",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha1",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha256",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_pr_hmac_sha256_tv_template)
		}
	}, {
		/* covered by drbg_pr_hmac_sha256 test */
		.alg = "drbg_pr_hmac_sha384",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_hmac_sha512",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha1",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha256",
		.test = alg_test_drbg,
		.suite = {
			.drbg = __VECS(drbg_pr_sha256_tv_template)
		}
	}, {
		/* covered by drbg_pr_sha256 test */
		.alg = "drbg_pr_sha384",
		.test = alg_test_null,
	}, {
		.alg = "drbg_pr_sha512",
		.test = alg_test_null,
	}, {
		.alg = "ecb(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_tv_template)
		}
	}, {
		.alg = "ecb(anubis)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(anubis_tv_template)
		}
	}, {
		.alg = "ecb(arc4)",
		.generic_driver = "ecb(arc4)-generic",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(arc4_tv_template)
		}
	}, {
		.alg = "ecb(blowfish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(bf_tv_template)
		}
	}, {
		.alg = "ecb(camellia)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_tv_template)
		}
	}, {
		.alg = "ecb(cast5)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast5_tv_template)
		}
	}, {
		.alg = "ecb(cast6)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_tv_template)
		}
	}, {
		.alg = "ecb(cipher_null)",
		.test = alg_test_null,
	}, {
		.alg = "ecb(des)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(des_tv_template)
		}
	}, {
		.alg = "ecb(des3_ede)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(des3_ede_tv_template)
		}
	}, {
		.alg = "ecb(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = {
				.vecs = fcrypt_pcbc_tv_template,
				.count = 1
			}
		}
	}, {
		.alg = "ecb(khazad)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(khazad_tv_template)
		}
	}, {
		/* Same as ecb(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "ecb(paes)",
		.test = alg_test_null,
	}, {
		.alg = "ecb(seed)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(seed_tv_template)
		}
	}, {
		.alg = "ecb(serpent)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_tv_template)
		}
	}, {
		.alg = "ecb(sm4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(sm4_tv_template)
		}
	}, {
		.alg = "ecb(tea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tea_tv_template)
		}
	}, {
		.alg = "ecb(tnepres)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tnepres_tv_template)
		}
	}, {
		.alg = "ecb(twofish)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_tv_template)
		}
	}, {
		.alg = "ecb(xeta)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xeta_tv_template)
		}
	}, {
		.alg = "ecb(xtea)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xtea_tv_template)
		}
	}, {
		.alg = "ecdh",
		.test = alg_test_kpp,
		.suite = {
			.kpp = __VECS(ecdh_tv_template)
		}
	}, {
		.alg = "ecrdsa",
		.test = alg_test_akcipher,
		.suite = {
			.akcipher = __VECS(ecrdsa_tv_template)
		}
	}, {
		.alg = "essiv(authenc(hmac(sha256),cbc(aes)),sha256)",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(essiv_hmac_sha256_aes_cbc_tv_temp)
		}
	}, {
		.alg = "essiv(cbc(aes),sha256)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(essiv_aes_cbc_tv_template)
		}
	}, {
		.alg = "gcm(aes)",
		.generic_driver = "gcm_base(ctr(aes-generic),ghash-generic)",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(aes_gcm_tv_template)
		}
	}, {
		.alg = "ghash",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(ghash_tv_template)
		}
	}, {
		.alg = "hmac(md5)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_md5_tv_template)
		}
	}, {
		.alg = "hmac(rmd128)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_rmd128_tv_template)
		}
	}, {
		.alg = "hmac(rmd160)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_rmd160_tv_template)
		}
	}, {
		.alg = "hmac(sha1)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha1_tv_template)
		}
	}, {
		.alg = "hmac(sha224)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha224_tv_template)
		}
	}, {
		.alg = "hmac(sha256)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha256_tv_template)
		}
	}, {
		.alg = "hmac(sha3-224)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha3_224_tv_template)
		}
	}, {
		.alg = "hmac(sha3-256)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha3_256_tv_template)
		}
	}, {
		.alg = "hmac(sha3-384)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha3_384_tv_template)
		}
	}, {
		.alg = "hmac(sha3-512)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha3_512_tv_template)
		}
	}, {
		.alg = "hmac(sha384)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha384_tv_template)
		}
	}, {
		.alg = "hmac(sha512)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sha512_tv_template)
		}
	}, {
		.alg = "hmac(sm3)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_sm3_tv_template)
		}
	}, {
		.alg = "hmac(streebog256)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_streebog256_tv_template)
		}
	}, {
		.alg = "hmac(streebog512)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(hmac_streebog512_tv_template)
		}
	}, {
		.alg = "jitterentropy_rng",
		.test = alg_test_null,
	}, {
		.alg = "kw(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_kw_tv_template)
		}
	}, {
		.alg = "lrw(aes)",
		.generic_driver = "lrw(ecb(aes-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_lrw_tv_template)
		}
	}, {
		.alg = "lrw(camellia)",
		.generic_driver = "lrw(ecb(camellia-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_lrw_tv_template)
		}
	}, {
		.alg = "lrw(cast6)",
		.generic_driver = "lrw(ecb(cast6-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_lrw_tv_template)
		}
	}, {
		.alg = "lrw(serpent)",
		.generic_driver = "lrw(ecb(serpent-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_lrw_tv_template)
		}
	}, {
		.alg = "lrw(twofish)",
		.generic_driver = "lrw(ecb(twofish-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_lrw_tv_template)
		}
	}, {
		.alg = "lz4",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(lz4_comp_tv_template),
				.decomp = __VECS(lz4_decomp_tv_template)
			}
		}
	}, {
		.alg = "lz4hc",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(lz4hc_comp_tv_template),
				.decomp = __VECS(lz4hc_decomp_tv_template)
			}
		}
	}, {
		.alg = "lzo",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(lzo_comp_tv_template),
				.decomp = __VECS(lzo_decomp_tv_template)
			}
		}
	}, {
		.alg = "lzo-rle",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(lzorle_comp_tv_template),
				.decomp = __VECS(lzorle_decomp_tv_template)
			}
		}
	}, {
		.alg = "md4",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(md4_tv_template)
		}
	}, {
		.alg = "md5",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(md5_tv_template)
		}
	}, {
		.alg = "michael_mic",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(michael_mic_tv_template)
		}
	}, {
		.alg = "nhpoly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(nhpoly1305_tv_template)
		}
	}, {
		.alg = "ofb(aes)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_ofb_tv_template)
		}
	}, {
		/* Same as ofb(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "ofb(paes)",
		.test = alg_test_null,
	}, {
		.alg = "ofb(sm4)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(sm4_ofb_tv_template)
		}
	}, {
		.alg = "pcbc(fcrypt)",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(fcrypt_pcbc_tv_template)
		}
	}, {
		.alg = "pkcs1pad(rsa,sha224)",
		.test = alg_test_null,
	}, {
		.alg = "pkcs1pad(rsa,sha256)",
		.test = alg_test_akcipher,
		.suite = {
			.akcipher = __VECS(pkcs1pad_rsa_tv_template)
		}
	}, {
		.alg = "pkcs1pad(rsa,sha384)",
		.test = alg_test_null,
	}, {
		.alg = "pkcs1pad(rsa,sha512)",
		.test = alg_test_null,
	}, {
		.alg = "poly1305",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(poly1305_tv_template)
		}
	}, {
		.alg = "rfc3686(ctr(aes))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_ctr_rfc3686_tv_template)
		}
	}, {
		.alg = "rfc3686(ctr(sm4))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(sm4_ctr_rfc3686_tv_template)
		}
	}, {
		.alg = "rfc4106(gcm(aes))",
		.generic_driver = "rfc4106(gcm_base(ctr(aes-generic),ghash-generic))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				____VECS(aes_gcm_rfc4106_tv_template),
				.einval_allowed = 1,
				.esp_aad = 1,
			}
		}
	}, {
		.alg = "rfc4309(ccm(aes))",
		.generic_driver = "rfc4309(ccm_base(ctr(aes-generic),cbcmac(aes-generic)))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				____VECS(aes_ccm_rfc4309_tv_template),
				.einval_allowed = 1,
				.esp_aad = 1,
			}
		}
	}, {
		.alg = "rfc4543(gcm(aes))",
		.generic_driver = "rfc4543(gcm_base(ctr(aes-generic),ghash-generic))",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				____VECS(aes_gcm_rfc4543_tv_template),
				.einval_allowed = 1,
			}
		}
	}, {
		.alg = "rfc7539(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = __VECS(rfc7539_tv_template)
		}
	}, {
		.alg = "rfc7539esp(chacha20,poly1305)",
		.test = alg_test_aead,
		.suite = {
			.aead = {
				____VECS(rfc7539esp_tv_template),
				.einval_allowed = 1,
				.esp_aad = 1,
			}
		}
	}, {
		.alg = "rmd128",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd128_tv_template)
		}
	}, {
		.alg = "rmd160",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd160_tv_template)
		}
	}, {
		.alg = "rmd256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd256_tv_template)
		}
	}, {
		.alg = "rmd320",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(rmd320_tv_template)
		}
	}, {
		.alg = "rsa",
		.test = alg_test_akcipher,
		.suite = {
			.akcipher = __VECS(rsa_tv_template)
		}
	}, {
		.alg = "salsa20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(salsa20_stream_tv_template)
		}
	}, {
		.alg = "sha1",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha1_tv_template)
		}
	}, {
		.alg = "sha224",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha224_tv_template)
		}
	}, {
		.alg = "sha256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha256_tv_template)
		}
	}, {
		.alg = "sha3-224",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha3_224_tv_template)
		}
	}, {
		.alg = "sha3-256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha3_256_tv_template)
		}
	}, {
		.alg = "sha3-384",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha3_384_tv_template)
		}
	}, {
		.alg = "sha3-512",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha3_512_tv_template)
		}
	}, {
		.alg = "sha384",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha384_tv_template)
		}
	}, {
		.alg = "sha512",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sha512_tv_template)
		}
	}, {
		.alg = "sm3",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(sm3_tv_template)
		}
	}, {
		.alg = "streebog256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(streebog256_tv_template)
		}
	}, {
		.alg = "streebog512",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(streebog512_tv_template)
		}
	}, {
		.alg = "tgr128",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(tgr128_tv_template)
		}
	}, {
		.alg = "tgr160",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(tgr160_tv_template)
		}
	}, {
		.alg = "tgr192",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(tgr192_tv_template)
		}
	}, {
		.alg = "vmac64(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(vmac64_aes_tv_template)
		}
	}, {
		.alg = "wp256",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(wp256_tv_template)
		}
	}, {
		.alg = "wp384",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(wp384_tv_template)
		}
	}, {
		.alg = "wp512",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(wp512_tv_template)
		}
	}, {
		.alg = "xcbc(aes)",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(aes_xcbc128_tv_template)
		}
	}, {
		.alg = "xchacha12",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xchacha12_tv_template)
		}
	}, {
		.alg = "xchacha20",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(xchacha20_tv_template)
		}
	}, {
		.alg = "xts(aes)",
		.generic_driver = "xts(ecb(aes-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(aes_xts_tv_template)
		}
	}, {
		.alg = "xts(camellia)",
		.generic_driver = "xts(ecb(camellia-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(camellia_xts_tv_template)
		}
	}, {
		.alg = "xts(cast6)",
		.generic_driver = "xts(ecb(cast6-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(cast6_xts_tv_template)
		}
	}, {
		/* Same as xts(aes) except the key is stored in
		 * hardware secure memory which we reference by index
		 */
		.alg = "xts(paes)",
		.test = alg_test_null,
	}, {
		.alg = "xts(serpent)",
		.generic_driver = "xts(ecb(serpent-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(serpent_xts_tv_template)
		}
	}, {
		.alg = "xts(twofish)",
		.generic_driver = "xts(ecb(twofish-generic))",
		.test = alg_test_skcipher,
		.suite = {
			.cipher = __VECS(tf_xts_tv_template)
		}
	}, {
		.alg = "xts4096(paes)",
		.test = alg_test_null,
	}, {
		.alg = "xts512(paes)",
		.test = alg_test_null,
	}, {
		.alg = "xxhash64",
		.test = alg_test_hash,
		.suite = {
			.hash = __VECS(xxhash64_tv_template)
		}
	}, {
		.alg = "zlib-deflate",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(zlib_deflate_comp_tv_template),
				.decomp = __VECS(zlib_deflate_decomp_tv_template)
			}
		}
	}, {
		.alg = "zstd",
		.test = alg_test_comp,
		.suite = {
			.comp = {
				.comp = __VECS(zstd_comp_tv_template),
				.decomp = __VECS(zstd_decomp_tv_template)
			}
		}
	}
};
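
/*
 * alg_find_test() below looks entries up with a binary search, so
 * alg_test_descs[] must stay sorted by .alg; alg_check_test_descs_order()
 * warns at boot time if an entry is out of order or duplicated.
 */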
static void alg_check_test_descs_order(void)
{
	int i;

	for (i = 1; i < ARRAY_SIZE(alg_test_descs); i++) {
		int diff = strcmp(alg_test_descs[i - 1].alg,
				  alg_test_descs[i].alg);

		if (WARN_ON(diff > 0)) {
			pr_warn("testmgr: alg_test_descs entries in wrong order: '%s' before '%s'\n",
				alg_test_descs[i - 1].alg,
				alg_test_descs[i].alg);
		}

		if (WARN_ON(diff == 0)) {
			pr_warn("testmgr: duplicate alg_test_descs entry: '%s'\n",
				alg_test_descs[i].alg);
		}
	}
}

static void alg_check_testvec_configs(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(default_cipher_testvec_configs); i++)
		WARN_ON(!valid_testvec_config(
				&default_cipher_testvec_configs[i]));

	for (i = 0; i < ARRAY_SIZE(default_hash_testvec_configs); i++)
		WARN_ON(!valid_testvec_config(
				&default_hash_testvec_configs[i]));
}

static void testmgr_onetime_init(void)
{
	alg_check_test_descs_order();
	alg_check_testvec_configs();

#ifdef CONFIG_CRYPTO_MANAGER_EXTRA_TESTS
	pr_warn("alg: extra crypto tests enabled.  This is intended for developer use only.\n");
#endif
}

static int alg_find_test(const char *alg)
{
	int start = 0;
	int end = ARRAY_SIZE(alg_test_descs);

	while (start < end) {
		int i = (start + end) / 2;
		int diff = strcmp(alg_test_descs[i].alg, alg);

		if (diff > 0) {
			end = i;
			continue;
		}

		if (diff < 0) {
			start = i + 1;
			continue;
		}

		return i;
	}

	return -1;
}

int alg_test(const char *driver, const char *alg, u32 type, u32 mask)
{
	int i;
	int j;
	int rc;

	if (!fips_enabled && notests) {
		printk_once(KERN_INFO "alg: self-tests disabled\n");
		return 0;
	}

	DO_ONCE(testmgr_onetime_init);

	if ((type & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_CIPHER) {
		char nalg[CRYPTO_MAX_ALG_NAME];

		if (snprintf(nalg, sizeof(nalg), "ecb(%s)", alg) >=
		    sizeof(nalg))
			return -ENAMETOOLONG;

		i = alg_find_test(nalg);
		if (i < 0)
			goto notest;

		if (fips_enabled && !alg_test_descs[i].fips_allowed)
			goto non_fips_alg;

		rc = alg_test_cipher(alg_test_descs + i, driver, type, mask);
		goto test_done;
	}

	i = alg_find_test(alg);
	j = alg_find_test(driver);
	if (i < 0 && j < 0)
		goto notest;

	if (fips_enabled && ((i >= 0 && !alg_test_descs[i].fips_allowed) ||
			     (j >= 0 && !alg_test_descs[j].fips_allowed)))
		goto non_fips_alg;

	rc = 0;
	if (i >= 0)
		rc |= alg_test_descs[i].test(alg_test_descs + i, driver,
					     type, mask);
	if (j >= 0 && j != i)
		rc |= alg_test_descs[j].test(alg_test_descs + j, driver,
					     type, mask);

test_done:
	if (rc && (fips_enabled || panic_on_fail)) {
		fips_fail_notify();
		panic("alg: self-tests for %s (%s) failed in %s mode!\n",
		      driver, alg, fips_enabled ? "fips" : "panic_on_fail");
	}

	if (fips_enabled && !rc)
		pr_info("alg: self-tests for %s (%s) passed\n", driver, alg);

	return rc;

notest:
	printk(KERN_INFO "alg: No test for %s (%s)\n", alg, driver);
	return 0;
non_fips_alg:
	return -EINVAL;
}

#endif /* CONFIG_CRYPTO_MANAGER_DISABLE_TESTS */

EXPORT_SYMBOL_GPL(alg_test);
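
/*
 * Illustrative sketch only, not part of the file above: the crypto manager
 * is the normal caller of alg_test(), passing the driver name of the
 * implementation being registered together with the algorithm name it
 * instantiates, along the lines of the hypothetical call below.  The exact
 * type/mask values come from the algorithm being registered.
 *
 *	err = alg_test("cbc(aes-generic)", "cbc(aes)",
 *		       CRYPTO_ALG_TYPE_SKCIPHER, 0);
 *
 * A non-zero return value reports a self-test failure (or a missing test
 * vector in FIPS mode) back to the caller.
 */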