/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>
#include "aes-ce-setkey.h"
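/*
 * Note: this glue code comes in two flavours. With USE_V8_CRYPTO_EXTENSIONS
 * defined, the mode handlers below bind to the ARMv8 Crypto Extensions
 * implementations (ce_aes_*); without it, they bind to the plain NEON
 * implementations (neon_aes_*). MODE and PRIO supply the driver name suffix
 * and the crypto API priority for each variant.
 */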
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}
static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
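/*
 * All mode handlers below derive the round count from the key length in
 * bytes: 6 + 16/4 = 10 rounds for AES-128, 6 + 24/4 = 12 for AES-192 and
 * 6 + 32/4 = 14 for AES-256. They all follow the same pattern: walk the
 * request's scatterlists in virtually mapped chunks and hand whole blocks
 * to the assembler routines in aes-modes.S with NEON enabled.
 */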
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
		first = 0;
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		memcpy(tdst, tsrc, nbytes);
		crypto_xor(tdst, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_neon_end();

	return err;
}
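/*
 * Note on the tail handling above: passing blocks == -1 with a NULL input
 * asks aes_ctr_encrypt() to emit the keystream for the final, partial block
 * into 'tail'; the glue code then copies the remaining input bytes and XORs
 * the keystream into them, so no padded plaintext is ever processed.
 */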
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
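/*
 * Note that xts_decrypt() uses key1.key_dec for the data but key2.key_enc
 * for the tweak: in XTS the tweak is always produced by *encrypting* the
 * sector IV, in both directions.
 */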
static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };
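/*
 * The "__" prefixed entries above are marked CRYPTO_ALG_INTERNAL, which
 * hides them from ordinary algorithm lookups. aes_init() below wraps each
 * of them in a simd skcipher instance that defers to an asynchronous helper
 * whenever the NEON register file cannot be used directly (e.g., when the
 * request originates from hard IRQ context).
 */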
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}
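/*
 * The above implements doubling (multiplication by x) in GF(2^128) with
 * the reduction polynomial x^128 + x^7 + x^2 + x + 1: shift the 128-bit
 * value left by one bit and, if a bit was shifted out at the top, fold it
 * back in by XORing 0x87 into the low byte. This is the standard CMAC
 * subkey derivation step.
 */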
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], rk, rounds, 1, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2, 0);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}
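/*
 * Per RFC 3566, XCBC derives three keys from the AES key K: K1 = E_K(0x01^16)
 * becomes the CBC-MAC key (installed by the trailing cbcmac_setkey() call),
 * while K2 = E_K(0x02^16) and K3 = E_K(0x03^16) land in ctx->consts via the
 * two-block aes_ecb_encrypt() call above.
 */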
static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			kernel_neon_begin();
			aes_mac_update(p, tctx->key.key_enc, rounds, blocks,
				       ctx->dg, (ctx->len != 0), (len != 0));
			kernel_neon_end();

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}
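/*
 * Note that when the input ends exactly on a block boundary, the code above
 * keeps the final block buffered (ctx->len == AES_BLOCK_SIZE) and passes
 * enc_after == 0, deferring its encryption so that cmac_final() can still
 * fold the appropriate subkey into it before the last AES invocation.
 */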
static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;

	kernel_neon_begin();
	aes_mac_update(NULL, tctx->key.key_enc, rounds, 0, ctx->dg, 1, 0);
	kernel_neon_end();

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	int rounds = 6 + tctx->key.key_length / 4;
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	kernel_neon_begin();
	aes_mac_update(consts, tctx->key.key_enc, rounds, 1, ctx->dg, 0, 1);
	kernel_neon_end();

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
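/*
 * This matches NIST SP 800-38B: a message ending on a block boundary is
 * XORed with the first subkey, while an incomplete final block is padded
 * with a single 0x80 byte (10...0) and XORed with the second subkey, hence
 * the AES_BLOCK_SIZE offset into tctx->consts.
 */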
static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;

unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif

module_exit(aes_exit);