/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#include "aes-ctr-fallback.h"
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};
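/*
 * mac_tfm_ctx ends in a flexible array member: the cmac(aes) and xcbc(aes)
 * algorithm definitions below reserve two extra AES blocks of transform
 * context (via cra_ctxsize) so that the derived subkeys can live in consts[].
 */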
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}
static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
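/*
 * All mode handlers below derive the AES round count from the key length in
 * bytes: 6 + key_length / 4 yields 10, 12 or 14 rounds for AES-128, AES-192
 * and AES-256 respectively (e.g. 6 + 16 / 4 = 10).
 */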
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
		first = 0;
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_neon_end();

	return err;
}
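/*
 * ctr_encrypt_sync() below backs the synchronous "ctr(aes)" algorithm. When
 * the NEON unit cannot be used in the current context (!may_use_simd()), it
 * falls back to the scalar aes_ctr_encrypt_fallback() helper from
 * aes-ctr-fallback.h instead of entering the SIMD code path above.
 */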
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!may_use_simd())
		return aes_ctr_encrypt_fallback(ctx, req);

	return ctr_encrypt(req);
}
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt_sync,
	.decrypt	= ctr_encrypt_sync,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };
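/*
 * The "__" prefixed entries above are marked CRYPTO_ALG_INTERNAL and are only
 * reachable through the simd wrappers that aes_init() creates around them;
 * the lower-priority "ctr(aes)" entry remains directly callable from any
 * context via ctr_encrypt_sync(). A user would typically reach these through
 * the generic crypto API, e.g. (illustrative only):
 *
 *	struct crypto_skcipher *tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
 */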
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}
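/*
 * cmac_gf128_mul_by_x() doubles a 128-bit value in GF(2^128), reducing by
 * the polynomial x^128 + x^7 + x^2 + x + 1 (hence the 0x87 constant).
 * cmac_setkey() below uses it to derive the two CMAC subkeys by doubling the
 * encryption of the all-zero block once, and then once more.
 */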
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], rk, rounds, 1, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2, 0);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}
static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (may_use_simd()) {
		kernel_neon_begin();
		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
			       enc_after);
		kernel_neon_end();
	} else {
		if (enc_before)
			__aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				__aes_arm64_encrypt(ctx->key_enc, dg, dg,
						    rounds);
		}
	}
}
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}
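/*
 * Finalisation: cbcmac_final() just encrypts the accumulated digest state,
 * while cmac_final() first mixes in one of the two derived subkeys, picking
 * the second one (and adding 0x80 padding) when the final block is partial.
 */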
static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1, 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);
);