/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#include "aes-ctr-fallback.h"

#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_setkey		ce_aes_setkey
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_setkey		crypto_aes_set_key
#define aes_expandkey		crypto_aes_expand_key
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};

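/*
 * Key setup: the skcipher modes reuse the AES key schedule produced by
 * aes_setkey()/aes_expandkey(), which map to the Crypto Extensions or NEON
 * variants depending on how this file is built.
 */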
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	return aes_setkey(crypto_skcipher_tfm(tfm), in_key, key_len);
}

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

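/*
 * The block mode handlers below walk the request in multiples of
 * AES_BLOCK_SIZE and pass whole blocks to the assembler routines from
 * aes-modes.S with the NEON unit enabled.
 */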
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}

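/*
 * Synchronous CTR wrapper: when the NEON unit may not be used (e.g. in hard
 * IRQ context), fall back to the scalar AES core via
 * aes_ctr_encrypt_fallback().
 */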
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	if (!may_use_simd())
		return aes_ctr_encrypt_fallback(ctx, req);

	return ctr_encrypt(req);
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}

	return err;
}

static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt_sync,
	.decrypt	= ctr_encrypt_sync,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };

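/*
 * CMAC, XCBC and CBCMAC share the block processing code below and differ
 * only in key setup and finalization.
 */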
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int err;

	err = aes_expandkey(&ctx->key, in_key, key_len);
	if (err)
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);

	return err;
}

static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}

static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, rk, rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}

static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	u8 *rk = (u8 *)ctx->key.key_enc;
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], rk, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], rk, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}

static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}

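/*
 * Process full blocks of MAC input: use the NEON aes_mac_update() routine
 * when SIMD is usable, or fall back to the scalar __aes_arm64_encrypt()
 * otherwise.
 */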
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (may_use_simd()) {
		kernel_neon_begin();
		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
			       enc_after);
		kernel_neon_end();
	} else {
		if (enc_before)
			__aes_arm64_encrypt(ctx->key_enc, dg, dg, rounds);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				__aes_arm64_encrypt(ctx->key_enc, dg, dg,
						    rounds);
		}
	}
}

static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}

static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, 1, 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}

static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };

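/*
 * The CRYPTO_ALG_INTERNAL skciphers above are wrapped with simd helpers at
 * init time, so users see "ecb(aes)", "cbc(aes)", "ctr(aes)" and "xts(aes)"
 * instances that defer to cryptd when the NEON unit cannot be used directly.
 */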
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}

#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
#endif
module_exit(aes_exit);