// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/cpufeature.h>
#include <linux/module.h>
#include <crypto/xts.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				       int rounds, int bytes, u8 const iv[]);
asmlinkage void ce_aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				       int rounds, int bytes, u8 const iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				   int rounds, int bytes, u8 iv[],
				   u32 const rk2[], int first);

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

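/*
 * Expand the user-supplied key into the encryption and decryption round key
 * schedules used by the assembler routines in aes-ce-core.S. ce_aes_sub()
 * reuses the AES instructions to perform the SubWord() step of the key
 * schedule core, and ce_aes_invert() applies the Inverse MixColumns step
 * needed for the Equivalent Inverse Cipher round keys.
 */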
static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));

	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}

static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

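/*
 * XTS uses two independent AES keys: key1 encrypts the data blocks and key2
 * encrypts the IV to produce the initial tweak, so both expanded key
 * schedules are kept in the transform context.
 */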
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}

static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned int blocks;
	int err = 0;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				   ctx->key_dec, num_rounds(ctx), blocks,
				   walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}

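/*
 * cts(cbc(aes)): all but the last two blocks are handled by an ordinary CBC
 * pass (cbc_encrypt_walk/cbc_decrypt_walk); the final full block plus the
 * partial tail are then passed in a single call to the ciphertext stealing
 * helpers in aes-ce-core.S, so the overall message length need not be a
 * multiple of the block size.
 */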
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;
	int err;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			       ctx->key_dec, num_rounds(ctx), walk.nbytes,
			       walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err, blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell ce_aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		ce_aes_ctr_encrypt(tail, NULL, ctx->key_enc, num_rounds(ctx),
				   blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	return err;
}

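/*
 * Fallback used by the synchronous ctr(aes) implementation: when
 * crypto_simd_usable() reports that the NEON unit may not be used, the
 * counter blocks are encrypted one at a time with the generic AES library
 * routine instead of the Crypto Extensions code.
 */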
static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}

static int ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}

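/*
 * XTS: block aligned requests are processed directly. When the length is not
 * block aligned and the walk cannot cover the whole request in one step, all
 * but the last full block and the partial tail are processed first via a
 * subrequest, and the remaining AES_BLOCK_SIZE + tail bytes are handled in a
 * final ciphertext stealing call.
 */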
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   ctx->key1.key_dec, rounds, nbytes, walk.iv,
				   ctx->key2.key_enc, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;

	kernel_neon_begin();
	ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			   ctx->key1.key_dec, rounds, walk.nbytes, walk.iv,
			   ctx->key2.key_enc, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}

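/*
 * The "__" prefixed entries below are internal-only algorithms; aes_init()
 * wraps each of them in a simd skcipher instance, which defers to cryptd
 * when the NEON unit is not available to the caller.
 */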
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "__ecb(aes)",
	.base.cra_driver_name	= "__ecb-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "__cbc(aes)",
	.base.cra_driver_name	= "__cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "__cts(cbc(aes))",
	.base.cra_driver_name	= "__cts-cbc-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= cts_cbc_encrypt,
	.decrypt		= cts_cbc_decrypt,
}, {
	.base.cra_name		= "__ctr(aes)",
	.base.cra_driver_name	= "__ctr-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.setkey			= ce_aes_setkey,
	.encrypt		= ctr_encrypt_sync,
	.decrypt		= ctr_encrypt_sync,
}, {
	.base.cra_name		= "__xts(aes)",
	.base.cra_driver_name	= "__xts-aes-ce",
	.base.cra_priority	= 300,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct crypto_aes_xts_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.walksize		= 2 * AES_BLOCK_SIZE,
	.setkey			= xts_set_key,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
		simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}

static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
}

module_cpu_feature_match(AES, aes_init);
module_exit(aes_exit);