// SPDX-License-Identifier: GPL-2.0-only
/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
#include <asm/neon.h>
#include <asm/hwcap.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/sha2.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/scatterwalk.h>
#include <linux/module.h>
#include <linux/cpufeature.h>
#include <crypto/xts.h>

#include "aes-ce-setkey.h"
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_expandkey		ce_aes_expandkey
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	ce_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	ce_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	ce_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	ce_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
#define aes_mac_update		ce_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_cbc_cts_encrypt	neon_aes_cbc_cts_encrypt
#define aes_cbc_cts_decrypt	neon_aes_cbc_cts_decrypt
#define aes_essiv_cbc_encrypt	neon_aes_essiv_cbc_encrypt
#define aes_essiv_cbc_decrypt	neon_aes_essiv_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
#define aes_mac_update		neon_aes_mac_update
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
#endif
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !defined(CONFIG_CRYPTO_AES_ARM64_BS)
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
#endif
MODULE_ALIAS_CRYPTO("cts(cbc(aes))");
MODULE_ALIAS_CRYPTO("essiv(cbc(aes),sha256)");
MODULE_ALIAS_CRYPTO("cmac(aes)");
MODULE_ALIAS_CRYPTO("xcbc(aes)");
MODULE_ALIAS_CRYPTO("cbcmac(aes)");

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 iv[]);

asmlinkage void aes_cbc_cts_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int bytes, u8 const iv[]);
asmlinkage void aes_cbc_cts_decrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int bytes, u8 const iv[]);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u32 const rk[],
				int rounds, int blocks, u8 ctr[]);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int bytes, u32 const rk2[], u8 iv[],
				int first);

asmlinkage void aes_essiv_cbc_encrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int blocks, u8 iv[],
				u32 const rk2[]);
asmlinkage void aes_essiv_cbc_decrypt(u8 out[], u8 const in[], u32 const rk1[],
				int rounds, int blocks, u8 iv[],
				u32 const rk2[]);

asmlinkage void aes_mac_update(u8 const in[], u32 const rk[], int rounds,
			       int blocks, u8 dg[], int enc_before,
			       int enc_after);
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

struct crypto_aes_essiv_cbc_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
	struct crypto_shash *hash;
};

struct mac_tfm_ctx {
	struct crypto_aes_ctx key;
	u8 __aligned(8) consts[];
};

struct mac_desc_ctx {
	unsigned int len;
	u8 dg[AES_BLOCK_SIZE];
};
static int skcipher_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			       unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);

	return aes_expandkey(ctx, in_key, key_len);
}
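
/*
 * XTS uses two independent AES keys: the supplied key material is split in
 * half, with the first half keying the data cipher (key1) and the second
 * half keying the tweak cipher (key2). xts_verify_key() performs the usual
 * sanity checks on the combined key before the halves are expanded.
 */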
static int __maybe_unused xts_set_key(struct crypto_skcipher *tfm,
				      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				    key_len / 2);
	return ret;
}
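
/*
 * ESSIV derives the second key by hashing the user key with SHA-256; the
 * 32-byte digest is itself a valid AES-256 key and is used to encrypt the
 * sector number into the per-request IV.
 */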
static int __maybe_unused essiv_cbc_set_key(struct crypto_skcipher *tfm,
					    const u8 *in_key,
					    unsigned int key_len)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	u8 digest[SHA256_DIGEST_SIZE];
	int ret;

	ret = aes_expandkey(&ctx->key1, in_key, key_len);
	if (ret)
		return ret;

	crypto_shash_tfm_digest(ctx->hash, in_key, key_len, digest);

	return aes_expandkey(&ctx->key2, digest, sizeof(digest));
}
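
/*
 * All modes below derive the AES round count from the key length:
 * 6 + key_length / 4 yields 10, 12 or 14 rounds for 16-, 24- and
 * 32-byte keys respectively.
 */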
static int __maybe_unused ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int __maybe_unused ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_dec, rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int cbc_encrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_enc, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_encrypt_walk(req, &walk);
}
static int cbc_decrypt_walk(struct skcipher_request *req,
			    struct skcipher_walk *walk)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err = 0, rounds = 6 + ctx->key_length / 4;
	unsigned int blocks;

	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
				ctx->key_dec, rounds, blocks, walk->iv);
		kernel_neon_end();
		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
	}
	return err;
}

static int __maybe_unused cbc_decrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	if (err)
		return err;
	return cbc_decrypt_walk(req, &walk);
}
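
/*
 * CBC with ciphertext stealing: everything up to the last two blocks is
 * processed as ordinary CBC, and the final two (possibly partial) blocks
 * are handled by the dedicated assembly helper. Hence
 * cbc_blocks = DIV_ROUND_UP(cryptlen, AES_BLOCK_SIZE) - 2.
 */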
static int cts_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_encrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
static int cts_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	int cbc_blocks = DIV_ROUND_UP(req->cryptlen, AES_BLOCK_SIZE) - 2;
	struct scatterlist *src = req->src, *dst = req->dst;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct skcipher_walk walk;

	skcipher_request_set_tfm(&subreq, tfm);
	skcipher_request_set_callback(&subreq, skcipher_request_flags(req),
				      NULL, NULL);

	if (req->cryptlen <= AES_BLOCK_SIZE) {
		if (req->cryptlen < AES_BLOCK_SIZE)
			return -EINVAL;
		cbc_blocks = 1;
	}

	if (cbc_blocks > 0) {
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   cbc_blocks * AES_BLOCK_SIZE,
					   req->iv);

		err = skcipher_walk_virt(&walk, &subreq, false) ?:
		      cbc_decrypt_walk(&subreq, &walk);
		if (err)
			return err;

		if (req->cryptlen == AES_BLOCK_SIZE)
			return 0;

		dst = src = scatterwalk_ffwd(sg_src, req->src, subreq.cryptlen);
		if (req->dst != req->src)
			dst = scatterwalk_ffwd(sg_dst, req->dst,
					       subreq.cryptlen);
	}

	/* handle ciphertext stealing */
	skcipher_request_set_crypt(&subreq, src, dst,
				   req->cryptlen - cbc_blocks * AES_BLOCK_SIZE,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
static int __maybe_unused essiv_cbc_init_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	ctx->hash = crypto_alloc_shash("sha256", 0, 0);

	return PTR_ERR_OR_ZERO(ctx->hash);
}

static void __maybe_unused essiv_cbc_exit_tfm(struct crypto_skcipher *tfm)
{
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);

	crypto_free_shash(ctx->hash);
}
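
/*
 * The assembly helper generates the ESSIV by encrypting the IV with key2
 * before CBC-processing the full blocks it is given; any data the first
 * walk pass could not cover is finished by the plain CBC helpers.
 */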
static int __maybe_unused essiv_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_enc, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_encrypt_walk(req, &walk);
}
static int __maybe_unused essiv_cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_essiv_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key1.key_length / 4;
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	blocks = walk.nbytes / AES_BLOCK_SIZE;
	if (blocks) {
		kernel_neon_begin();
		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				      ctx->key1.key_dec, rounds, blocks,
				      req->iv, ctx->key2.key_enc);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err ?: cbc_decrypt_walk(req, &walk);
}
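
/*
 * CTR mode: full blocks are encrypted in bulk by the assembly routine.
 * A trailing partial block is handled by generating one block of
 * keystream into a stack buffer and XOR-ing only the remaining bytes
 * into the destination.
 */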
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, rounds = 6 + ctx->key_length / 4;
	struct skcipher_walk walk;
	int blocks;

	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		kernel_neon_begin();
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key_enc, rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		kernel_neon_begin();
		aes_ctr_encrypt(tail, NULL, ctx->key_enc, rounds,
				blocks, walk.iv);
		kernel_neon_end();
		crypto_xor_cpy(tdst, tsrc, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}

	return err;
}
static void ctr_encrypt_one(struct crypto_skcipher *tfm, const u8 *src, u8 *dst)
{
	const struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	unsigned long flags;

	/*
	 * Temporarily disable interrupts to avoid races where
	 * cachelines are evicted when the CPU is interrupted
	 * to do something else.
	 */
	local_irq_save(flags);
	aes_encrypt(ctx, dst, src);
	local_irq_restore(flags);
}
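
/*
 * Synchronous fallback entry point: when the NEON unit cannot be used in
 * the current context, fall back to the generic CTR walk built on the
 * table-based aes_encrypt() library routine.
 */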
static int __maybe_unused ctr_encrypt_sync(struct skcipher_request *req)
{
	if (!crypto_simd_usable())
		return crypto_ctr_encrypt_walk(req, ctr_encrypt_one);

	return ctr_encrypt(req);
}
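
/*
 * XTS: when cryptlen is not a multiple of the block size, the last full
 * block and the tail are processed together using ciphertext stealing,
 * so the bulk pass is limited to xts_blocks full blocks and the
 * remainder is handled in a second, separate walk.
 */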
static int __maybe_unused xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_enc, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_enc, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
static int __maybe_unused xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	int tail = req->cryptlen % AES_BLOCK_SIZE;
	struct scatterlist sg_src[2], sg_dst[2];
	struct skcipher_request subreq;
	struct scatterlist *src, *dst;
	struct skcipher_walk walk;

	if (req->cryptlen < AES_BLOCK_SIZE)
		return -EINVAL;

	err = skcipher_walk_virt(&walk, req, false);

	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
		int xts_blocks = DIV_ROUND_UP(req->cryptlen,
					      AES_BLOCK_SIZE) - 2;

		skcipher_walk_abort(&walk);

		skcipher_request_set_tfm(&subreq, tfm);
		skcipher_request_set_callback(&subreq,
					      skcipher_request_flags(req),
					      NULL, NULL);
		skcipher_request_set_crypt(&subreq, req->src, req->dst,
					   xts_blocks * AES_BLOCK_SIZE,
					   req->iv);
		req = &subreq;
		err = skcipher_walk_virt(&walk, req, false);
	} else {
		tail = 0;
	}

	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
		int nbytes = walk.nbytes;

		if (walk.nbytes < walk.total)
			nbytes &= ~(AES_BLOCK_SIZE - 1);

		kernel_neon_begin();
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				ctx->key1.key_dec, rounds, nbytes,
				ctx->key2.key_enc, walk.iv, first);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	if (err || likely(!tail))
		return err;

	dst = src = scatterwalk_ffwd(sg_src, req->src, req->cryptlen);
	if (req->dst != req->src)
		dst = scatterwalk_ffwd(sg_dst, req->dst, req->cryptlen);

	skcipher_request_set_crypt(req, src, dst, AES_BLOCK_SIZE + tail,
				   req->iv);

	err = skcipher_walk_virt(&walk, &subreq, false);
	if (err)
		return err;

	kernel_neon_begin();
	aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
			ctx->key1.key_dec, rounds, walk.nbytes,
			ctx->key2.key_enc, walk.iv, first);
	kernel_neon_end();

	return skcipher_walk_done(&walk, 0);
}
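
/*
 * Algorithms whose names carry a "__" prefix and the CRYPTO_ALG_INTERNAL
 * flag are synchronous NEON implementations that are not exposed to
 * general users directly; aes_init() wraps each of them in a SIMD helper
 * that defers the work whenever NEON is unavailable in the caller's
 * context.
 */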
static struct skcipher_alg aes_algs[] = { {
#if defined(USE_V8_CRYPTO_EXTENSIONS) || !defined(CONFIG_CRYPTO_AES_ARM64_BS)
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,
}, {
	.base = {
		.cra_name		= "ctr(aes)",
		.cra_driver_name	= "ctr-aes-" MODE,
		.cra_priority		= PRIO - 1,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= ctr_encrypt_sync,
	.decrypt	= ctr_encrypt_sync,
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
}, {
#endif
	.base = {
		.cra_name		= "__cts(cbc(aes))",
		.cra_driver_name	= "__cts-cbc-aes-" MODE,
		.cra_priority		= PRIO,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.walksize	= 2 * AES_BLOCK_SIZE,
	.setkey		= skcipher_aes_setkey,
	.encrypt	= cts_cbc_encrypt,
	.decrypt	= cts_cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__essiv(cbc(aes),sha256)",
		.cra_driver_name	= "__essiv-cbc-aes-sha256-" MODE,
		.cra_priority		= PRIO + 1,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_essiv_cbc_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= essiv_cbc_set_key,
	.encrypt	= essiv_cbc_encrypt,
	.decrypt	= essiv_cbc_decrypt,
	.init		= essiv_cbc_init_tfm,
	.exit		= essiv_cbc_exit_tfm,
} };
static int cbcmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);

	return aes_expandkey(&ctx->key, in_key, key_len);
}
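
/*
 * Doubling in GF(2^128) with the polynomial x^128 + x^7 + x^2 + x + 1:
 * shift the 128-bit value left by one bit and, if the top bit was set,
 * fold it back in by XOR-ing 0x87 into the low byte.
 */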
static void cmac_gf128_mul_by_x(be128 *y, const be128 *x)
{
	u64 a = be64_to_cpu(x->a);
	u64 b = be64_to_cpu(x->b);

	y->a = cpu_to_be64((a << 1) | (b >> 63));
	y->b = cpu_to_be64((b << 1) ^ ((a >> 63) ? 0x87 : 0));
}
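
/*
 * CMAC derives its two subkeys K1 and K2 by encrypting the all-zero
 * block and doubling the result once and twice in GF(2^128); both are
 * cached in ctx->consts for use when the final block is processed.
 */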
static int cmac_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *consts = (be128 *)ctx->consts;
	int rounds = 6 + key_len / 4;
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	/* encrypt the zero vector */
	kernel_neon_begin();
	aes_ecb_encrypt(ctx->consts, (u8[AES_BLOCK_SIZE]){}, ctx->key.key_enc,
			rounds, 1);
	kernel_neon_end();

	cmac_gf128_mul_by_x(consts, consts);
	cmac_gf128_mul_by_x(consts + 1, consts);

	return 0;
}
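
/*
 * XCBC-MAC derives three keys by encrypting the constant blocks
 * 0x01..01, 0x02..02 and 0x03..03 under the user key: the first becomes
 * the CBC-MAC key, and the other two are cached in ctx->consts for
 * masking the final block.
 */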
static int xcbc_setkey(struct crypto_shash *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	static u8 const ks[3][AES_BLOCK_SIZE] = {
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x1 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x2 },
		{ [0 ... AES_BLOCK_SIZE - 1] = 0x3 },
	};

	struct mac_tfm_ctx *ctx = crypto_shash_ctx(tfm);
	int rounds = 6 + key_len / 4;
	u8 key[AES_BLOCK_SIZE];
	int err;

	err = cbcmac_setkey(tfm, in_key, key_len);
	if (err)
		return err;

	kernel_neon_begin();
	aes_ecb_encrypt(key, ks[0], ctx->key.key_enc, rounds, 1);
	aes_ecb_encrypt(ctx->consts, ks[1], ctx->key.key_enc, rounds, 2);
	kernel_neon_end();

	return cbcmac_setkey(tfm, key, sizeof(key));
}
static int mac_init(struct shash_desc *desc)
{
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	memset(ctx->dg, 0, AES_BLOCK_SIZE);
	ctx->len = 0;

	return 0;
}
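
/*
 * Core (CBC-)MAC block processing: when NEON is usable the blocks are
 * handed to the assembly routine, otherwise a plain C CBC-MAC is
 * computed using the generic aes_encrypt() library call.
 */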
static void mac_do_update(struct crypto_aes_ctx *ctx, u8 const in[], int blocks,
			  u8 dg[], int enc_before, int enc_after)
{
	int rounds = 6 + ctx->key_length / 4;

	if (crypto_simd_usable()) {
		kernel_neon_begin();
		aes_mac_update(in, ctx->key_enc, rounds, blocks, dg, enc_before,
			       enc_after);
		kernel_neon_end();
	} else {
		if (enc_before)
			aes_encrypt(ctx, dg, dg);

		while (blocks--) {
			crypto_xor(dg, in, AES_BLOCK_SIZE);
			in += AES_BLOCK_SIZE;

			if (blocks || enc_after)
				aes_encrypt(ctx, dg, dg);
		}
	}
}
static int mac_update(struct shash_desc *desc, const u8 *p, unsigned int len)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	while (len > 0) {
		unsigned int l;

		if ((ctx->len % AES_BLOCK_SIZE) == 0 &&
		    (ctx->len + len) > AES_BLOCK_SIZE) {

			int blocks = len / AES_BLOCK_SIZE;

			len %= AES_BLOCK_SIZE;

			mac_do_update(&tctx->key, p, blocks, ctx->dg,
				      (ctx->len != 0), (len != 0));

			p += blocks * AES_BLOCK_SIZE;

			if (!len) {
				ctx->len = AES_BLOCK_SIZE;
				break;
			}
			ctx->len = 0;
		}

		l = min(len, AES_BLOCK_SIZE - ctx->len);

		if (l <= AES_BLOCK_SIZE) {
			crypto_xor(ctx->dg + ctx->len, p, l);
			ctx->len += l;
			len -= l;
			p += l;
		}
	}

	return 0;
}
static int cbcmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);

	mac_do_update(&tctx->key, NULL, 0, ctx->dg, (ctx->len != 0), 0);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
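
/*
 * CMAC finalization: a partial final block gets the 10* bit padding
 * (a single 0x80 byte) and is masked with the second cached constant
 * (K2); a complete final block is masked with the first (K1).
 */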
static int cmac_final(struct shash_desc *desc, u8 *out)
{
	struct mac_tfm_ctx *tctx = crypto_shash_ctx(desc->tfm);
	struct mac_desc_ctx *ctx = shash_desc_ctx(desc);
	u8 *consts = tctx->consts;

	if (ctx->len != AES_BLOCK_SIZE) {
		ctx->dg[ctx->len] ^= 0x80;
		consts += AES_BLOCK_SIZE;
	}

	mac_do_update(&tctx->key, consts, 1, ctx->dg, 0, 1);

	memcpy(out, ctx->dg, AES_BLOCK_SIZE);

	return 0;
}
static struct shash_alg mac_algs[] = { {
	.base.cra_name		= "cmac(aes)",
	.base.cra_driver_name	= "cmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= cmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "xcbc(aes)",
	.base.cra_driver_name	= "xcbc-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx) +
				  2 * AES_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cmac_final,
	.setkey			= xcbc_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
}, {
	.base.cra_name		= "cbcmac(aes)",
	.base.cra_driver_name	= "cbcmac-aes-" MODE,
	.base.cra_priority	= PRIO,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct mac_tfm_ctx),
	.base.cra_module	= THIS_MODULE,

	.digestsize		= AES_BLOCK_SIZE,
	.init			= mac_init,
	.update			= mac_update,
	.final			= cbcmac_final,
	.setkey			= cbcmac_setkey,
	.descsize		= sizeof(struct mac_desc_ctx),
} };
static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
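
/*
 * Register the base algorithms, then create a SIMD wrapper for every
 * internal one; cra_name + 2 strips the "__" prefix so that the wrapper
 * is registered under the public algorithm name.
 */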
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	err = crypto_register_shashes(mac_algs, ARRAY_SIZE(mac_algs));
	if (err)
		goto unregister_ciphers;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;

		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
unregister_ciphers:
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	return err;
}
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
EXPORT_SYMBOL(neon_aes_ecb_encrypt);
EXPORT_SYMBOL(neon_aes_cbc_encrypt);
EXPORT_SYMBOL(neon_aes_xts_encrypt);
EXPORT_SYMBOL(neon_aes_xts_decrypt);
#endif
module_exit(aes_exit);