/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Fallback for sync aes(ctr) in contexts where kernel mode NEON
 * is not allowed
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
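/*
 * __aes_arm64_encrypt() is the scalar (non-NEON) AES core, implemented in
 * assembly; it is used below to generate one keystream block at a time.
 * The round count passed to it is 6 + key_length / 4, i.e. 10/12/14
 * rounds for AES-128/192/256.
 */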
static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
					   struct skcipher_request *req)
{
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, true);
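	/*
	 * Walk the request in virtually mapped chunks, with the atomic
	 * flag set: this fallback exists for contexts where kernel mode
	 * NEON is unusable, which includes contexts that must not sleep.
	 */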
	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;

		if (nbytes < walk.total) {
			/* Round down to a whole number of blocks */
			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
			tail = walk.nbytes % AES_BLOCK_SIZE;
		}
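		/*
		 * CTR keystream: encrypt the counter in walk.iv into buf,
		 * XOR it into the data, then bump the counter. Only the
		 * last block of the request may be partial (bsize less
		 * than the block size), since CTR needs no padding.
		 */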
		do {
			int bsize = min(nbytes, AES_BLOCK_SIZE);

			__aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
					    6 + ctx->key_length / 4);
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		} while (nbytes > 0);
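		/*
		 * Report any tail bytes back as unprocessed so the walk
		 * presents them again at the start of the next chunk.
		 */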
		err = skcipher_walk_done(&walk, tail);
	}

	return err;
}
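/*
 * Usage sketch (illustrative, not part of the original file): a sync
 * skcipher .encrypt handler would typically take this fallback only
 * when kernel mode NEON is unusable. ctr_encrypt_neon() below is a
 * hypothetical fast path; crypto_simd_usable() is declared in
 * <crypto/internal/simd.h>.
 *
 *	static int ctr_encrypt_sync(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		if (!crypto_simd_usable())
 *			return aes_ctr_encrypt_fallback(ctx, req);
 *
 *		return ctr_encrypt_neon(req);
 *	}
 */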