/*
 * Fallback for sync aes(ctr) in contexts where kernel mode NEON
 * is not allowed
 *
 * Copyright (C) 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>
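/*
 * __aes_arm64_encrypt() is the scalar AES core, so it is safe to call
 * without kernel_neon_begin()/kernel_neon_end().
 */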
asmlinkage void __aes_arm64_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
static inline int aes_ctr_encrypt_fallback(struct crypto_aes_ctx *ctx,
					   struct skcipher_request *req)
{
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;
	err = skcipher_walk_virt(&walk, req, true);
	while (walk.nbytes > 0) {
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		int nbytes = walk.nbytes;
		int tail = 0;
		if (nbytes < walk.total) {
			/*
			 * Only process full blocks for now; hand the
			 * partial tail back to the walk for the final
			 * iteration.
			 */
			nbytes = round_down(nbytes, AES_BLOCK_SIZE);
			tail = walk.nbytes % AES_BLOCK_SIZE;
		}

		do {
			int bsize = min(nbytes, AES_BLOCK_SIZE);

			/*
			 * Encrypt the counter with the scalar core to
			 * produce one block of keystream (10/12/14
			 * rounds for 16/24/32 byte keys), XOR it into
			 * the data and advance the counter.
			 */
			__aes_arm64_encrypt(ctx->key_enc, buf, walk.iv,
					    6 + ctx->key_length / 4);
			crypto_xor_cpy(dst, src, buf, bsize);
			crypto_inc(walk.iv, AES_BLOCK_SIZE);

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
			nbytes -= AES_BLOCK_SIZE;
		} while (nbytes > 0);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}
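/*
 * A typical caller (sketch, assuming an aes-glue.c style setup: the
 * NEON ctr_encrypt() routine and may_use_simd() from <asm/simd.h> are
 * assumptions here, not provided by this header):
 *
 *	static int ctr_encrypt_sync(struct skcipher_request *req)
 *	{
 *		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
 *		struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
 *
 *		if (!may_use_simd())
 *			return aes_ctr_encrypt_fallback(ctx, req);
 *
 *		return ctr_encrypt(req);
 *	}
 */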