/*
 * linux/arch/arm/crypto/aesbs-glue.c - glue code for NEON bit sliced AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>

#include "aes_glue.h"
#define BIT_SLICED_KEY_MAXSIZE	(128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)

struct BS_KEY {
	struct AES_KEY	rk;
	int		converted;
	u8 __aligned(8)	bs[BIT_SLICED_KEY_MAXSIZE];
} __aligned(8);
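/*
 * NOTE: the 'converted' flag allows the expensive conversion of the AES round
 * keys into the bit sliced representation to be done lazily: the setkey
 * routines below only clear the flag, and the conversion is presumably
 * performed by the NEON code the first time a key is actually used.
 */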
asmlinkage void bsaes_enc_key_convert(u8 out[], struct AES_KEY const *in);
asmlinkage void bsaes_dec_key_convert(u8 out[], struct AES_KEY const *in);

asmlinkage void bsaes_cbc_encrypt(u8 const in[], u8 out[], u32 bytes,
				  struct BS_KEY *key, u8 iv[]);

asmlinkage void bsaes_ctr32_encrypt_blocks(u8 const in[], u8 out[], u32 blocks,
					   struct BS_KEY *key, u8 const iv[]);

asmlinkage void bsaes_xts_encrypt(u8 const in[], u8 out[], u32 bytes,
				  struct BS_KEY *key, u8 tweak[]);

asmlinkage void bsaes_xts_decrypt(u8 const in[], u8 out[], u32 bytes,
				  struct BS_KEY *key, u8 tweak[]);
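/*
 * The bsaes_* routines above are implemented in NEON assembler; the bit
 * sliced algorithm operates on eight AES blocks in parallel, which is why
 * the mode routines below walk the input in multiples of 8 * AES_BLOCK_SIZE
 * where they can.
 */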
struct aesbs_cbc_ctx {
	struct AES_KEY	enc;
	struct BS_KEY	dec;
};

struct aesbs_ctr_ctx {
	struct BS_KEY	enc;
};

struct aesbs_xts_ctx {
	struct BS_KEY	enc;
	struct BS_KEY	dec;
	struct AES_KEY	twkey;
};
static int aesbs_cbc_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			     unsigned int key_len)
{
	struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
	int bits = key_len * 8;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	ctx->dec.rk = ctx->enc;
	private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
	ctx->dec.converted = 0;
	return 0;
}
static int aesbs_ctr_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			     unsigned int key_len)
{
	struct aesbs_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	int bits = key_len * 8;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	ctx->enc.converted = 0;
	return 0;
}
static int aesbs_xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			     unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int bits = key_len * 4;		/* size of each half of the double-length XTS key */

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	ctx->dec.rk = ctx->enc.rk;
	private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
	private_AES_set_encrypt_key(in_key + key_len / 2, bits, &ctx->twkey);
	ctx->enc.converted = ctx->dec.converted = 0;
	return 0;
}
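/*
 * CBC encryption cannot be parallelised (each block depends on the previous
 * ciphertext block), so it is not a good fit for the 8-way bit sliced NEON
 * code. Encryption is therefore done one block at a time using the scalar
 * AES_encrypt() routine.
 */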
static int aesbs_cbc_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while (walk.nbytes) {
		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *src = walk.src.virt.addr;

		if (walk.dst.virt.addr == walk.src.virt.addr) {
			u8 *iv = walk.iv;

			/* in place: XOR the IV into the block and encrypt */
			do {
				crypto_xor(src, iv, AES_BLOCK_SIZE);
				AES_encrypt(src, src, &ctx->enc);
				iv = src;
				src += AES_BLOCK_SIZE;
			} while (--blocks);
			memcpy(walk.iv, iv, AES_BLOCK_SIZE);
		} else {
			u8 *dst = walk.dst.virt.addr;

			do {
				crypto_xor(walk.iv, src, AES_BLOCK_SIZE);
				AES_encrypt(walk.iv, dst, &ctx->enc);
				memcpy(walk.iv, dst, AES_BLOCK_SIZE);
				src += AES_BLOCK_SIZE;
				dst += AES_BLOCK_SIZE;
			} while (--blocks);
		}
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
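/*
 * CBC decryption is parallelisable, so chunks of 8 or more blocks are handed
 * to the NEON code (which takes the decryption key schedule via &ctx->dec);
 * any remainder is handled one block at a time with the scalar AES_decrypt()
 * routine, using a pair of bounce buffers to preserve the previous ciphertext
 * block when decrypting in place.
 */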
static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) {
		kernel_neon_begin();
		bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->dec, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	while (walk.nbytes) {
		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		u8 bk[2][AES_BLOCK_SIZE];
		u8 *iv = walk.iv;

		do {
			if (walk.dst.virt.addr == walk.src.virt.addr)
				memcpy(bk[blocks & 1], src, AES_BLOCK_SIZE);

			AES_decrypt(src, dst, &ctx->dec.rk);
			crypto_xor(dst, iv, AES_BLOCK_SIZE);

			if (walk.dst.virt.addr == walk.src.virt.addr)
				iv = bk[blocks & 1];
			else
				iv = src;

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
		} while (--blocks);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
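/* add 'addend' to the 128-bit big-endian counter, propagating any carries */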
static void inc_be128_ctr(__be32 ctr[], u32 addend)
{
	int i;

	for (i = 3; i >= 0; i--, addend = 1) {
		u32 n = be32_to_cpu(ctr[i]) + addend;

		ctr[i] = cpu_to_be32(n);
		if (n >= addend)
			break;
	}
}
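/*
 * CTR mode: full blocks are encrypted in batches by the NEON code; a final
 * partial block is handled by encrypting the counter with the scalar
 * AES_encrypt() routine and XORing the keystream into the data.
 */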
static int aesbs_ctr_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes)
{
	struct aesbs_ctr_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u32 blocks;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		__be32 *ctr = (__be32 *)walk.iv;
		u32 headroom = UINT_MAX - be32_to_cpu(ctr[3]);

		/* avoid 32 bit counter overflow in the NEON code */
		if (unlikely(headroom < blocks)) {
			blocks = headroom + 1;
			tail = walk.nbytes - blocks * AES_BLOCK_SIZE;
		}

		kernel_neon_begin();
		bsaes_ctr32_encrypt_blocks(walk.src.virt.addr,
					   walk.dst.virt.addr, blocks,
					   &ctx->enc, walk.iv);
		kernel_neon_end();
		inc_be128_ctr(ctr, blocks);

		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == tail && nbytes <= AES_BLOCK_SIZE)
			break;

		err = blkcipher_walk_done(desc, &walk, tail);
	}
	if (walk.nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 ks[AES_BLOCK_SIZE];

		AES_encrypt(walk.iv, ks, &ctx->enc.rk);
		if (tdst != tsrc)
			memcpy(tdst, tsrc, nbytes);
		crypto_xor(tdst, ks, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}
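/*
 * XTS: the initial tweak is derived by encrypting the IV with the tweak key;
 * the NEON routines consume and update the tweak as they process the data.
 */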
static int aesbs_xts_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	/* generate the initial tweak */
	AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

	while (walk.nbytes) {
		kernel_neon_begin();
		bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->enc, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int aesbs_xts_decrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	/* generate the initial tweak */
	AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

	while (walk.nbytes) {
		kernel_neon_begin();
		bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->dec, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
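/*
 * The "__"-prefixed entries below are plain synchronous blkciphers that may
 * only be invoked from a context where the NEON unit is usable. The
 * cbc(aes)/ctr(aes)/xts(aes) entries wrap them via the ablk_helper, which
 * defers the work to a kernel thread (through cryptd) whenever NEON cannot
 * be used in the calling context.
 */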
static struct crypto_alg aesbs_algs[] = { {
	.cra_name		= "__cbc-aes-neonbs",
	.cra_driver_name	= "__driver-cbc-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesbs_cbc_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesbs_cbc_set_key,
		.encrypt	= aesbs_cbc_encrypt,
		.decrypt	= aesbs_cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-neonbs",
	.cra_driver_name	= "__driver-ctr-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct aesbs_ctr_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesbs_ctr_set_key,
		.encrypt	= aesbs_ctr_encrypt,
		.decrypt	= aesbs_ctr_encrypt,
	},
}, {
	.cra_name		= "__xts-aes-neonbs",
	.cra_driver_name	= "__driver-xts-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesbs_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesbs_xts_set_key,
		.encrypt	= aesbs_xts_encrypt,
		.decrypt	= aesbs_xts_decrypt,
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= __ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
} };
static int __init aesbs_mod_init(void)
{
	if (!cpu_has_neon())
		return -ENODEV;

	return crypto_register_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}
static void __exit aesbs_mod_exit(void)
{
	crypto_unregister_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}
module_init(aesbs_mod_init);
module_exit(aesbs_mod_exit);

MODULE_DESCRIPTION("Bit sliced AES in CBC/CTR/XTS modes using NEON");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL");