/*
 * linux/arch/arm/crypto/aesbs-glue.c - glue code for NEON bit sliced AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>

#include "aes_glue.h"
#define BIT_SLICED_KEY_MAXSIZE	(128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)

struct BS_KEY {
	struct AES_KEY	rk;
	int		converted;
	u8 __aligned(8)	bs[BIT_SLICED_KEY_MAXSIZE];
} __aligned(8);
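/*
 * NEON bit-sliced primitives, implemented in assembly. They operate on a
 * struct BS_KEY; 'converted == 0' (as set by the setkey handlers below)
 * indicates that the bit-sliced round keys in 'bs' still need to be derived
 * from the regular schedule in 'rk' before they can be used.
 */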
asmlinkage void bsaes_enc_key_convert(u8 out[], struct AES_KEY const *in);
asmlinkage void bsaes_dec_key_convert(u8 out[], struct AES_KEY const *in);

asmlinkage void bsaes_cbc_encrypt(u8 const in[], u8 out[], u32 bytes,
				  struct BS_KEY *key, u8 iv[]);

asmlinkage void bsaes_ctr32_encrypt_blocks(u8 const in[], u8 out[], u32 blocks,
					   struct BS_KEY *key, u8 const iv[]);

asmlinkage void bsaes_xts_encrypt(u8 const in[], u8 out[], u32 bytes,
				  struct BS_KEY *key, u8 tweak[]);

asmlinkage void bsaes_xts_decrypt(u8 const in[], u8 out[], u32 bytes,
				  struct BS_KEY *key, u8 tweak[]);
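/*
 * Per-mode transform contexts. CBC encryption is strictly sequential and
 * only needs the regular key schedule; the bulk (NEON) paths use BS_KEY.
 */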
struct aesbs_cbc_ctx {
	struct AES_KEY	enc;
	struct BS_KEY	dec;
};

struct aesbs_ctr_ctx {
	struct BS_KEY	enc;
};

struct aesbs_xts_ctx {
	struct BS_KEY	enc;
	struct BS_KEY	dec;
	struct AES_KEY	twkey;
};
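/*
 * The setkey handlers expand the key using private_AES_set_encrypt_key() /
 * private_AES_set_decrypt_key() and clear 'converted', so that the
 * bit-sliced copy is known to be stale and gets regenerated before the NEON
 * code uses it.
 */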
static int aesbs_cbc_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			     unsigned int key_len)
{
	struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
	int bits = key_len * 8;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	ctx->dec.rk = ctx->enc;
	private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
	ctx->dec.converted = 0;
	return 0;
}
static int aesbs_ctr_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			     unsigned int key_len)
{
	struct aesbs_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
	int bits = key_len * 8;

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	ctx->enc.converted = 0;
	return 0;
}
static int aesbs_xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
			     unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int bits = key_len * 4;	/* the XTS key is two AES keys concatenated */

	if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
		tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}
	ctx->dec.rk = ctx->enc.rk;
	private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
	private_AES_set_encrypt_key(in_key + key_len / 2, bits, &ctx->twkey);
	ctx->enc.converted = ctx->dec.converted = 0;
	return 0;
}
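/*
 * CBC encryption cannot be parallelised (each block depends on the previous
 * ciphertext block), so the bit-sliced NEON code does not help here and the
 * blocks are encrypted one at a time with the scalar AES_encrypt().
 */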
static int aesbs_cbc_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while (walk.nbytes) {
		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *src = walk.src.virt.addr;

		if (walk.dst.virt.addr == walk.src.virt.addr) {
			u8 *iv = walk.iv;

			do {
				crypto_xor(src, iv, AES_BLOCK_SIZE);
				AES_encrypt(src, src, &ctx->enc);
				iv = src;
				src += AES_BLOCK_SIZE;
			} while (--blocks);
			memcpy(walk.iv, iv, AES_BLOCK_SIZE);
		} else {
			u8 *dst = walk.dst.virt.addr;

			do {
				crypto_xor(walk.iv, src, AES_BLOCK_SIZE);
				AES_encrypt(walk.iv, dst, &ctx->enc);
				memcpy(walk.iv, dst, AES_BLOCK_SIZE);
				src += AES_BLOCK_SIZE;
				dst += AES_BLOCK_SIZE;
			} while (--blocks);
		}
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}
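/*
 * CBC decryption is parallelisable: runs of eight or more blocks go to the
 * bit-sliced NEON code, and any remainder is decrypted block by block with
 * the scalar AES_decrypt(). When operating in place, the ciphertext has to
 * be saved (in bk[]) before it is overwritten, since it serves as the IV
 * for the next block.
 */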
static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) {
		kernel_neon_begin();
		bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->dec, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	while (walk.nbytes) {
		u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *dst = walk.dst.virt.addr;
		u8 *src = walk.src.virt.addr;
		u8 bk[2][AES_BLOCK_SIZE];
		u8 *iv = walk.iv;

		do {
			if (walk.dst.virt.addr == walk.src.virt.addr)
				memcpy(bk[blocks & 1], src, AES_BLOCK_SIZE);

			AES_decrypt(src, dst, &ctx->dec.rk);
			crypto_xor(dst, iv, AES_BLOCK_SIZE);

			if (walk.dst.virt.addr == walk.src.virt.addr)
				iv = bk[blocks & 1];
			else
				iv = src;

			dst += AES_BLOCK_SIZE;
			src += AES_BLOCK_SIZE;
		} while (--blocks);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}
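/* add 'addend' to a 128-bit big-endian counter, propagating the carry */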
static void inc_be128_ctr(__be32 ctr[], u32 addend)
{
	int i;

	for (i = 3; i >= 0; i--, addend = 1) {
		u32 n = be32_to_cpu(ctr[i]) + addend;

		ctr[i] = cpu_to_be32(n);
		if (n >= addend)
			break;
	}
}
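/*
 * The NEON routine only increments the low 32 bits of the counter, so each
 * call is capped at the number of blocks that fit before that word wraps;
 * inc_be128_ctr() then carries the increment into the upper words. A final
 * partial block is handled by encrypting the counter with the scalar
 * AES_encrypt() and XORing the result in as keystream.
 */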
static int aesbs_ctr_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst, struct scatterlist *src,
			     unsigned int nbytes)
{
	struct aesbs_ctr_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	u32 blocks;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;
		__be32 *ctr = (__be32 *)walk.iv;
		u32 headroom = UINT_MAX - be32_to_cpu(ctr[3]);

		/* avoid 32 bit counter overflow in the NEON code */
		if (unlikely(headroom < blocks)) {
			blocks = headroom + 1;
			tail = walk.nbytes - blocks * AES_BLOCK_SIZE;
		}

		kernel_neon_begin();
		bsaes_ctr32_encrypt_blocks(walk.src.virt.addr,
					   walk.dst.virt.addr, blocks,
					   &ctx->enc, walk.iv);
		kernel_neon_end();
		inc_be128_ctr(ctr, blocks);

		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == tail && nbytes <= AES_BLOCK_SIZE)
			break;

		err = blkcipher_walk_done(desc, &walk, tail);
	}
	if (walk.nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 ks[AES_BLOCK_SIZE];

		AES_encrypt(walk.iv, ks, &ctx->enc.rk);
		if (tdst != tsrc)
			memcpy(tdst, tsrc, nbytes);
		crypto_xor(tdst, ks, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}
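/*
 * For XTS, the initial tweak is derived by encrypting the IV with the
 * second-half key (twkey); the per-block tweak updates happen inside the
 * NEON routines, which are handed walk.iv as the running tweak.
 */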
static int aesbs_xts_encrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	/* generate the initial tweak */
	AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

	while (walk.nbytes) {
		kernel_neon_begin();
		bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->enc, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}
static int aesbs_xts_decrypt(struct blkcipher_desc *desc,
			     struct scatterlist *dst,
			     struct scatterlist *src, unsigned int nbytes)
{
	struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

	/* generate the initial tweak */
	AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

	while (walk.nbytes) {
		kernel_neon_begin();
		bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
				  walk.nbytes, &ctx->dec, walk.iv);
		kernel_neon_end();
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	return err;
}
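/*
 * The "__*-aes-neonbs" entries are the synchronous NEON implementations.
 * The plain cbc(aes)/ctr(aes)/xts(aes) entries are ablk_helper wrappers
 * around them, which defer to a cryptd worker when the NEON unit cannot be
 * used in the calling context.
 */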
static struct crypto_alg aesbs_algs[] = { {
	.cra_name		= "__cbc-aes-neonbs",
	.cra_driver_name	= "__driver-cbc-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesbs_cbc_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesbs_cbc_set_key,
		.encrypt	= aesbs_cbc_encrypt,
		.decrypt	= aesbs_cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-neonbs",
	.cra_driver_name	= "__driver-ctr-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct aesbs_ctr_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesbs_ctr_set_key,
		.encrypt	= aesbs_ctr_encrypt,
		.decrypt	= aesbs_ctr_encrypt,
	},
}, {
	.cra_name		= "__xts-aes-neonbs",
	.cra_driver_name	= "__driver-xts-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesbs_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesbs_xts_set_key,
		.encrypt	= aesbs_xts_encrypt,
		.decrypt	= aesbs_xts_decrypt,
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= __ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-neonbs",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
} };
static int __init aesbs_mod_init(void)
{
	/* bail out if the CPU does not support NEON */
	if (!cpu_has_neon())
		return -ENODEV;

	return crypto_register_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}

static void __exit aesbs_mod_exit(void)
{
	crypto_unregister_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}

module_init(aesbs_mod_init);
module_exit(aesbs_mod_exit);

MODULE_DESCRIPTION("Bit sliced AES in CBC/CTR/XTS modes using NEON");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL");