/*
 * linux/arch/arm/crypto/aesbs-glue.c - glue code for NEON bit sliced AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <crypto/xts.h>

#include "aes_glue.h"
#define BIT_SLICED_KEY_MAXSIZE	(128 * (AES_MAXNR - 1) + 2 * AES_BLOCK_SIZE)

struct BS_KEY {
        struct AES_KEY rk;
        int converted;
        u8 __aligned(8) bs[BIT_SLICED_KEY_MAXSIZE];
} __aligned(8);
asmlinkage void bsaes_enc_key_convert(u8 out[], struct AES_KEY const *in);
asmlinkage void bsaes_dec_key_convert(u8 out[], struct AES_KEY const *in);
asmlinkage void bsaes_cbc_encrypt(u8 const in[], u8 out[], u32 bytes,
                                  struct BS_KEY *key, u8 iv[]);
asmlinkage void bsaes_ctr32_encrypt_blocks(u8 const in[], u8 out[], u32 blocks,
                                           struct BS_KEY *key, u8 const iv[]);
asmlinkage void bsaes_xts_encrypt(u8 const in[], u8 out[], u32 bytes,
                                  struct BS_KEY *key, u8 tweak[]);
asmlinkage void bsaes_xts_decrypt(u8 const in[], u8 out[], u32 bytes,
                                  struct BS_KEY *key, u8 tweak[]);
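/*
 * Per-mode key contexts: CBC keeps a scalar AES key for the sequential
 * encrypt path and a bit-sliced key for NEON decryption, CTR needs only a
 * bit-sliced encryption key, and XTS carries bit-sliced keys for both
 * directions plus a scalar key for the tweak.
 */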
struct aesbs_cbc_ctx {
        struct AES_KEY enc;
        struct BS_KEY dec;
};

struct aesbs_ctr_ctx {
        struct BS_KEY enc;
};

struct aesbs_xts_ctx {
        struct BS_KEY enc;
        struct BS_KEY dec;
        struct AES_KEY twkey;
};
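/*
 * Key setup runs only the scalar key schedule; the bit-sliced form is
 * generated later, when the NEON code first uses the key, so set_key just
 * clears the 'converted' flag.
 */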
static int aesbs_cbc_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                             unsigned int key_len)
{
        struct aesbs_cbc_ctx *ctx = crypto_tfm_ctx(tfm);
        int bits = key_len * 8;

        if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        ctx->dec.rk = ctx->enc;
        private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
        ctx->dec.converted = 0;
        return 0;
}
static int aesbs_ctr_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                             unsigned int key_len)
{
        struct aesbs_ctr_ctx *ctx = crypto_tfm_ctx(tfm);
        int bits = key_len * 8;

        if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        ctx->enc.converted = 0;
        return 0;
}
static int aesbs_xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                             unsigned int key_len)
{
        struct aesbs_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        int bits = key_len * 4;	/* the XTS key is two keys of half the length */
        int err;

        err = xts_check_key(tfm, in_key, key_len);
        if (err)
                return err;

        if (private_AES_set_encrypt_key(in_key, bits, &ctx->enc.rk)) {
                tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }
        ctx->dec.rk = ctx->enc.rk;
        private_AES_set_decrypt_key(in_key, bits, &ctx->dec.rk);
        private_AES_set_encrypt_key(in_key + key_len / 2, bits, &ctx->twkey);
        ctx->enc.converted = ctx->dec.converted = 0;
        return 0;
}
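/*
 * CBC encryption is inherently sequential (each block depends on the
 * previous ciphertext block), so it is done one block at a time with the
 * scalar AES_encrypt() rather than the bit-sliced NEON code.
 */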
static int aesbs_cbc_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while (walk.nbytes) {
                u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *src = walk.src.virt.addr;

                if (walk.dst.virt.addr == walk.src.virt.addr) {
                        u8 *iv = walk.iv;

                        /* in place: each encrypted block becomes the next IV */
                        do {
                                crypto_xor(src, iv, AES_BLOCK_SIZE);
                                AES_encrypt(src, src, &ctx->enc);
                                iv = src;
                                src += AES_BLOCK_SIZE;
                        } while (--blocks);
                        memcpy(walk.iv, iv, AES_BLOCK_SIZE);
                } else {
                        u8 *dst = walk.dst.virt.addr;

                        do {
                                crypto_xor(walk.iv, src, AES_BLOCK_SIZE);
                                AES_encrypt(walk.iv, dst, &ctx->enc);
                                memcpy(walk.iv, dst, AES_BLOCK_SIZE);
                                src += AES_BLOCK_SIZE;
                                dst += AES_BLOCK_SIZE;
                        } while (--blocks);
                }
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}
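/*
 * CBC decryption can be parallelised: chunks of eight or more blocks are
 * handed to the bit-sliced NEON code, and any remainder is handled by the
 * scalar fallback loop below.
 */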
static int aesbs_cbc_decrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_cbc_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

        while ((walk.nbytes / AES_BLOCK_SIZE) >= 8) {
                kernel_neon_begin();
                bsaes_cbc_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                  walk.nbytes, &ctx->dec, walk.iv);
                kernel_neon_end();
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        while (walk.nbytes) {
                u32 blocks = walk.nbytes / AES_BLOCK_SIZE;
                u8 *dst = walk.dst.virt.addr;
                u8 *src = walk.src.virt.addr;
                u8 bk[2][AES_BLOCK_SIZE];
                u8 *iv = walk.iv;

                do {
                        /*
                         * For in-place decryption, save the ciphertext before
                         * it is overwritten: it is the IV of the next block.
                         */
                        if (walk.dst.virt.addr == walk.src.virt.addr)
                                memcpy(bk[blocks & 1], src, AES_BLOCK_SIZE);

                        AES_decrypt(src, dst, &ctx->dec.rk);
                        crypto_xor(dst, iv, AES_BLOCK_SIZE);

                        if (walk.dst.virt.addr == walk.src.virt.addr)
                                iv = bk[blocks & 1];
                        else
                                iv = src;

                        dst += AES_BLOCK_SIZE;
                        src += AES_BLOCK_SIZE;
                } while (--blocks);
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}
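/* add 'addend' to the 128-bit big-endian counter, propagating any carry */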
static void inc_be128_ctr(__be32 ctr[], u32 addend)
{
        int i;

        for (i = 3; i >= 0; i--, addend = 1) {
                u32 n = be32_to_cpu(ctr[i]) + addend;

                ctr[i] = cpu_to_be32(n);
                if (n >= addend)
                        break;
        }
}
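/*
 * CTR encryption: whole blocks are processed by the NEON code; a trailing
 * partial block is handled by encrypting the counter with the scalar cipher
 * and XORing the resulting keystream into the data.
 */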
static int aesbs_ctr_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst, struct scatterlist *src,
                             unsigned int nbytes)
{
        struct aesbs_ctr_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        u32 blocks;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

        while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
                u32 tail = walk.nbytes % AES_BLOCK_SIZE;
                __be32 *ctr = (__be32 *)walk.iv;
                u32 headroom = UINT_MAX - be32_to_cpu(ctr[3]);

                /* avoid 32 bit counter overflow in the NEON code */
                if (unlikely(headroom < blocks)) {
                        blocks = headroom + 1;
                        tail = walk.nbytes - blocks * AES_BLOCK_SIZE;
                }

                kernel_neon_begin();
                bsaes_ctr32_encrypt_blocks(walk.src.virt.addr,
                                           walk.dst.virt.addr, blocks,
                                           &ctx->enc, walk.iv);
                kernel_neon_end();
                inc_be128_ctr(ctr, blocks);

                nbytes -= blocks * AES_BLOCK_SIZE;
                if (nbytes && nbytes == tail && nbytes <= AES_BLOCK_SIZE)
                        break;

                err = blkcipher_walk_done(desc, &walk, tail);
        }
        if (walk.nbytes) {
                u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
                u8 ks[AES_BLOCK_SIZE];

                /* partial final block: generate one keystream block and XOR */
                AES_encrypt(walk.iv, ks, &ctx->enc.rk);
                if (tdst != tsrc)
                        memcpy(tdst, tsrc, nbytes);
                crypto_xor(tdst, ks, nbytes);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        return err;
}
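/*
 * XTS: the initial tweak is computed by encrypting the IV with the tweak
 * key; the NEON code updates the tweak in walk.iv as it consumes the data.
 */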
static int aesbs_xts_encrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

        /* generate the initial tweak */
        AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

        while (walk.nbytes) {
                kernel_neon_begin();
                bsaes_xts_encrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                  walk.nbytes, &ctx->enc, walk.iv);
                kernel_neon_end();
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}
static int aesbs_xts_decrypt(struct blkcipher_desc *desc,
                             struct scatterlist *dst,
                             struct scatterlist *src, unsigned int nbytes)
{
        struct aesbs_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, 8 * AES_BLOCK_SIZE);

        /* generate the initial tweak */
        AES_encrypt(walk.iv, walk.iv, &ctx->twkey);

        while (walk.nbytes) {
                kernel_neon_begin();
                bsaes_xts_decrypt(walk.src.virt.addr, walk.dst.virt.addr,
                                  walk.nbytes, &ctx->dec, walk.iv);
                kernel_neon_end();
                err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
        }
        return err;
}
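/*
 * Six registrations: the "__" prefixed blkcipher entries are the internal
 * synchronous NEON implementations, and the plain cbc/ctr/xts(aes) entries
 * are ablk_helper based async wrappers around them.
 */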
static struct crypto_alg aesbs_algs[] = { {
        .cra_name               = "__cbc-aes-neonbs",
        .cra_driver_name        = "__driver-cbc-aes-neonbs",
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct aesbs_cbc_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesbs_cbc_set_key,
                .encrypt        = aesbs_cbc_encrypt,
                .decrypt        = aesbs_cbc_decrypt,
        },
}, {
        .cra_name               = "__ctr-aes-neonbs",
        .cra_driver_name        = "__driver-ctr-aes-neonbs",
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct aesbs_ctr_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesbs_ctr_set_key,
                .encrypt        = aesbs_ctr_encrypt,
                .decrypt        = aesbs_ctr_encrypt,
        },
}, {
        .cra_name               = "__xts-aes-neonbs",
        .cra_driver_name        = "__driver-xts-aes-neonbs",
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct aesbs_xts_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_blkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = aesbs_xts_set_key,
                .encrypt        = aesbs_xts_encrypt,
                .decrypt        = aesbs_xts_decrypt,
        },
}, {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-neonbs",
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = __ablk_encrypt,
                .decrypt        = ablk_decrypt,
        },
}, {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-neonbs",
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = AES_MIN_KEY_SIZE,
                .max_keysize    = AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        },
}, {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-neonbs",
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_ablkcipher = {
                .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                .ivsize         = AES_BLOCK_SIZE,
                .setkey         = ablk_set_key,
                .encrypt        = ablk_encrypt,
                .decrypt        = ablk_decrypt,
        },
} };
static int __init aesbs_mod_init(void)
{
        if (!cpu_has_neon())
                return -ENODEV;

        return crypto_register_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}
static void __exit aesbs_mod_exit(void)
{
        crypto_unregister_algs(aesbs_algs, ARRAY_SIZE(aesbs_algs));
}
module_init(aesbs_mod_init);
module_exit(aesbs_mod_exit);
MODULE_DESCRIPTION("Bit sliced AES in CBC/CTR/XTS modes using NEON");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL");