/*
 * Bit sliced AES using NEON instructions
 *
 * Copyright (C) 2016 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <crypto/xts.h>
#include <linux/module.h>

#include "aes-ctr-fallback.h"

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ecb(aes)");
MODULE_ALIAS_CRYPTO("cbc(aes)");
MODULE_ALIAS_CRYPTO("ctr(aes)");
MODULE_ALIAS_CRYPTO("xts(aes)");
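
/*
 * These transforms are reached by name through the generic crypto API.
 * A hypothetical usage sketch from another kernel module (error handling
 * elided):
 *
 *	struct crypto_skcipher *tfm;
 *
 *	tfm = crypto_alloc_skcipher("ctr(aes)", 0, 0);
 *	crypto_skcipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	...
 *	crypto_free_skcipher(tfm);
 */

/* the routines below are implemented in NEON assembly (aes-neonbs-core.S) */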
asmlinkage void aesbs_convert_key(u8 out[], u32 const rk[], int rounds);

asmlinkage void aesbs_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);
asmlinkage void aesbs_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks);

asmlinkage void aesbs_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

asmlinkage void aesbs_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[], u8 final[]);

asmlinkage void aesbs_xts_encrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);
asmlinkage void aesbs_xts_decrypt(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]);

/* borrowed from aes-neon-blk.ko */
asmlinkage void neon_aes_ecb_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks);
asmlinkage void neon_aes_cbc_encrypt(u8 out[], u8 const in[], u32 const rk[],
				     int rounds, int blocks, u8 iv[]);
struct aesbs_ctx {
	u8	rk[13 * (8 * AES_BLOCK_SIZE) + 32];
	int	rounds;
} __aligned(AES_BLOCK_SIZE);

struct aesbs_cbc_ctx {
	struct aesbs_ctx	key;
	u32			enc[AES_MAX_KEYLENGTH_U32];
};

struct aesbs_ctr_ctx {
	struct aesbs_ctx	key;		/* must be first member */
	struct crypto_aes_ctx	fallback;
};

struct aesbs_xts_ctx {
	struct aesbs_ctx	key;
	u32			twkey[AES_MAX_KEYLENGTH_U32];
};
static int aesbs_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			unsigned int key_len)
{
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = crypto_aes_expand_key(&rk, in_key, key_len);
	if (err)
		return err;
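
	/* 10, 12 or 14 rounds for 128, 192 or 256 bit keys */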
	ctx->rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->rk, rk.key_enc, ctx->rounds);
	kernel_neon_end();

	return 0;
}
static int __ecb_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
		   ctx->rounds, blocks);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}

static int ecb_encrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_encrypt);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return __ecb_crypt(req, aesbs_ecb_decrypt);
}
static int aesbs_cbc_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;

	err = crypto_aes_expand_key(&rk, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	memcpy(ctx->enc, rk.key_enc, sizeof(ctx->enc));

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, rk.key_enc, ctx->key.rounds);
	kernel_neon_end();

	return 0;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
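
		/*
		 * CBC encryption is inherently sequential (each block
		 * depends on the previous ciphertext block), so the
		 * eight-way bit-sliced code cannot be used here.
		 */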
		/* fall back to the non-bitsliced NEON implementation */
		kernel_neon_begin();
		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				     ctx->enc, ctx->key.rounds, blocks,
				     walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_cbc_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->key.rk, ctx->key.rounds, blocks,
				  walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
static int aesbs_ctr_setkey_sync(struct crypto_skcipher *tfm, const u8 *in_key,
				 unsigned int key_len)
{
	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = crypto_aes_expand_key(&ctx->fallback, in_key, key_len);
	if (err)
		return err;

	ctx->key.rounds = 6 + key_len / 4;

	kernel_neon_begin();
	aesbs_convert_key(ctx->key.rk, ctx->fallback.key_enc, ctx->key.rounds);
	kernel_neon_end();

	return 0;
}
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	u8 buf[AES_BLOCK_SIZE];
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;
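
		/*
		 * If the request ends in a partial block, the assembly
		 * writes the keystream for that block into 'buf'; this
		 * may only happen in the final iteration of the walk.
		 */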
		if (walk.nbytes < walk.total) {
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);
			final = NULL;
		}

		kernel_neon_begin();
		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
		kernel_neon_end();

		if (final) {
			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;

			crypto_xor_cpy(dst, src, final,
				       walk.total % AES_BLOCK_SIZE);

			err = skcipher_walk_done(&walk, 0);
			break;
		}
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}
	return err;
}
static int aesbs_xts_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			    unsigned int key_len)
{
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct crypto_aes_ctx rk;
	int err;
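
	/*
	 * An XTS key is two AES keys of equal size concatenated: the first
	 * half encrypts the data, the second half generates the tweak.
	 */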
	err = xts_verify_key(tfm, in_key, key_len);
	if (err)
		return err;

	key_len /= 2;
	err = crypto_aes_expand_key(&rk, in_key + key_len, key_len);
	if (err)
		return err;

	memcpy(ctx->twkey, rk.key_enc, sizeof(ctx->twkey));

	return aesbs_setkey(tfm, in_key, key_len);
}
static int ctr_encrypt_sync(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_ctr_ctx *ctx = crypto_skcipher_ctx(tfm);
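
	/*
	 * The NEON register file may be off limits here (e.g. in hardirq
	 * context), in which case the scalar fallback must be used.
	 */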
	if (!may_use_simd())
		return aes_ctr_encrypt_fallback(&ctx->fallback, req);

	return ctr_encrypt(req);
}
static int __xts_crypt(struct skcipher_request *req,
		       void (*fn)(u8 out[], u8 const in[], u8 const rk[],
				  int rounds, int blocks, u8 iv[]))
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesbs_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
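
	/* encrypt the IV with the tweak key to produce the initial tweak */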
	kernel_neon_begin();
	neon_aes_ecb_encrypt(walk.iv, walk.iv, ctx->twkey, ctx->key.rounds, 1);
	kernel_neon_end();

	while (walk.nbytes >= AES_BLOCK_SIZE) {
		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;

		if (walk.nbytes < walk.total)
			blocks = round_down(blocks,
					    walk.stride / AES_BLOCK_SIZE);

		kernel_neon_begin();
		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
		   ctx->key.rounds, blocks, walk.iv);
		kernel_neon_end();
		err = skcipher_walk_done(&walk,
					 walk.nbytes - blocks * AES_BLOCK_SIZE);
	}

	return err;
}
static int xts_encrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, aesbs_xts_encrypt);
}

static int xts_decrypt(struct skcipher_request *req)
{
	return __xts_crypt(req, aesbs_xts_decrypt);
}
static struct skcipher_alg aes_algs[] = { {
	.base.cra_name		= "__ecb(aes)",
	.base.cra_driver_name	= "__ecb-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ecb_encrypt,
	.decrypt		= ecb_decrypt,
}, {
	.base.cra_name		= "__cbc(aes)",
	.base.cra_driver_name	= "__cbc-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_cbc_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_cbc_setkey,
	.encrypt		= cbc_encrypt,
	.decrypt		= cbc_decrypt,
}, {
	.base.cra_name		= "__ctr(aes)",
	.base.cra_driver_name	= "__ctr-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_setkey,
	.encrypt		= ctr_encrypt,
	.decrypt		= ctr_encrypt,
}, {
	.base.cra_name		= "ctr(aes)",
	.base.cra_driver_name	= "ctr-aes-neonbs",
	.base.cra_priority	= 250 - 1,
	.base.cra_blocksize	= 1,
	.base.cra_ctxsize	= sizeof(struct aesbs_ctr_ctx),
	.base.cra_module	= THIS_MODULE,

	.min_keysize		= AES_MIN_KEY_SIZE,
	.max_keysize		= AES_MAX_KEY_SIZE,
	.chunksize		= AES_BLOCK_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_ctr_setkey_sync,
	.encrypt		= ctr_encrypt_sync,
	.decrypt		= ctr_encrypt_sync,
}, {
	.base.cra_name		= "__xts(aes)",
	.base.cra_driver_name	= "__xts-aes-neonbs",
	.base.cra_priority	= 250,
	.base.cra_blocksize	= AES_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct aesbs_xts_ctx),
	.base.cra_module	= THIS_MODULE,
	.base.cra_flags		= CRYPTO_ALG_INTERNAL,

	.min_keysize		= 2 * AES_MIN_KEY_SIZE,
	.max_keysize		= 2 * AES_MAX_KEY_SIZE,
	.walksize		= 8 * AES_BLOCK_SIZE,
	.ivsize			= AES_BLOCK_SIZE,
	.setkey			= aesbs_xts_setkey,
	.encrypt		= xts_encrypt,
	.decrypt		= xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];
static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs); i++)
		if (aes_simd_algs[i])
			simd_skcipher_free(aes_simd_algs[i]);

	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!(elf_hwcap & HWCAP_ASIMD))
		return -ENODEV;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		if (!(aes_algs[i].base.cra_flags & CRYPTO_ALG_INTERNAL))
			continue;
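
		/*
		 * Wrap each internal algorithm in a simd helper that queues
		 * requests to cryptd when NEON is unavailable; strip the
		 * "__" prefix to form the public names.
		 */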
		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}
	return 0;

unregister_simds:
	aes_exit();
	return err;
}

module_init(aes_init);
module_exit(aes_exit);