/*
 * linux/arch/arm64/crypto/aes-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2013 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/hwcap.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>
#include <linux/cpufeature.h>

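/*
 * This glue file is built twice: once with USE_V8_CRYPTO_EXTENSIONS
 * defined, binding the aes_* entry points to the ce_aes_* Crypto
 * Extensions implementation, and once without, binding them to the
 * neon_aes_* fallback. Only the #defines below differ between builds.
 */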
#ifdef USE_V8_CRYPTO_EXTENSIONS
#define MODE			"ce"
#define PRIO			300
#define aes_ecb_encrypt		ce_aes_ecb_encrypt
#define aes_ecb_decrypt		ce_aes_ecb_decrypt
#define aes_cbc_encrypt		ce_aes_cbc_encrypt
#define aes_cbc_decrypt		ce_aes_cbc_decrypt
#define aes_ctr_encrypt		ce_aes_ctr_encrypt
#define aes_xts_encrypt		ce_aes_xts_encrypt
#define aes_xts_decrypt		ce_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
#else
#define MODE			"neon"
#define PRIO			200
#define aes_ecb_encrypt		neon_aes_ecb_encrypt
#define aes_ecb_decrypt		neon_aes_ecb_decrypt
#define aes_cbc_encrypt		neon_aes_cbc_encrypt
#define aes_cbc_decrypt		neon_aes_cbc_decrypt
#define aes_ctr_encrypt		neon_aes_ctr_encrypt
#define aes_xts_encrypt		neon_aes_xts_encrypt
#define aes_xts_decrypt		neon_aes_xts_decrypt
MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 NEON");
MODULE_ALIAS("ecb(aes)");
MODULE_ALIAS("cbc(aes)");
MODULE_ALIAS("ctr(aes)");
MODULE_ALIAS("xts(aes)");
#endif

MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

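/*
 * The asm routines below take pre-expanded round keys (rk/rk1/rk2) and a
 * round count derived from the key length. The 'first' flag tells the asm
 * code whether this is the first call in a sequence, so it can perform
 * one-time setup such as loading the IV; any state it keeps resident in
 * NEON registers between calls stays valid because each sequence runs
 * inside a single kernel_neon_begin()/kernel_neon_end() section.
 */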
/* defined in aes-modes.S */
asmlinkage void aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);
asmlinkage void aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, int first);

asmlinkage void aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);
asmlinkage void aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 iv[], int first);

asmlinkage void aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				int rounds, int blocks, u8 ctr[], int first);

asmlinkage void aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);
asmlinkage void aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				int rounds, int blocks, u8 const rk2[], u8 iv[],
				int first);

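/*
 * XTS takes a double-length key: the first half keys the data cipher
 * (key1), the second half keys the tweak cipher (key2). Note that the
 * tweak is always computed with the *encryption* round keys of key2,
 * even when decrypting.
 */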
struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = crypto_aes_expand_key(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = crypto_aes_expand_key(&ctx->key2, &in_key[key_len / 2],
					    key_len / 2);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

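/*
 * All modes follow the same pattern: map each scatterlist chunk via the
 * blkcipher walk, process whole blocks in the asm routine, and hand any
 * sub-block remainder back to blkcipher_walk_done(). The round count
 * follows from the key length in bytes: 6 + 16/4 = 10 rounds for AES-128,
 * 6 + 24/4 = 12 for AES-192, 6 + 32/4 = 14 for AES-256.
 */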
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	/* the walk runs inside a kernel_neon_begin() section, so don't sleep */
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

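/*
 * For CBC the chaining value lives in walk.iv; the asm code chains
 * through it (keeping it live in NEON registers between calls), so the
 * updated IV carries over from one walk chunk to the next.
 */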
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_dec, rounds, blocks, walk.iv,
				first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

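/*
 * CTR is a stream cipher, so the final block may be partial. Whole blocks
 * are bulk-processed as usual; a trailing partial block is encrypted into
 * a stack buffer and only 'nbytes' of it are copied out.
 */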
static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key_length / 4;
	struct blkcipher_walk walk;
	int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

	first = 1;
	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key_enc, rounds, blocks, walk.iv,
				first);
		first = 0;
		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
			break;
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	if (nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 __aligned(8) tail[AES_BLOCK_SIZE];

		/*
		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
		 * to tell aes_ctr_encrypt() to only read half a block.
		 */
		blocks = (nbytes <= 8) ? -1 : 1;

		aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc, rounds,
				blocks, walk.iv, first);
		memcpy(tdst, tail, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

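/*
 * On the first call the asm code derives the initial XTS tweak by
 * encrypting the IV with key2; subsequent calls continue from the tweak
 * state left behind by the previous call.
 */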
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_enc, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = 6 + ctx->key1.key_length / 4;
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				(u8 *)ctx->key1.key_dec, rounds, blocks,
				(u8 *)ctx->key2.key_enc, walk.iv, first);
		err = blkcipher_walk_done(desc, &walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

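/*
 * The "__driver-..." blkcipher algs below are the synchronous inner
 * implementations; they are wrapped by the ablk_helper-based async algs
 * further down, which are the ones exposed to users at priority PRIO.
 */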
static struct crypto_alg aes_algs[] = { {
	.cra_name		= "__ecb-aes-" MODE,
	.cra_driver_name	= "__driver-ecb-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	},
}, {
	.cra_name		= "__cbc-aes-" MODE,
	.cra_driver_name	= "__driver-cbc-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-" MODE,
	.cra_driver_name	= "__driver-ctr-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= crypto_aes_set_key,
		.encrypt	= ctr_encrypt,
		.decrypt	= ctr_encrypt,	/* CTR decryption == encryption */
	},
}, {
	.cra_name		= "__xts-aes-" MODE,
	.cra_driver_name	= "__driver-xts-aes-" MODE,
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_set_key,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-" MODE,
	.cra_priority		= PRIO,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 7,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
} };

static int __init aes_init(void)
{
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

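/*
 * The Crypto Extensions build registers via the CPU feature framework so
 * the module autoloads only on cores that advertise the AES instructions;
 * the NEON build uses a plain module_init().
 */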
#ifdef USE_V8_CRYPTO_EXTENSIONS
module_cpu_feature_match(AES, aes_init);
#else
module_init(aes_init);
#endif
module_exit(aes_exit);