/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <linux/module.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

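/*
 * ce_aes_sub() applies the AES SubBytes substitution to each byte of a
 * 32-bit word (the SubWord step of key expansion), and ce_aes_invert()
 * applies the Inverse MixColumns transform to a single round key block.
 * Both are only used by the key expansion code below.
 */
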
asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				   int rounds, int blocks, u8 iv[],
				   u8 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				   int rounds, int blocks, u8 iv[],
				   u8 const rk2[], int first);

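/*
 * For the XTS routines, rk1 carries the round keys used on the data blocks
 * and rk2 the round keys of the tweak key; on the first call (first != 0)
 * the core code is expected to derive the initial tweak by encrypting the
 * IV with rk2.
 */
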
struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}

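/*
 * ce_aes_expandkey() implements the FIPS-197 key expansion, using the
 * ce_aes_sub()/ce_aes_invert() helpers above so that the S-box and Inverse
 * MixColumns steps are performed by the Crypto Extensions rather than by
 * in-memory lookup tables.
 */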
static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants: successive powers of x
	 * (0x02) in GF(2^8).
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key_enc, in_key, key_len);
	ctx->key_length = key_len;

	kernel_neon_begin();
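	/*
	 * Expand the key schedule one round at a time: each iteration derives
	 * the next 'kwords' words from the previous ones by rotating and
	 * substituting the last word and folding in the round constant, as
	 * described in FIPS-197.
	 */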
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}

static int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};

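/*
 * XTS uses two independent AES keys: key1 operates on the data blocks and
 * key2 is only used to turn the IV into the initial tweak, so xts_set_key()
 * below splits the supplied key material in half.
 */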
static int xts_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	if (!ret)
		return 0;

	tfm->crt_flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
	return -EINVAL;
}

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	unsigned int blocks;
	int err;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks,
				   walk.iv);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ctr_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int err, blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		nbytes -= blocks * AES_BLOCK_SIZE;
		if (nbytes && nbytes == walk.nbytes % AES_BLOCK_SIZE)
			break;
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
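	/*
	 * CTR is a stream cipher, so the request need not be a multiple of
	 * the block size. Handle a trailing partial block by encrypting it
	 * into a stack buffer and copying out only the bytes that are needed.
	 */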
	if (nbytes) {
		u8 *tdst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 *tsrc = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
		u8 __aligned(8) tail[AES_BLOCK_SIZE];

		/*
		 * Minimum alignment is 8 bytes, so if nbytes is <= 8, we need
		 * to tell aes_ctr_encrypt() to only read half a block.
		 */
		blocks = (nbytes <= 8) ? -1 : 1;

		ce_aes_ctr_encrypt(tail, tsrc, (u8 *)ctx->key_enc,
				   num_rounds(ctx), blocks, walk.iv);
		memcpy(tdst, tail, nbytes);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_neon_end();

	return err;
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key1.key_enc, rounds, blocks,
				   walk.iv, (u8 *)ctx->key2.key_enc, first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct crypto_aes_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	struct blkcipher_walk walk;
	unsigned int blocks;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key1.key_dec, rounds, blocks,
				   walk.iv, (u8 *)ctx->key2.key_enc, first);
		err = blkcipher_walk_done(desc, &walk,
					  walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

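/*
 * Two variants of each mode are registered: synchronous blkcipher
 * implementations (the "__" prefixed names, marked CRYPTO_ALG_INTERNAL)
 * that drive the Crypto Extensions directly, and the public ablkcipher
 * wrappers provided by ablk_helper, which fall back to cryptd when the
 * NEON unit cannot be used from the calling context.
 */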
static struct crypto_alg aes_algs[] = { {
	.cra_name		= "__ecb-aes-ce",
	.cra_driver_name	= "__driver-ecb-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ce_aes_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	},
}, {
	.cra_name		= "__cbc-aes-ce",
	.cra_driver_name	= "__driver-cbc-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ce_aes_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
	},
}, {
	.cra_name		= "__ctr-aes-ce",
	.cra_driver_name	= "__driver-ctr-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ce_aes_setkey,
		.encrypt	= ctr_encrypt,
		.decrypt	= ctr_encrypt,
	},
}, {
	.cra_name		= "__xts-aes-ce",
	.cra_driver_name	= "__driver-xts-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_blkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_set_key,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-ce",
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_ablkcipher = {
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= ablk_set_key,
		.encrypt	= ablk_encrypt,
		.decrypt	= ablk_decrypt,
	},
} };

static int __init aes_init(void)
{
	if (!(elf_hwcap2 & HWCAP2_AES))
		return -ENODEV;
	return crypto_register_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

static void __exit aes_exit(void)
{
	crypto_unregister_algs(aes_algs, ARRAY_SIZE(aes_algs));
}

module_init(aes_init);
module_exit(aes_exit);