/*
 * aes-ce-glue.c - wrapper code for ARMv8 AES
 *
 * Copyright (C) 2015 Linaro Ltd <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/module.h>
#include <crypto/xts.h>

MODULE_DESCRIPTION("AES-ECB/CBC/CTR/XTS using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

/* defined in aes-ce-core.S */
asmlinkage u32 ce_aes_sub(u32 input);
asmlinkage void ce_aes_invert(void *dst, void *src);

asmlinkage void ce_aes_ecb_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks);
asmlinkage void ce_aes_ecb_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks);

asmlinkage void ce_aes_cbc_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[]);
asmlinkage void ce_aes_cbc_decrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 iv[]);

asmlinkage void ce_aes_ctr_encrypt(u8 out[], u8 const in[], u8 const rk[],
				   int rounds, int blocks, u8 ctr[]);

asmlinkage void ce_aes_xts_encrypt(u8 out[], u8 const in[], u8 const rk1[],
				   int rounds, int blocks, u8 iv[],
				   u8 const rk2[], int first);
asmlinkage void ce_aes_xts_decrypt(u8 out[], u8 const in[], u8 const rk1[],
				   int rounds, int blocks, u8 iv[],
				   u8 const rk2[], int first);

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
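
/*
 * Expand the supplied key into the encryption and decryption round-key
 * schedules, using the ce_aes_sub() and ce_aes_invert() helpers from
 * aes-ce-core.S for the SubWord and Inverse MixColumns steps.
 */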
static int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
			    unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	memcpy(ctx->key_enc, in_key, key_len);
	ctx->key_length = key_len;

	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

#ifndef CONFIG_CPU_BIG_ENDIAN
		rko[0] = ror32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ rcon[i];
#else
		rko[0] = rol32(ce_aes_sub(rki[kwords - 1]), 8);
		rko[0] = rko[0] ^ rki[0] ^ (rcon[i] << 24);
#endif
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = ce_aes_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		ce_aes_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}
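
/* skcipher .setkey callback: expand the key and flag bad key lengths. */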
static int ce_aes_setkey(struct crypto_skcipher *tfm, const u8 *in_key,
			 unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = ce_aes_expandkey(ctx, in_key, key_len);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}

struct crypto_aes_xts_ctx {
	struct crypto_aes_ctx key1;
	struct crypto_aes_ctx __aligned(8) key2;
};
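
/*
 * XTS uses a double-length key: the first half keys the data cipher (key1),
 * the second half keys the tweak cipher (key2).
 */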
static int xts_set_key(struct crypto_skcipher *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int ret;

	ret = xts_verify_key(tfm, in_key, key_len);
	if (ret)
		return ret;

	ret = ce_aes_expandkey(&ctx->key1, in_key, key_len / 2);
	if (!ret)
		ret = ce_aes_expandkey(&ctx->key2, &in_key[key_len / 2],
				       key_len / 2);
	if (!ret)
		return 0;

	crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
	return -EINVAL;
}
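
/*
 * The mode handlers below all follow the same pattern: walk the request's
 * scatterlists, process the bulk of each step as full AES blocks with the
 * NEON unit enabled, and report any remainder back via skcipher_walk_done().
 */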
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_dec, num_rounds(ctx), blocks,
				   walk.iv);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();
	return err;
}
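
/*
 * CTR is a stream cipher mode: a trailing partial block is handled by
 * generating one extra block of keystream into a stack buffer and XORing
 * only the remaining bytes into the destination.
 */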
static int ctr_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	int err, blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key_enc, num_rounds(ctx), blocks,
				   walk.iv);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	if (walk.nbytes) {
		u8 __aligned(8) tail[AES_BLOCK_SIZE];
		unsigned int nbytes = walk.nbytes;
		u8 *tdst = walk.dst.virt.addr;
		u8 *tsrc = walk.src.virt.addr;

		/*
		 * Tell ce_aes_ctr_encrypt() to process a tail block.
		 */
		blocks = -1;

		ce_aes_ctr_encrypt(tail, NULL, (u8 *)ctx->key_enc,
				   num_rounds(ctx), blocks, walk.iv);
		memcpy(tdst, tsrc, nbytes);
		crypto_xor(tdst, tail, nbytes);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_neon_end();

	return err;
}
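
/*
 * XTS: key1 processes the data blocks while key2 encrypts the IV to produce
 * the tweak, which is why the decrypt path still passes key2.key_enc.
 * 'first' tells the core routine to do that tweak computation on the first
 * iteration of the walk only.
 */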
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key1.key_enc, rounds, blocks,
				   walk.iv, (u8 *)ctx->key2.key_enc, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err, first, rounds = num_rounds(&ctx->key1);
	struct skcipher_walk walk;
	unsigned int blocks;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_neon_begin();
	for (first = 1; (blocks = (walk.nbytes / AES_BLOCK_SIZE)); first = 0) {
		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
				   (u8 *)ctx->key1.key_dec, rounds, blocks,
				   walk.iv, (u8 *)ctx->key2.key_enc, first);
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	kernel_neon_end();

	return err;
}
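
/*
 * The "__" prefix and CRYPTO_ALG_INTERNAL mark these algorithms as internal:
 * they must only be invoked when the NEON unit is usable. aes_init() wraps
 * each of them in a simd skcipher that falls back to an asynchronous helper
 * (cryptd) whenever NEON is not available in the calling context.
 */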
static struct skcipher_alg aes_algs[] = { {
	.base = {
		.cra_name		= "__ecb(aes)",
		.cra_driver_name	= "__ecb-aes-ce",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.setkey		= ce_aes_setkey,
	.encrypt	= ecb_encrypt,
	.decrypt	= ecb_decrypt,
}, {
	.base = {
		.cra_name		= "__cbc(aes)",
		.cra_driver_name	= "__cbc-aes-ce",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= ce_aes_setkey,
	.encrypt	= cbc_encrypt,
	.decrypt	= cbc_decrypt,
}, {
	.base = {
		.cra_name		= "__ctr(aes)",
		.cra_driver_name	= "__ctr-aes-ce",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= AES_MIN_KEY_SIZE,
	.max_keysize	= AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.chunksize	= AES_BLOCK_SIZE,
	.setkey		= ce_aes_setkey,
	.encrypt	= ctr_encrypt,
	.decrypt	= ctr_encrypt,	/* CTR decryption is the same operation */
}, {
	.base = {
		.cra_name		= "__xts(aes)",
		.cra_driver_name	= "__xts-aes-ce",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= AES_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct crypto_aes_xts_ctx),
		.cra_module		= THIS_MODULE,
	},
	.min_keysize	= 2 * AES_MIN_KEY_SIZE,
	.max_keysize	= 2 * AES_MAX_KEY_SIZE,
	.ivsize		= AES_BLOCK_SIZE,
	.setkey		= xts_set_key,
	.encrypt	= xts_encrypt,
	.decrypt	= xts_decrypt,
} };

static struct simd_skcipher_alg *aes_simd_algs[ARRAY_SIZE(aes_algs)];

static void aes_exit(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aes_simd_algs) && aes_simd_algs[i]; i++)
		simd_skcipher_free(aes_simd_algs[i]);
	crypto_unregister_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
}
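
/*
 * Register the internal algorithms, then create the simd wrappers that
 * expose them under the usual "ecb(aes)", "cbc(aes)", "ctr(aes)" and
 * "xts(aes)" names. Bail out if the CPU does not advertise the AES
 * crypto extensions.
 */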
static int __init aes_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!(elf_hwcap2 & HWCAP2_AES))
		return -ENODEV;

	err = crypto_register_skciphers(aes_algs, ARRAY_SIZE(aes_algs));
	if (err)
		return err;

	for (i = 0; i < ARRAY_SIZE(aes_algs); i++) {
		algname = aes_algs[i].base.cra_name + 2;
		drvname = aes_algs[i].base.cra_driver_name + 2;
		basename = aes_algs[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aes_simd_algs[i] = simd;
	}

	return 0;

unregister_simds:
	aes_exit();
	return err;
}

module_init(aes_init);
module_exit(aes_exit);