// SPDX-License-Identifier: GPL-2.0-only
/*
 * aes-ce-cipher.c - core AES cipher using ARMv8 Crypto Extensions
 *
 * Copyright (C) 2013 - 2017 Linaro Ltd <ard.biesheuvel@linaro.org>
 */

#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/aes.h>
#include <crypto/internal/simd.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

#include "aes-ce-setkey.h"

MODULE_DESCRIPTION("Synchronous AES cipher using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
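
/*
 * The __aes_ce_* helpers below are implemented in assembly using the ARMv8
 * Crypto Extensions AES instructions, which operate on NEON registers; they
 * may only be called between kernel_neon_begin() and kernel_neon_end().
 */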

struct aes_block {
	u8 b[AES_BLOCK_SIZE];
};

asmlinkage void __aes_ce_encrypt(u32 *rk, u8 *out, const u8 *in, int rounds);
asmlinkage void __aes_ce_decrypt(u32 *rk, u8 *out, const u8 *in, int rounds);

asmlinkage u32 __aes_ce_sub(u32 l);
asmlinkage void __aes_ce_invert(struct aes_block *out,
				const struct aes_block *in);

static int num_rounds(struct crypto_aes_ctx *ctx)
{
	/*
	 * # of rounds specified by AES:
	 * 128 bit key		10 rounds
	 * 192 bit key		12 rounds
	 * 256 bit key		14 rounds
	 * => n byte key	=> 6 + (n/4) rounds
	 */
	return 6 + ctx->key_length / 4;
}
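
/*
 * E.g. a 32-byte (AES-256) key gives 6 + 32/4 = 14 rounds, matching the
 * table above.
 */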

static void aes_cipher_encrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);
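
	/*
	 * The NEON register file is not usable in every context (e.g. in
	 * hard IRQ context), so fall back to the generic AES library code
	 * when kernel-mode SIMD cannot be used.
	 */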

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
		return;
	}

	kernel_neon_begin();
	__aes_ce_encrypt(ctx->key_enc, dst, src, num_rounds(ctx));
	kernel_neon_end();
}

static void aes_cipher_decrypt(struct crypto_tfm *tfm, u8 dst[], u8 const src[])
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
		return;
	}

	kernel_neon_begin();
	__aes_ce_decrypt(ctx->key_dec, dst, src, num_rounds(ctx));
	kernel_neon_end();
}

int ce_aes_expandkey(struct crypto_aes_ctx *ctx, const u8 *in_key,
		     unsigned int key_len)
{
	/*
	 * The AES key schedule round constants
	 */
	static u8 const rcon[] = {
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x1b, 0x36,
	};

	u32 kwords = key_len / sizeof(u32);
	struct aes_block *key_enc, *key_dec;
	int i, j;

	if (key_len != AES_KEYSIZE_128 &&
	    key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	ctx->key_length = key_len;
	for (i = 0; i < kwords; i++)
		ctx->key_enc[i] = get_unaligned_le32(in_key + i * sizeof(u32));
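
	/*
	 * Expand the remaining round keys: each iteration of the loop below
	 * derives the next 'kwords' words of the schedule, using the
	 * __aes_ce_sub() helper together with ror32() to apply the S-box
	 * substitution and rotation to the last word of the previous round
	 * key, as the AES key schedule requires.
	 */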

	kernel_neon_begin();
	for (i = 0; i < sizeof(rcon); i++) {
		u32 *rki = ctx->key_enc + (i * kwords);
		u32 *rko = rki + kwords;

		rko[0] = ror32(__aes_ce_sub(rki[kwords - 1]), 8) ^ rcon[i] ^ rki[0];
		rko[1] = rko[0] ^ rki[1];
		rko[2] = rko[1] ^ rki[2];
		rko[3] = rko[2] ^ rki[3];

		if (key_len == AES_KEYSIZE_192) {
			if (i >= 7)
				break;
			rko[4] = rko[3] ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
		} else if (key_len == AES_KEYSIZE_256) {
			if (i >= 6)
				break;
			rko[4] = __aes_ce_sub(rko[3]) ^ rki[4];
			rko[5] = rko[4] ^ rki[5];
			rko[6] = rko[5] ^ rki[6];
			rko[7] = rko[6] ^ rki[7];
		}
	}
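
	/*
	 * A worked example: a 16-byte (AES-128) key has kwords == 4, so the
	 * loop above runs all sizeof(rcon) == 10 iterations and emits ten
	 * further round keys, giving the 44 words (11 round keys) that the
	 * 10-round cipher consumes.
	 */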

	/*
	 * Generate the decryption keys for the Equivalent Inverse Cipher.
	 * This involves reversing the order of the round keys, and applying
	 * the Inverse Mix Columns transformation on all but the first and
	 * the last one.
	 */
	key_enc = (struct aes_block *)ctx->key_enc;
	key_dec = (struct aes_block *)ctx->key_dec;
	j = num_rounds(ctx);

	key_dec[0] = key_enc[j];
	for (i = 1, j--; j > 0; i++, j--)
		__aes_ce_invert(key_dec + i, key_enc + j);
	key_dec[i] = key_enc[0];

	kernel_neon_end();
	return 0;
}
EXPORT_SYMBOL(ce_aes_expandkey);

int ce_aes_setkey(struct crypto_tfm *tfm, const u8 *in_key,
		  unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = crypto_tfm_ctx(tfm);

	return ce_aes_expandkey(ctx, in_key, key_len);
}
EXPORT_SYMBOL(ce_aes_setkey);

static struct crypto_alg aes_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-ce",
	.cra_priority		= 250,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx),
	.cra_module		= THIS_MODULE,
	.cra_cipher = {
		.cia_min_keysize	= AES_MIN_KEY_SIZE,
		.cia_max_keysize	= AES_MAX_KEY_SIZE,
		.cia_setkey		= ce_aes_setkey,
		.cia_encrypt		= aes_cipher_encrypt,
		.cia_decrypt		= aes_cipher_decrypt
	}
};
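
/*
 * Minimal usage sketch (not part of this driver): a kernel user would reach
 * this implementation through the single-block cipher API, e.g. (error
 * handling elided; 'key', 'dst' and 'src' are caller-supplied buffers):
 *
 *	struct crypto_cipher *tfm = crypto_alloc_cipher("aes", 0, 0);
 *
 *	crypto_cipher_setkey(tfm, key, AES_KEYSIZE_128);
 *	crypto_cipher_encrypt_one(tfm, dst, src);
 *	crypto_free_cipher(tfm);
 *
 * The crypto core picks the highest-priority registered "aes"
 * implementation, so "aes-ce" is preferred over aes-generic wherever the
 * CPU supports it.
 */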

static int __init aes_mod_init(void)
{
	return crypto_register_alg(&aes_alg);
}

static void __exit aes_mod_exit(void)
{
	crypto_unregister_alg(&aes_alg);
}
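
/*
 * module_cpu_feature_match() registers aes_mod_init() as the module init
 * routine and ties module autoloading to the CPU's AES feature bit, so this
 * driver is only loaded on cores that implement the AES instructions.
 */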
module_cpu_feature_match(AES, aes_mod_init);
module_exit(aes_mod_exit);