/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/ablk_helper.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#ifdef CONFIG_X86_64
#include <asm/crypto/glue_helper.h>
#endif
#define AESNI_ALIGN	16
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
/* This data is stored at the end of the crypto_tfm struct.
 * It is a per-"session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	struct crypto_aes_ctx aes_key_expanded
		__attribute__ ((__aligned__(AESNI_ALIGN)));
	u8 nonce[4];
};
struct aesni_gcm_set_hash_subkey_result {
	int err;
	struct completion completion;
};

struct aesni_hash_subkey_req_data {
	u8 iv[16];
	struct aesni_gcm_set_hash_subkey_result result;
	struct scatterlist sg;
};
struct aesni_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

int crypto_fpu_init(void);
void crypto_fpu_exit(void);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
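
/*
 * AVX_GEN2_OPTSIZE and AVX_GEN4_OPTSIZE are the request-size thresholds used
 * by the dispatch wrappers below to choose between the SSE, AVX (gen2) and
 * AVX2 (gen4) GCM routines.
 */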
static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this
 *         is going to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
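
/*
 * The aesni_gcm_{enc,dec}_avx() wrappers fall back to the SSE
 * aesni_gcm_enc()/aesni_gcm_dec() routines for requests shorter than
 * AVX_GEN2_OPTSIZE bytes or for keys other than AES-128, since the AVX
 * gen2 code only handles 128-bit keys and only pays off on larger buffers.
 */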
static void aesni_gcm_enc_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
				aad, aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);
asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
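
/*
 * The avx2 wrappers pick one of three implementations by request size and
 * key length: the SSE routines for short requests or non-128-bit keys, the
 * AVX gen2 routines up to AVX_GEN4_OPTSIZE bytes, and the AVX2 gen4 routines
 * beyond that.
 */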
static void aesni_gcm_enc_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
				aad_len, auth_tag, auth_tag_len);
	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
				aad, aad_len, auth_tag, auth_tag_len);
	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
					aad_len, auth_tag, auth_tag_len);
	}
}
#endif
static void (*aesni_gcm_enc_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
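
/*
 * The raw_*_ctx buffers and the AEAD context are over-allocated by
 * AESNI_ALIGN - 1 bytes; the helpers below round the pointer up so that the
 * AES key schedule handed to the assembly always starts on a 16-byte
 * boundary, even when the generic tfm context is only
 * crypto_tfm_ctx_alignment() aligned.
 */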
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
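
/*
 * The setkey and single-block cipher paths below may run in contexts where
 * the FPU/SIMD registers cannot be used (irq_fpu_usable() is false); in that
 * case they fall back to crypto_aes_expand_key() and
 * crypto_aes_{en,de}crypt_x86() instead of the AES-NI assembly.
 */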
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}
static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
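
/*
 * ctr_crypt_final() handles the trailing partial block of a CTR request: it
 * encrypts the current counter block once with aesni_enc(), XORs only the
 * remaining nbytes of keystream into the output, and then bumps the counter.
 */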
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance.
	 * aes_set_key_common() ensures that key length is one of
	 * {128, 192, 256} bits.
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_fpu_end();

	return err;
}
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
}

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
}

static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
}
#if IS_ENABLED(CONFIG_CRYPTO_PCBC)
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
}
#endif
static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_enc(ctx, blks, blks, nbytes);
}

static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_dec(ctx, blks, blks, nbytes);
}
static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key,
				 keylen - AES_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE);
}
static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2,
				  keylen / 2);
}
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}
#ifdef CONFIG_X86_64
static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}

#else
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
#endif
static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}
static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}
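
/*
 * The GHASH subkey H is AES_K(0^128). rfc4106_set_hash_subkey() derives it
 * by zeroing the hash_subkey buffer and running a one-block "ctr(aes)"
 * encryption over it with an all-zero IV, so the buffer ends up holding the
 * AES encryption of the zero block.
 */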
static void
rfc4106_set_hash_subkey_done(struct crypto_async_request *req, int err)
{
	struct aesni_gcm_set_hash_subkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;
	result->err = err;
	complete(&result->completion);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_ablkcipher *ctr_tfm;
	struct ablkcipher_request *req;
	int ret = -EINVAL;
	struct aesni_hash_subkey_req_data *req_data;

	ctr_tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
	if (IS_ERR(ctr_tfm))
		return PTR_ERR(ctr_tfm);

	ret = crypto_ablkcipher_setkey(ctr_tfm, key, key_len);
	if (ret)
		goto out_free_ablkcipher;

	ret = -ENOMEM;
	req = ablkcipher_request_alloc(ctr_tfm, GFP_KERNEL);
	if (!req)
		goto out_free_ablkcipher;

	req_data = kmalloc(sizeof(*req_data), GFP_KERNEL);
	if (!req_data)
		goto out_free_request;

	memset(req_data->iv, 0, sizeof(req_data->iv));

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	init_completion(&req_data->result.completion);
	sg_init_one(&req_data->sg, hash_subkey, RFC4106_HASH_SUBKEY_SIZE);
	ablkcipher_request_set_tfm(req, ctr_tfm);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					rfc4106_set_hash_subkey_done,
					&req_data->result);

	ablkcipher_request_set_crypt(req, &req_data->sg,
		&req_data->sg, RFC4106_HASH_SUBKEY_SIZE, req_data->iv);

	ret = crypto_ablkcipher_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible
			(&req_data->result.completion);
		if (!ret)
			ret = req_data->result.err;
	}
	kfree(req_data);
out_free_request:
	ablkcipher_request_free(req);
out_free_ablkcipher:
	crypto_free_ablkcipher(ctr_tfm);
	return ret;
}
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for the 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
			   unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) length
 * and can be 8, 12 or 16 bytes long. */
static int rfc4106_set_authsize(struct crypto_aead *parent,
				unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}
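
/*
 * The helper_rfc4106_{en,de}crypt() routines build the 16-byte pre-counter
 * block j0 described above aesni_gcm_enc(): bytes 0-3 are the nonce taken
 * from the key, bytes 4-11 are the 8-byte per-request IV, and bytes 12-15
 * hold the big-endian counter value 1.
 */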
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, we need to have the AAD length equal
	 * to 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if (sg_is_last(req->src) &&
	    req->src->offset + req->src->length <= PAGE_SIZE &&
	    sg_is_last(req->dst) &&
	    req->dst->offset + req->dst->length <= PAGE_SIZE) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
				GFP_ATOMIC);
		if (unlikely(!assoc))
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_enc_tfm(aes_ctx, dst, src, req->cryptlen, iv,
			  ctx->hash_subkey, assoc, req->assoclen - 8,
			  dst + req->cryptlen, auth_tag_len);
	kernel_fpu_end();

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 req->cryptlen + auth_tag_len, 1);
		kfree(assoc);
	}
	return 0;
}
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	__be32 counter = cpu_to_be32(1);
	int retval = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	u8 authTag[16];
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, we need to have the AAD length
	 * equal to 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);
	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if (sg_is_last(req->src) &&
	    req->src->offset + req->src->length <= PAGE_SIZE &&
	    sg_is_last(req->dst) &&
	    req->dst->offset + req->dst->length <= PAGE_SIZE) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (!assoc)
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
			  ctx->hash_subkey, assoc, req->assoclen - 8,
			  authTag, auth_tag_len);
	kernel_fpu_end();

	/* Compare generated tag with passed in tag. */
	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
		 -EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 tempCipherLen, 1);
		kfree(assoc);
	}
	return retval;
}
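
/*
 * The rfc4106_{en,de}crypt() entry points route the request to the inner
 * synchronous helper when the FPU is usable in the current context, and
 * otherwise to the cryptd-backed async instance (&cryptd_tfm->base), which
 * defers the work to process context.
 */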
static int rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	aead_request_set_tfm(req, irq_fpu_usable() ?
				  cryptd_aead_child(cryptd_tfm) :
				  &cryptd_tfm->base);

	return crypto_aead_encrypt(req);
}
static int rfc4106_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	aead_request_set_tfm(req, irq_fpu_usable() ?
				  cryptd_aead_child(cryptd_tfm) :
				  &cryptd_tfm->base);

	return crypto_aead_decrypt(req);
}
static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
= "__aes-aesni",
1148 .cra_driver_name
= "__driver-aes-aesni",
1150 .cra_flags
= CRYPTO_ALG_TYPE_CIPHER
| CRYPTO_ALG_INTERNAL
,
1151 .cra_blocksize
= AES_BLOCK_SIZE
,
1152 .cra_ctxsize
= sizeof(struct crypto_aes_ctx
) +
1155 .cra_module
= THIS_MODULE
,
1158 .cia_min_keysize
= AES_MIN_KEY_SIZE
,
1159 .cia_max_keysize
= AES_MAX_KEY_SIZE
,
1160 .cia_setkey
= aes_set_key
,
1161 .cia_encrypt
= __aes_encrypt
,
1162 .cia_decrypt
= __aes_decrypt
}, {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#ifdef CONFIG_X86_64
}, {
	.cra_name		= "__ctr-aes-aesni",
	.cra_driver_name	= "__driver-ctr-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
		},
	},
#endif
#if IS_ENABLED(CONFIG_CRYPTO_PCBC)
}, {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#endif
}, {
	.cra_name		= "__lrw-aes-aesni",
	.cra_driver_name	= "__driver-lrw-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_lrw_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_aesni_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= lrw_aesni_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-aes-aesni",
	.cra_driver_name	= "__driver-xts-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aesni_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
#ifdef CONFIG_X86_64
static struct aead_alg aesni_aead_algs[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= 8,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm-aes-aesni",
		.cra_driver_name	= "__driver-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= rfc4106_init,
	.exit			= rfc4106_exit,
	.setkey			= rfc4106_set_key,
	.setauthsize		= rfc4106_set_authsize,
	.encrypt		= rfc4106_encrypt,
	.decrypt		= rfc4106_decrypt,
	.ivsize			= 8,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "rfc4106(gcm(aes))",
		.cra_driver_name	= "rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
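
/*
 * At module load, aesni_init() points aesni_gcm_{enc,dec}_tfm at the widest
 * implementation the CPU supports (AVX2, then AVX, then the SSE baseline)
 * and, when AVX is available, switches the CTR transform to the by8 AVX
 * routine.
 */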
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		goto fpu_exit;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_algs;

	return 0;

unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
	crypto_fpu_exit();
	return err;
}
static void __exit aesni_exit(void)
{
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

	crypto_fpu_exit();
}
late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");