/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <asm/crypto/glue_helper.h>
#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
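
/*
 * Worked example of the alignment bookkeeping above (illustrative only;
 * the concrete numbers assume CRYPTO_MINALIGN == 8): AESNI_ALIGN_EXTRA is
 * then 15 & ~7 = 8, so CRYPTO_AES_CTX_SIZE reserves sizeof(struct
 * crypto_aes_ctx) plus 8 spare bytes. The context returned by
 * crypto_tfm_ctx() is only guaranteed to be CRYPTO_MINALIGN-aligned, so at
 * most that many extra bytes are needed to round the pointer up to a
 * 16-byte boundary; aes_ctx() below performs that rounding.
 */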
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};
struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};
struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
int crypto_fpu_init(void);
void crypto_fpu_exit(void);
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
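
/*
 * Byte-length thresholds for the GCM dispatchers below: requests shorter
 * than AVX_GEN2_OPTSIZE (or using a non-128-bit key) take the SSE
 * aesni_gcm_enc()/aesni_gcm_dec() path, and requests shorter than
 * AVX_GEN4_OPTSIZE take the AVX gen2 path; see aesni_gcm_enc_avx() and
 * aesni_gcm_enc_avx2().
 */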
static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len, Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
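
/*
 * Minimal usage sketch (illustrative only; the buffer names are made up,
 * while aesni_gcm_enc() and the j0 layout come from this file). The caller
 * builds the pre-counter block and passes a 16-byte aligned key schedule:
 *
 *	u8 iv[16] AESNI_ALIGN_ATTR;
 *	u8 tag[16];
 *
 *	memcpy(iv, nonce_and_iv, 12);		// 96-bit IV
 *	*(__be32 *)(iv + 12) = cpu_to_be32(1);	// counter = 0x00000001
 *	aesni_gcm_enc(aes_ctx, dst, src, src_len, iv, hash_subkey,
 *		      aad, aad_len, tag, sizeof(tag));
 *
 * helper_rfc4106_encrypt() below constructs its IV exactly this way.
 */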
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len, Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);
asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);
asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static void aesni_gcm_enc_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, out, in, plaintext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
static void aesni_gcm_dec_avx2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, out, in, ciphertext_len, iv, hash_subkey,
			      aad, aad_len, auth_tag, auth_tag_len);
	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
#endif
static void (*aesni_gcm_enc_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}
static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128,192,256}
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}
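
/*
 * Illustrative example (not additional code in this driver): a 64-byte
 * xts(aes) key carries two independent AES-256 keys. With keylen halved
 * to 32 above, bytes 0..31 become the data-encryption key expanded into
 * raw_crypt_ctx and bytes 32..63 become the tweak-encryption key expanded
 * into raw_tweak_ctx. xts_verify_key() additionally rejects keys whose
 * two halves are identical when FIPS mode is enabled.
 */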
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}
static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}
static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}
static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/* Account for 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
			   unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
/* This is the Integrity Check Value (aka the authentication tag) length
 * and can be 8, 12 or 16 bytes long. */
static int rfc4106_set_authsize(struct crypto_aead *parent,
				unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}
static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};

	if (sg_is_last(req->src) &&
	    (!PageHighMem(sg_page(req->src)) ||
	    req->src->offset + req->src->length <= PAGE_SIZE) &&
	    sg_is_last(req->dst) &&
	    (!PageHighMem(sg_page(req->dst)) ||
	    req->dst->offset + req->dst->length <= PAGE_SIZE)) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
			GFP_ATOMIC);
		if (unlikely(!assoc))
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_enc_tfm(aes_ctx, dst, src, req->cryptlen, iv,
			  hash_subkey, assoc, assoclen,
			  dst + req->cryptlen, auth_tag_len);
	kernel_fpu_end();

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 req->cryptlen + auth_tag_len, 1);
		kfree(assoc);
	}
	return 0;
}
static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 authTag[16];
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	int retval = 0;

	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);

	if (sg_is_last(req->src) &&
	    (!PageHighMem(sg_page(req->src)) ||
	    req->src->offset + req->src->length <= PAGE_SIZE) &&
	    sg_is_last(req->dst) &&
	    (!PageHighMem(sg_page(req->dst)) ||
	    req->dst->offset + req->dst->length <= PAGE_SIZE)) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (!assoc)
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_dec_tfm(aes_ctx, dst, src, tempCipherLen, iv,
			  hash_subkey, assoc, assoclen,
			  authTag, auth_tag_len);
	kernel_fpu_end();

	/* Compare generated tag with passed in tag. */
	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
		 -EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 tempCipherLen, 1);
		kfree(assoc);
	}
	return retval;
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers. We need to have the AAD length equal */
	/* to 16 or 20 bytes. */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
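
/*
 * Layout of the 16-byte pre-counter block built above (a restatement of
 * the code, not an additional requirement): bytes 0-3 hold the salt/nonce
 * taken from the end of the RFC4106 key, bytes 4-11 hold the 8-byte
 * per-request IV from req->iv, and bytes 12-15 hold the initial counter
 * value 0x00000001 in big-endian form. helper_rfc4106_decrypt() below
 * builds the identical block.
 */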
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers. We need to have the AAD length */
	/* equal to 16 or 20 bytes. */

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_encrypt(req);
}
static int rfc4106_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_decrypt(req);
}
static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes",
	.cra_driver_name	= "__aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
} };
static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 401,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};
static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

static struct {
	const char *algname;
	const char *drvname;
	const char *basename;
	struct simd_skcipher_alg *simd;
} aesni_simd_skciphers2[] = {
#if (defined(MODULE) && IS_ENABLED(CONFIG_CRYPTO_PCBC)) || \
    IS_BUILTIN(CONFIG_CRYPTO_PCBC)
	{
		.algname	= "pcbc(aes)",
		.drvname	= "pcbc-aes-aesni",
		.basename	= "fpu(pcbc(__aes-aesni))",
	},
#endif
};
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct aead_alg aesni_aead_algs[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.base = {
		.cra_name		= "__gcm-aes-aesni",
		.cra_driver_name	= "__driver-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= rfc4106_init,
	.exit			= rfc4106_exit,
	.setkey			= rfc4106_set_key,
	.setauthsize		= rfc4106_set_authsize,
	.encrypt		= rfc4106_encrypt,
	.decrypt		= rfc4106_decrypt,
	.base = {
		.cra_name		= "rfc4106(gcm(aes))",
		.cra_driver_name	= "rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static void aesni_free_simds(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
		    aesni_simd_skciphers[i]; i++)
		simd_skcipher_free(aesni_simd_skciphers[i]);

	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++)
		if (aesni_simd_skciphers2[i].simd)
			simd_skcipher_free(aesni_simd_skciphers2[i].simd);
}
static int __init aesni_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_fpu_init();
	if (err)
		return err;

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		goto fpu_exit;

	err = crypto_register_skciphers(aesni_skciphers,
					ARRAY_SIZE(aesni_skciphers));
	if (err)
		goto unregister_algs;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_skciphers;

	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
		algname = aesni_skciphers[i].base.cra_name + 2;
		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
		basename = aesni_skciphers[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aesni_simd_skciphers[i] = simd;
	}

	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers2); i++) {
		algname = aesni_simd_skciphers2[i].algname;
		drvname = aesni_simd_skciphers2[i].drvname;
		basename = aesni_simd_skciphers2[i].basename;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			continue;

		aesni_simd_skciphers2[i].simd = simd;
	}

	return 0;

unregister_simds:
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
fpu_exit:
	crypto_fpu_exit();
	return err;
}
static void __exit aesni_exit(void)
{
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

	crypto_fpu_exit();
}
late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");