/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/fpu/api.h>
#include <asm/crypto/aes.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <asm/crypto/glue_helper.h>

#define AESNI_ALIGN	16
#define AESNI_ALIGN_ATTR __attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA ((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE (sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE (sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
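
/*
 * The generic crypto API only guarantees CRYPTO_MINALIGN alignment for the
 * tfm context, so the context sizes above include AESNI_ALIGN_EXTRA slack
 * bytes; the pointer is re-aligned to a 16 byte boundary at runtime (see
 * aes_ctx() below).
 */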

/*
 * This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};

struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};

struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};

#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u8 hash_keys[GCM_BLOCK_LEN * 8];
};
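
/*
 * gcm_context_data is passed to, and updated by, the assembly GCM routines
 * declared below; it carries the running hash and counter state between the
 * init/update/finalize calls of the scatter/gather interface.
 */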

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096

static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);

/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *          Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *          to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *          Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
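
/*
 * The scatter/gather entry points above are used as a three step sequence,
 * mirroring gcmaes_crypt_by_sg() further down (sketch only):
 *
 *	aesni_gcm_init(aes_ctx, &data, iv, hash_subkey, assoc, assoclen);
 *	while (data left)
 *		aesni_gcm_enc_update(aes_ctx, &data, dst, src, len);
 *	aesni_gcm_finalize(aes_ctx, &data, auth_tag, auth_tag_len);
 */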

asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
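
/*
 * The *_avx_by8 routines above are the AVX "by8" CTR implementations; the
 * suffix presumably refers to the number of blocks handled per loop iteration
 * in the assembly. One of them is selected per key size in
 * aesni_ctr_enc_avx_tfm() below.
 */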

/*
 * asmlinkage void aesni_gcm_precomp_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen2(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void aesni_gcm_enc_avx(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, data, out, in,
			      plaintext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}

static void aesni_gcm_dec_avx(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, data, out, in,
			      ciphertext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
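
/*
 * The wrappers above (and the AVX2 ones below) choose between the SSE and
 * AVX/AVX2 assembly: the AVX paths are only used above AVX_GEN2_OPTSIZE (640)
 * resp. AVX_GEN4_OPTSIZE (4096) bytes, and only for 128-bit keys; everything
 * else falls back to aesni_gcm_enc()/aesni_gcm_dec().
 */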

#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_precomp_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_precomp_avx_gen4(void *my_ctx_data, u8 *hash_subkey);

asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);

static void aesni_gcm_enc_avx2(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((plaintext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_enc(ctx, data, out, in,
			      plaintext_len, iv, hash_subkey, aad,
			      aad_len, auth_tag, auth_tag_len);
	} else if (plaintext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen2(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_enc_avx_gen4(ctx, out, in, plaintext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}

static void aesni_gcm_dec_avx2(void *ctx,
			struct gcm_context_data *data, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len)
{
	struct crypto_aes_ctx *aes_ctx = (struct crypto_aes_ctx *)ctx;

	if ((ciphertext_len < AVX_GEN2_OPTSIZE) ||
	    (aes_ctx->key_length != AES_KEYSIZE_128)) {
		aesni_gcm_dec(ctx, data, out, in,
			      ciphertext_len, iv, hash_subkey,
			      aad, aad_len, auth_tag, auth_tag_len);
	} else if (ciphertext_len < AVX_GEN4_OPTSIZE) {
		aesni_gcm_precomp_avx_gen2(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen2(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	} else {
		aesni_gcm_precomp_avx_gen4(ctx, hash_subkey);
		aesni_gcm_dec_avx_gen4(ctx, out, in, ciphertext_len, iv, aad,
				       aad_len, auth_tag, auth_tag_len);
	}
}
#endif

static void (*aesni_gcm_enc_tfm)(void *ctx,
				 struct gcm_context_data *data, u8 *out,
				 const u8 *in, unsigned long plaintext_len,
				 u8 *iv, u8 *hash_subkey, const u8 *aad,
				 unsigned long aad_len, u8 *auth_tag,
				 unsigned long auth_tag_len);

static void (*aesni_gcm_dec_tfm)(void *ctx,
				 struct gcm_context_data *data, u8 *out,
				 const u8 *in, unsigned long ciphertext_len,
				 u8 *iv, u8 *hash_subkey, const u8 *aad,
				 unsigned long aad_len, u8 *auth_tag,
				 unsigned long auth_tag_len);
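
/*
 * aesni_gcm_enc_tfm/aesni_gcm_dec_tfm (and aesni_ctr_enc_tfm above) are
 * filled in by aesni_init() with the SSE, AVX or AVX2 implementation,
 * depending on the CPU features detected at module load time.
 */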

static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;

	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
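
/*
 * aes_set_key_common(), aes_encrypt() and aes_decrypt() may be called from
 * contexts where the FPU/SIMD registers cannot be used (irq_fpu_usable()
 * returns false); there they fall back to the non-AES-NI routines instead of
 * the AES-NI instructions, which need kernel_fpu_begin()/kernel_fpu_end().
 */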

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}

static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}

static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}

static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
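
/*
 * ctr_crypt_final() handles the trailing partial block of a CTR request: it
 * encrypts the current counter block, XORs the keystream into the remaining
 * bytes, and then bumps the counter.
 */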

static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128,192,256}
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}

static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}

static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}

static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}

static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}

static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req,
				   XTS_TWEAK_CAST(aesni_xts_tweak),
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx));
}

static int rfc4106_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));
	return 0;
}

static void rfc4106_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_cipher *tfm;
	int ret;

	tfm = crypto_alloc_cipher("aes", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	ret = crypto_cipher_setkey(tfm, key, key_len);
	if (ret)
		goto out_free_cipher;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	crypto_cipher_encrypt_one(tfm, hash_subkey, hash_subkey);

out_free_cipher:
	crypto_free_cipher(tfm);
	return ret;
}

static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4) {
		crypto_aead_set_flags(aead, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/*Account for 4 byte nonce at the end.*/
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int gcmaes_wrapper_set_key(struct crypto_aead *parent, const u8 *key,
				  unsigned int key_len)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setkey(&cryptd_tfm->base, key, key_len);
}

static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	/* valid ICV lengths for rfc4106, per the comment below */
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}

/* This is the Integrity Check Value (aka the authentication tag) length and
 * can be 8, 12 or 16 bytes long. */
static int gcmaes_wrapper_set_authsize(struct crypto_aead *parent,
				       unsigned int authsize)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(parent);
	struct cryptd_aead *cryptd_tfm = *ctx;

	return crypto_aead_setauthsize(&cryptd_tfm->base, authsize);
}

static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	/* same tag lengths as the generic gcm(aes) implementation */
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
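
/*
 * gcmaes_crypt_by_sg() drives the scatter/gather assembly interface directly:
 * it linearizes the AAD (copying it to a heap buffer when it is not already
 * contiguous), calls aesni_gcm_init(), walks the source/destination
 * scatterlists feeding each mapped chunk to aesni_gcm_enc_update() or
 * aesni_gcm_dec_update(), and computes the tag with aesni_gcm_finalize();
 * for decryption the computed tag is compared against the one in the request,
 * for encryption it is appended to the output.
 */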
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;

	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}

	src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
	scatterwalk_start(&src_sg_walk, src_sg);
	if (req->src != req->dst) {
		dst_sg = scatterwalk_ffwd(dst_start, req->dst, req->assoclen);
		scatterwalk_start(&dst_sg_walk, dst_sg);
	}

	kernel_fpu_begin();
	aesni_gcm_init(aes_ctx, &data, iv,
		       hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);
			if (len) {
				if (enc)
					aesni_gcm_enc_update(aes_ctx, &data,
							     dst, src, len);
				else
					aesni_gcm_dec_update(aes_ctx, &data,
							     dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);
			if (len) {
				if (enc)
					aesni_gcm_enc_update(aes_ctx, &data,
							     src, src, len);
				else
					aesni_gcm_dec_update(aes_ctx, &data,
							     src, src, len);
			}
			left -= len;
			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	aesni_gcm_finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();

	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}
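
/*
 * gcmaes_encrypt()/gcmaes_decrypt() below keep the older linearizing code
 * path: anything that is not a 128-bit key, or that runs on the plain SSE
 * implementation, or that is shorter than AVX_GEN2_OPTSIZE, is handed to
 * gcmaes_crypt_by_sg() instead; the remaining large AVX requests are either
 * mapped as, or copied into, one contiguous buffer and processed with a
 * single aesni_gcm_enc_tfm()/aesni_gcm_dec_tfm() call.
 */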
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	struct gcm_context_data data AESNI_ALIGN_ATTR;

	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
	    req->cryptlen < AVX_GEN2_OPTSIZE) {
		return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
					  aes_ctx);
	}
	if (sg_is_last(req->src) &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE) &&
	    sg_is_last(req->dst) &&
	    (!PageHighMem(sg_page(req->dst)) ||
	     req->dst->offset + req->dst->length <= PAGE_SIZE)) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
				GFP_ATOMIC);
		if (unlikely(!assoc))
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_enc_tfm(aes_ctx, &data, dst, src, req->cryptlen, iv,
			  hash_subkey, assoc, assoclen,
			  dst + req->cryptlen, auth_tag_len);
	kernel_fpu_end();

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 req->cryptlen + auth_tag_len, 1);
		kfree(assoc);
	}
	return 0;
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 authTag[16];
	struct scatter_walk src_sg_walk;
	struct scatter_walk dst_sg_walk = {};
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	int retval = 0;

	if (((struct crypto_aes_ctx *)aes_ctx)->key_length != AES_KEYSIZE_128 ||
	    aesni_gcm_enc_tfm == aesni_gcm_enc ||
	    req->cryptlen < AVX_GEN2_OPTSIZE) {
		return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
					  aes_ctx);
	}
	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);

	if (sg_is_last(req->src) &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE) &&
	    sg_is_last(req->dst) && req->dst->length &&
	    (!PageHighMem(sg_page(req->dst)) ||
	     req->dst->offset + req->dst->length <= PAGE_SIZE)) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		assoc = scatterwalk_map(&src_sg_walk);
		src = assoc + req->assoclen;
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk) + req->assoclen;
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		assoc = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (!assoc)
			return -ENOMEM;
		scatterwalk_map_and_copy(assoc, req->src, 0,
					 req->assoclen + req->cryptlen, 0);
		src = assoc + req->assoclen;
		dst = src;
	}

	kernel_fpu_begin();
	aesni_gcm_dec_tfm(aes_ctx, &data, dst, src, tempCipherLen, iv,
			  hash_subkey, assoc, assoclen,
			  authTag, auth_tag_len);
	kernel_fpu_end();

	/* Compare generated tag with passed in tag. */
	retval = crypto_memneq(src + tempCipherLen, authTag, auth_tag_len) ?
		-EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst - req->assoclen);
			scatterwalk_advance(&dst_sg_walk, req->dst->length);
			scatterwalk_done(&dst_sg_walk, 1, 0);
		}
		scatterwalk_unmap(assoc);
		scatterwalk_advance(&src_sg_walk, req->src->length);
		scatterwalk_done(&src_sg_walk, req->src == req->dst, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, req->assoclen,
					 tempCipherLen, 1);
		kfree(assoc);
	}
	return retval;
}

static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers We need to have the AAD length equal */
	/* to 16 or 20 bytes */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers We need to have the AAD length */
	/* equal to 16 or 20 bytes */

	/* IV below built */
	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int gcmaes_wrapper_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_encrypt(req);
}

static int gcmaes_wrapper_decrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct cryptd_aead **ctx = crypto_aead_ctx(tfm);
	struct cryptd_aead *cryptd_tfm = *ctx;

	tfm = &cryptd_tfm->base;
	if (irq_fpu_usable() && (!in_atomic() ||
				 !cryptd_aead_queued(cryptd_tfm)))
		tfm = cryptd_aead_child(cryptd_tfm);

	aead_request_set_tfm(req, tfm);

	return crypto_aead_decrypt(req);
}

static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes",
	.cra_driver_name	= "__aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER | CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
} };

static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= 1,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 401,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};

static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];

#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}

static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}

static int generic_gcmaes_init(struct crypto_aead *aead)
{
	struct cryptd_aead *cryptd_tfm;
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_tfm = cryptd_alloc_aead("__driver-generic-gcm-aes-aesni",
				       CRYPTO_ALG_INTERNAL,
				       CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	*ctx = cryptd_tfm;
	crypto_aead_set_reqsize(aead, crypto_aead_reqsize(&cryptd_tfm->base));

	return 0;
}

static void generic_gcmaes_exit(struct crypto_aead *aead)
{
	struct cryptd_aead **ctx = crypto_aead_ctx(aead);

	cryptd_free_aead(*ctx);
}

static struct aead_alg aesni_aead_algs[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm-aes-aesni",
		.cra_driver_name	= "__driver-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= rfc4106_init,
	.exit			= rfc4106_exit,
	.setkey			= gcmaes_wrapper_set_key,
	.setauthsize		= gcmaes_wrapper_set_authsize,
	.encrypt		= gcmaes_wrapper_encrypt,
	.decrypt		= gcmaes_wrapper_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "rfc4106(gcm(aes))",
		.cra_driver_name	= "rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__generic-gcm-aes-aesni",
		.cra_driver_name	= "__driver-generic-gcm-aes-aesni",
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.init			= generic_gcmaes_init,
	.exit			= generic_gcmaes_exit,
	.setkey			= gcmaes_wrapper_set_key,
	.setauthsize		= gcmaes_wrapper_set_authsize,
	.encrypt		= gcmaes_wrapper_encrypt,
	.decrypt		= gcmaes_wrapper_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "gcm(aes)",
		.cra_driver_name	= "generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_ASYNC,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct cryptd_aead *),
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aead_algs[0];
#endif

static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
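
/*
 * The cpu device table above lets the module autoload on CPUs advertising the
 * AES feature flag; aesni_init() double-checks with x86_match_cpu() before
 * registering anything.
 */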

static void aesni_free_simds(void)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(aesni_simd_skciphers) &&
		    aesni_simd_skciphers[i]; i++)
		simd_skcipher_free(aesni_simd_skciphers[i]);
}

static int __init aesni_init(void)
{
	struct simd_skcipher_alg *simd;
	const char *basename;
	const char *algname;
	const char *drvname;
	int err;
	int i;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx2;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx2;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc_avx;
		aesni_gcm_dec_tfm = aesni_gcm_dec_avx;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_enc_tfm = aesni_gcm_enc;
		aesni_gcm_dec_tfm = aesni_gcm_dec;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	if (err)
		return err;

	err = crypto_register_skciphers(aesni_skciphers,
					ARRAY_SIZE(aesni_skciphers));
	if (err)
		goto unregister_algs;

	err = crypto_register_aeads(aesni_aead_algs,
				    ARRAY_SIZE(aesni_aead_algs));
	if (err)
		goto unregister_skciphers;

	for (i = 0; i < ARRAY_SIZE(aesni_skciphers); i++) {
		algname = aesni_skciphers[i].base.cra_name + 2;
		drvname = aesni_skciphers[i].base.cra_driver_name + 2;
		basename = aesni_skciphers[i].base.cra_driver_name;
		simd = simd_skcipher_create_compat(algname, drvname, basename);
		err = PTR_ERR(simd);
		if (IS_ERR(simd))
			goto unregister_simds;

		aesni_simd_skciphers[i] = simd;
	}

	return 0;

unregister_simds:
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
unregister_skciphers:
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
unregister_algs:
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
	return err;
}

static void __exit aesni_exit(void)
{
	aesni_free_simds();
	crypto_unregister_aeads(aesni_aead_algs, ARRAY_SIZE(aesni_aead_algs));
	crypto_unregister_skciphers(aesni_skciphers,
				    ARRAY_SIZE(aesni_skciphers));
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}

late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");