// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/gcm.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <asm/crypto/glue_helper.h>
#define AESNI_ALIGN		16
#define AESNI_ALIGN_ATTR	__attribute__ ((__aligned__(AESNI_ALIGN)))
#define AES_BLOCK_MASK		(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
#define AESNI_ALIGN_EXTRA	((AESNI_ALIGN - 1) & ~(CRYPTO_MINALIGN - 1))
#define CRYPTO_AES_CTX_SIZE	(sizeof(struct crypto_aes_ctx) + AESNI_ALIGN_EXTRA)
#define XTS_AES_CTX_SIZE	(sizeof(struct aesni_xts_ctx) + AESNI_ALIGN_EXTRA)
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
	u8 nonce[4];
};
struct generic_gcmaes_ctx {
	u8 hash_subkey[16] AESNI_ALIGN_ATTR;
	struct crypto_aes_ctx aes_key_expanded AESNI_ALIGN_ATTR;
};
struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx)] AESNI_ALIGN_ATTR;
};
#define GCM_BLOCK_LEN 16

struct gcm_context_data {
	/* init, update and finalize context data */
	u8 aad_hash[GCM_BLOCK_LEN];
	u64 aad_length;
	u64 in_length;
	u8 partial_block_enc_key[GCM_BLOCK_LEN];
	u8 orig_IV[GCM_BLOCK_LEN];
	u8 current_counter[GCM_BLOCK_LEN];
	u64 partial_block_len;
	u64 unused;
	u8 hash_keys[GCM_BLOCK_LEN * 16];
};
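
/*
 * The context above is passed by pointer straight into the aesni_gcm_*
 * assembler routines declared below, so its field layout is part of the
 * asm interface and must stay in sync with the .S implementation.
 */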
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_dec(const void *ctx, u8 *out, const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
#define AVX_GEN2_OPTSIZE 640
#define AVX_GEN4_OPTSIZE 4096
static void (*aesni_ctr_enc_tfm)(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);

asmlinkage void aesni_xts_crypt8(const struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, le128 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes.
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
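
/*
 * Illustrative sketch (not part of the original driver): how the
 * monolithic aesni_gcm_enc() above would typically be driven for one
 * contiguous buffer. The wrapper name and its arguments are hypothetical;
 * the real callers below work on scatterlists instead.
 */
static inline void aesni_gcm_enc_oneshot_sketch(struct crypto_aes_ctx *key,
						struct gcm_context_data *gdata,
						u8 *dst, const u8 *src,
						unsigned long len, u8 *j0,
						u8 *hash_subkey,
						const u8 *aad, unsigned long aad_len,
						u8 *tag, unsigned long tag_len)
{
	/* The asm routines use SSE registers, so FPU context must be claimed. */
	kernel_fpu_begin();
	aesni_gcm_enc(key, gdata, dst, src, len, j0,
		      hash_subkey, aad, aad_len, tag, tag_len);
	kernel_fpu_end();
}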
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * struct gcm_context_data. May be uninitialized.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 12 byte IV concatenated with 0x00000001.
 *         16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *         to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *         Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx,
			struct gcm_context_data *gdata, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* Scatter / Gather routines, with args similar to above */
asmlinkage void aesni_gcm_init(void *ctx,
			       struct gcm_context_data *gdata,
			       u8 *iv,
			       u8 *hash_subkey, const u8 *aad,
			       unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update(void *ctx,
				     struct gcm_context_data *gdata, u8 *out,
				     const u8 *in,
				     unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize(void *ctx,
				   struct gcm_context_data *gdata,
				   u8 *auth_tag, unsigned long auth_tag_len);
static const struct aesni_gcm_tfm_s {
	void (*init)(void *ctx, struct gcm_context_data *gdata, u8 *iv,
		     u8 *hash_subkey, const u8 *aad, unsigned long aad_len);
	void (*enc_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long plaintext_len);
	void (*dec_update)(void *ctx, struct gcm_context_data *gdata, u8 *out,
			   const u8 *in, unsigned long ciphertext_len);
	void (*finalize)(void *ctx, struct gcm_context_data *gdata,
			 u8 *auth_tag, unsigned long auth_tag_len);
} *aesni_gcm_tfm;
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_sse = {
	.init = &aesni_gcm_init,
	.enc_update = &aesni_gcm_enc_update,
	.dec_update = &aesni_gcm_dec_update,
	.finalize = &aesni_gcm_finalize,
};
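
/*
 * Illustrative sketch (not part of the original driver): the calling
 * sequence the dispatch table above is meant to support when the whole
 * message is one contiguous chunk. gcmaes_crypt_by_sg() below is the real
 * user and additionally walks scatterlists; this helper name is hypothetical.
 */
static inline void aesni_gcm_sg_oneshot_sketch(const struct aesni_gcm_tfm_s *gcm,
					       void *key,
					       struct gcm_context_data *gdata,
					       u8 *j0, u8 *hash_subkey,
					       const u8 *aad, unsigned long aad_len,
					       u8 *dst, const u8 *src,
					       unsigned long len,
					       u8 *tag, unsigned long tag_len)
{
	kernel_fpu_begin();
	gcm->init(key, gdata, j0, hash_subkey, aad, aad_len);
	gcm->enc_update(key, gdata, dst, src, len);
	gcm->finalize(key, gdata, tag, tag_len);
	kernel_fpu_end();
}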
asmlinkage void aes_ctr_enc_128_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_192_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
asmlinkage void aes_ctr_enc_256_avx_by8(const u8 *in, u8 *iv,
		void *keys, u8 *out, unsigned int num_bytes);
/*
 * asmlinkage void aesni_gcm_init_avx_gen2()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen2(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update_avx_gen2(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen2(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in,
					unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen2(void *ctx,
					struct gcm_context_data *gdata,
					u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_enc_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_dec_avx_gen2(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen2 = {
	.init = &aesni_gcm_init_avx_gen2,
	.enc_update = &aesni_gcm_enc_update_avx_gen2,
	.dec_update = &aesni_gcm_dec_update_avx_gen2,
	.finalize = &aesni_gcm_finalize_avx_gen2,
};
#ifdef CONFIG_AS_AVX2
/*
 * asmlinkage void aesni_gcm_init_avx_gen4()
 * gcm_data *my_ctx_data, context data
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 */
asmlinkage void aesni_gcm_init_avx_gen4(void *my_ctx_data,
					struct gcm_context_data *gdata,
					u8 *iv,
					u8 *hash_subkey,
					const u8 *aad,
					unsigned long aad_len);
asmlinkage void aesni_gcm_enc_update_avx_gen4(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in, unsigned long plaintext_len);
asmlinkage void aesni_gcm_dec_update_avx_gen4(void *ctx,
					struct gcm_context_data *gdata, u8 *out,
					const u8 *in,
					unsigned long ciphertext_len);
asmlinkage void aesni_gcm_finalize_avx_gen4(void *ctx,
					struct gcm_context_data *gdata,
					u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_enc_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long plaintext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);
asmlinkage void aesni_gcm_dec_avx_gen4(void *ctx,
				struct gcm_context_data *gdata, u8 *out,
				const u8 *in, unsigned long ciphertext_len, u8 *iv,
				const u8 *aad, unsigned long aad_len,
				u8 *auth_tag, unsigned long auth_tag_len);
static const struct aesni_gcm_tfm_s aesni_gcm_tfm_avx_gen4 = {
	.init = &aesni_gcm_init_avx_gen4,
	.enc_update = &aesni_gcm_enc_update_avx_gen4,
	.dec_update = &aesni_gcm_dec_update_avx_gen4,
	.finalize = &aesni_gcm_finalize_avx_gen4,
};

#endif
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}

static inline struct
generic_gcmaes_ctx *generic_gcmaes_ctx_get(struct crypto_aead *tfm)
{
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return PTR_ALIGN(crypto_aead_ctx(tfm), align);
}
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
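
/*
 * The raw tfm context is only guaranteed CRYPTO_MINALIGN alignment, while
 * the AES-NI assembler wants the expanded key on a 16-byte boundary; the
 * AESNI_ALIGN_EXTRA slack reserved in CRYPTO_AES_CTX_SIZE/XTS_AES_CTX_SIZE
 * is what makes the ALIGN()/PTR_ALIGN() round-up above safe.
 */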
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256)
		return -EINVAL;

	if (!crypto_simd_usable())
		err = aes_expandkey(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aesni_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_encrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aesni_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!crypto_simd_usable()) {
		aes_decrypt(ctx, dst, src);
	} else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
static int aesni_skcipher_setkey(struct crypto_skcipher *tfm, const u8 *key,
				 unsigned int len)
{
	return aes_set_key_common(crypto_skcipher_tfm(tfm),
				  crypto_skcipher_ctx(tfm), key, len);
}
static int ecb_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct skcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor_cpy(dst, keystream, src, nbytes);

	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
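
/*
 * CTR is a stream mode: the tail above encrypts the current counter block
 * once and XORs only the remaining nbytes of keystream into the output,
 * so requests need not be a multiple of AES_BLOCK_SIZE.
 */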
static void aesni_ctr_enc_avx_tfm(struct crypto_aes_ctx *ctx, u8 *out,
				  const u8 *in, unsigned int len, u8 *iv)
{
	/*
	 * based on key length, override with the by8 version
	 * of ctr mode encryption/decryption for improved performance
	 * aes_set_key_common() ensures that key length is one of
	 * {128,192,256}
	 */
	if (ctx->key_length == AES_KEYSIZE_128)
		aes_ctr_enc_128_avx_by8(in, iv, (void *)ctx, out, len);
	else if (ctx->key_length == AES_KEYSIZE_192)
		aes_ctr_enc_192_avx_by8(in, iv, (void *)ctx, out, len);
	else
		aes_ctr_enc_256_avx_by8(in, iv, (void *)ctx, out, len);
}
static int ctr_crypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_skcipher_ctx(tfm));
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
				  nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = skcipher_walk_done(&walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = skcipher_walk_done(&walk, 0);
	}
	kernel_fpu_end();

	return err;
}
static int xts_aesni_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	keylen /= 2;

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_crypt_ctx,
				 key, keylen);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(crypto_skcipher_tfm(tfm), ctx->raw_tweak_ctx,
				  key + keylen, keylen);
}
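
/*
 * For example (illustrative): a 64-byte xts(aes) key yields XTS-AES-256,
 * with bytes 0..31 expanded into raw_crypt_ctx for the data units and
 * bytes 32..63 expanded into raw_tweak_ctx for the tweak encryption.
 */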
static void aesni_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_enc);
}

static void aesni_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, aesni_dec);
}

static void aesni_xts_enc8(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, dst, src, true, iv);
}

static void aesni_xts_dec8(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, dst, src, false, iv);
}
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = aesni_xts_enc8 }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = aesni_xts_enc }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = aesni_xts_dec8 }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = aesni_xts_dec }
	} }
};
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_enc_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct aesni_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&aesni_dec_xts, req, aesni_enc,
				   aes_ctx(ctx->raw_tweak_ctx),
				   aes_ctx(ctx->raw_crypt_ctx),
				   true);
}
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_aes_ctx ctx;
	int ret;

	ret = aes_expandkey(&ctx, key, key_len);
	if (ret)
		return ret;

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	aes_encrypt(&ctx, hash_subkey, hash_subkey);

	memzero_explicit(&ctx, sizeof(ctx));
	return 0;
}
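
/*
 * In other words, the GHASH key is H = E_K(0^128): expand the AES key,
 * encrypt one all-zero block and hand the result to the asm as hash_subkey.
 */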
static int common_rfc4106_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(aead);

	if (key_len < 4)
		return -EINVAL;

	/* Account for the 4 byte nonce at the end. */
	key_len -= 4;

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
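
/*
 * Example (illustrative): for rfc4106(gcm(aes)) with AES-128 the caller
 * supplies 20 bytes of key material - a 16-byte AES key followed by the
 * 4-byte salt that is copied into ctx->nonce and later forms iv[0..3].
 */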
/* This is the Integrity Check Value (aka the authentication tag) length and can
 * be 8, 12 or 16 bytes long. */
static int common_rfc4106_set_authsize(struct crypto_aead *aead,
				       unsigned int authsize)
{
	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int generic_gcmaes_set_authsize(struct crypto_aead *tfm,
				       unsigned int authsize)
{
	switch (authsize) {
	case 4:
	case 8:
	case 12:
	case 13:
	case 14:
	case 15:
	case 16:
		break;
	default:
		return -EINVAL;
	}

	return 0;
}
static int gcmaes_crypt_by_sg(bool enc, struct aead_request *req,
			      unsigned int assoclen, u8 *hash_subkey,
			      u8 *iv, void *aes_ctx)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	const struct aesni_gcm_tfm_s *gcm_tfm = aesni_gcm_tfm;
	struct gcm_context_data data AESNI_ALIGN_ATTR;
	struct scatter_walk dst_sg_walk = {};
	unsigned long left = req->cryptlen;
	unsigned long len, srclen, dstlen;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk src_sg_walk;
	struct scatterlist src_start[2];
	struct scatterlist dst_start[2];
	struct scatterlist *src_sg;
	struct scatterlist *dst_sg;
	u8 *src, *dst, *assoc;
	u8 *assocmem = NULL;
	u8 authTag[16];

	if (!enc)
		left -= auth_tag_len;
#ifdef CONFIG_AS_AVX2
	if (left < AVX_GEN4_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen4)
		gcm_tfm = &aesni_gcm_tfm_avx_gen2;
#endif
#ifdef CONFIG_AS_AVX
	if (left < AVX_GEN2_OPTSIZE && gcm_tfm == &aesni_gcm_tfm_avx_gen2)
		gcm_tfm = &aesni_gcm_tfm_sse;
#endif
	/* Linearize assoc, if not already linear */
	if (req->src->length >= assoclen && req->src->length &&
	    (!PageHighMem(sg_page(req->src)) ||
	     req->src->offset + req->src->length <= PAGE_SIZE)) {
		scatterwalk_start(&assoc_sg_walk, req->src);
		assoc = scatterwalk_map(&assoc_sg_walk);
	} else {
		/* assoc can be any length, so must be on heap */
		assocmem = kmalloc(assoclen, GFP_ATOMIC);
		if (unlikely(!assocmem))
			return -ENOMEM;
		assoc = assocmem;

		scatterwalk_map_and_copy(assoc, req->src, 0, assoclen, 0);
	}
	if (left) {
		src_sg = scatterwalk_ffwd(src_start, req->src, req->assoclen);
		scatterwalk_start(&src_sg_walk, src_sg);
		if (req->src != req->dst) {
			dst_sg = scatterwalk_ffwd(dst_start, req->dst,
						  req->assoclen);
			scatterwalk_start(&dst_sg_walk, dst_sg);
		}
	}
	kernel_fpu_begin();
	gcm_tfm->init(aes_ctx, &data, iv,
		      hash_subkey, assoc, assoclen);
	if (req->src != req->dst) {
		while (left) {
			src = scatterwalk_map(&src_sg_walk);
			dst = scatterwalk_map(&dst_sg_walk);
			srclen = scatterwalk_clamp(&src_sg_walk, left);
			dstlen = scatterwalk_clamp(&dst_sg_walk, left);
			len = min(srclen, dstlen);

			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    dst, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    dst, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_unmap(dst);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_advance(&dst_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 0, left);
			scatterwalk_done(&dst_sg_walk, 1, left);
		}
	} else {
		while (left) {
			dst = src = scatterwalk_map(&src_sg_walk);
			len = scatterwalk_clamp(&src_sg_walk, left);

			if (len) {
				if (enc)
					gcm_tfm->enc_update(aes_ctx, &data,
							    src, src, len);
				else
					gcm_tfm->dec_update(aes_ctx, &data,
							    src, src, len);
			}
			left -= len;

			scatterwalk_unmap(src);
			scatterwalk_advance(&src_sg_walk, len);
			scatterwalk_done(&src_sg_walk, 1, left);
		}
	}
	gcm_tfm->finalize(aes_ctx, &data, authTag, auth_tag_len);
	kernel_fpu_end();
	if (!assocmem)
		scatterwalk_unmap(assoc);
	else
		kfree(assocmem);

	if (!enc) {
		u8 authTagMsg[16];

		/* Copy out original authTag */
		scatterwalk_map_and_copy(authTagMsg, req->src,
					 req->assoclen + req->cryptlen -
					 auth_tag_len,
					 auth_tag_len, 0);

		/* Compare generated tag with passed in tag. */
		return crypto_memneq(authTagMsg, authTag, auth_tag_len) ?
			-EBADMSG : 0;
	}

	/* Copy in the authTag */
	scatterwalk_map_and_copy(authTag, req->dst,
				 req->assoclen + req->cryptlen,
				 auth_tag_len, 1);

	return 0;
}
static int gcmaes_encrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(true, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}

static int gcmaes_decrypt(struct aead_request *req, unsigned int assoclen,
			  u8 *hash_subkey, u8 *iv, void *aes_ctx)
{
	return gcmaes_crypt_by_sg(false, req, assoclen, hash_subkey, iv,
				  aes_ctx);
}
static int helper_rfc4106_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;
	__be32 counter = cpu_to_be32(1);

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, the AAD length needs to be equal
	 * to 16 or 20 bytes.
	 */
	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int helper_rfc4106_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	unsigned int i;

	if (unlikely(req->assoclen != 16 && req->assoclen != 20))
		return -EINVAL;

	/* Assuming we are supporting rfc4106 64-bit extended
	 * sequence numbers, the AAD length needs to be equal
	 * to 16 or 20 bytes.
	 */

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen - 8, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct crypto_alg aesni_cipher_alg = {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aesni_encrypt,
			.cia_decrypt		= aesni_decrypt
		}
	}
};
static struct skcipher_alg aesni_skciphers[] = {
	{
		.base = {
			.cra_name		= "__ecb(aes)",
			.cra_driver_name	= "__ecb-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ecb_encrypt,
		.decrypt	= ecb_decrypt,
	}, {
		.base = {
			.cra_name		= "__cbc(aes)",
			.cra_driver_name	= "__cbc-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= cbc_encrypt,
		.decrypt	= cbc_decrypt,
#ifdef CONFIG_X86_64
	}, {
		.base = {
			.cra_name		= "__ctr(aes)",
			.cra_driver_name	= "__ctr-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= 1,
			.cra_ctxsize		= CRYPTO_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= AES_MIN_KEY_SIZE,
		.max_keysize	= AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.chunksize	= AES_BLOCK_SIZE,
		.setkey		= aesni_skcipher_setkey,
		.encrypt	= ctr_crypt,
		.decrypt	= ctr_crypt,
	}, {
		.base = {
			.cra_name		= "__xts(aes)",
			.cra_driver_name	= "__xts-aes-aesni",
			.cra_priority		= 400,
			.cra_flags		= CRYPTO_ALG_INTERNAL,
			.cra_blocksize		= AES_BLOCK_SIZE,
			.cra_ctxsize		= XTS_AES_CTX_SIZE,
			.cra_module		= THIS_MODULE,
		},
		.min_keysize	= 2 * AES_MIN_KEY_SIZE,
		.max_keysize	= 2 * AES_MAX_KEY_SIZE,
		.ivsize		= AES_BLOCK_SIZE,
		.setkey		= xts_aesni_setkey,
		.encrypt	= xts_encrypt,
		.decrypt	= xts_decrypt,
#endif
	}
};
static
struct simd_skcipher_alg *aesni_simd_skciphers[ARRAY_SIZE(aesni_skciphers)];
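
/*
 * Illustrative sketch (not part of the original driver): kernel users do
 * not call the "__"-prefixed internal algorithms directly; they allocate
 * the plain names and end up in the SIMD wrappers registered below. The
 * function name and error handling here are hypothetical.
 */
static int __maybe_unused aesni_cbc_usage_sketch(const u8 *key, unsigned int keylen,
						 u8 *iv, struct scatterlist *sg,
						 unsigned int len)
{
	struct crypto_skcipher *tfm;
	struct skcipher_request *req;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_skcipher("cbc(aes)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_skcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = skcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	skcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
				      CRYPTO_TFM_REQ_MAY_BACKLOG,
				      crypto_req_done, &wait);
	skcipher_request_set_crypt(req, sg, sg, len, iv);
	err = crypto_wait_req(crypto_skcipher_encrypt(req), &wait);

	skcipher_request_free(req);
out_free_tfm:
	crypto_free_skcipher(tfm);
	return err;
}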
#ifdef CONFIG_X86_64
static int generic_gcmaes_set_key(struct crypto_aead *aead, const u8 *key,
				  unsigned int key_len)
{
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(aead);

	return aes_set_key_common(crypto_aead_tfm(aead),
				  &ctx->aes_key_expanded, key, key_len) ?:
	       rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
}
static int generic_gcmaes_encrypt(struct aead_request *req)
{
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));
	__be32 counter = cpu_to_be32(1);

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_encrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static int generic_gcmaes_decrypt(struct aead_request *req)
{
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct generic_gcmaes_ctx *ctx = generic_gcmaes_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	u8 iv[16] __attribute__ ((__aligned__(AESNI_ALIGN)));

	memcpy(iv, req->iv, 12);
	*((__be32 *)(iv+12)) = counter;

	return gcmaes_decrypt(req, req->assoclen, ctx->hash_subkey, iv,
			      aes_ctx);
}
static struct aead_alg aesni_aeads[] = { {
	.setkey			= common_rfc4106_set_key,
	.setauthsize		= common_rfc4106_set_authsize,
	.encrypt		= helper_rfc4106_encrypt,
	.decrypt		= helper_rfc4106_decrypt,
	.ivsize			= GCM_RFC4106_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__rfc4106(gcm(aes))",
		.cra_driver_name	= "__rfc4106-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
}, {
	.setkey			= generic_gcmaes_set_key,
	.setauthsize		= generic_gcmaes_set_authsize,
	.encrypt		= generic_gcmaes_encrypt,
	.decrypt		= generic_gcmaes_decrypt,
	.ivsize			= GCM_AES_IV_SIZE,
	.maxauthsize		= 16,
	.base = {
		.cra_name		= "__gcm(aes)",
		.cra_driver_name	= "__generic-gcm-aesni",
		.cra_priority		= 400,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= 1,
		.cra_ctxsize		= sizeof(struct generic_gcmaes_ctx),
		.cra_alignmask		= AESNI_ALIGN - 1,
		.cra_module		= THIS_MODULE,
	},
} };
#else
static struct aead_alg aesni_aeads[0];
#endif
static struct simd_aead_alg *aesni_simd_aeads[ARRAY_SIZE(aesni_aeads)];
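
/*
 * The simd_*_compat() wrappers registered in aesni_init() expose the
 * internal-only algorithms above under their plain names; when the FPU is
 * not usable in the current context they defer the request to cryptd
 * instead of touching the AES-NI instructions directly.
 */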
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;
#ifdef CONFIG_X86_64
#ifdef CONFIG_AS_AVX2
	if (boot_cpu_has(X86_FEATURE_AVX2)) {
		pr_info("AVX2 version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen4;
	} else
#endif
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		pr_info("AVX version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_avx_gen2;
	} else
#endif
	{
		pr_info("SSE version of gcm_enc/dec engaged.\n");
		aesni_gcm_tfm = &aesni_gcm_tfm_sse;
	}
	aesni_ctr_enc_tfm = aesni_ctr_enc;
#ifdef CONFIG_AS_AVX
	if (boot_cpu_has(X86_FEATURE_AVX)) {
		/* optimize performance of ctr mode encryption transform */
		aesni_ctr_enc_tfm = aesni_ctr_enc_avx_tfm;
		pr_info("AES CTR mode by8 optimization enabled\n");
	}
#endif
#endif

	err = crypto_register_alg(&aesni_cipher_alg);
	if (err)
		return err;

	err = simd_register_skciphers_compat(aesni_skciphers,
					     ARRAY_SIZE(aesni_skciphers),
					     aesni_simd_skciphers);
	if (err)
		goto unregister_cipher;

	err = simd_register_aeads_compat(aesni_aeads, ARRAY_SIZE(aesni_aeads),
					 aesni_simd_aeads);
	if (err)
		goto unregister_skciphers;

	return 0;

unregister_skciphers:
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
unregister_cipher:
	crypto_unregister_alg(&aesni_cipher_alg);
	return err;
}
static void __exit aesni_exit(void)
{
	simd_unregister_aeads(aesni_aeads, ARRAY_SIZE(aesni_aeads),
			      aesni_simd_aeads);
	simd_unregister_skciphers(aesni_skciphers, ARRAY_SIZE(aesni_skciphers),
				  aesni_simd_skciphers);
	crypto_unregister_alg(&aesni_cipher_alg);
}
late_initcall(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("aes");