/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * Added RFC4106 AES-GCM support for 128-bit keys under the AEAD
 * interface for 64-bit kernels.
 *    Authors: Adrian Hoban <adrian.hoban@intel.com>
 *             Gabriele Paoloni <gabriele.paoloni@intel.com>
 *             Tadeusz Struk (tadeusz.struk@intel.com)
 *             Aidan O'Mahony (aidan.o.mahony@intel.com)
 *    Copyright (c) 2010, Intel Corporation.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/module.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <crypto/b128ops.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/cpu_device_id.h>
#include <asm/crypto/aes.h>
#include <asm/crypto/ablk_helper.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/aead.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#include <asm/crypto/glue_helper.h>
#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif
/* This data is stored at the end of the crypto_tfm struct.
 * It's a type of per "session" data storage location.
 * This needs to be 16 byte aligned.
 */
struct aesni_rfc4106_gcm_ctx {
	u8 hash_subkey[16];
	struct crypto_aes_ctx aes_key_expanded;
	u8 nonce[4];
	struct cryptd_aead *cryptd_tfm;
};
struct aesni_gcm_set_hash_subkey_result {
	int err;
	struct completion completion;
};
struct aesni_hash_subkey_req_data {
	u8 iv[16];
	struct aesni_gcm_set_hash_subkey_result result;
	struct scatterlist sg;
};
#define AESNI_ALIGN	(16)
#define AES_BLOCK_MASK	(~(AES_BLOCK_SIZE - 1))
#define RFC4106_HASH_SUBKEY_SIZE 16
struct aesni_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	u8 raw_aes_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};
struct aesni_xts_ctx {
	u8 raw_tweak_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
	u8 raw_crypt_ctx[sizeof(struct crypto_aes_ctx) + AESNI_ALIGN - 1];
};
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
			     unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
			  const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
int crypto_fpu_init(void);
void crypto_fpu_exit(void);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
			      const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_xts_crypt8(struct crypto_aes_ctx *ctx, u8 *out,
				 const u8 *in, bool enc, u8 *iv);
/* asmlinkage void aesni_gcm_enc()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Ciphertext output. Encrypt in-place is allowed.
 * const u8 *in, Plaintext input
 * unsigned long plaintext_len, Length of data in bytes for encryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this
 *          is going to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len), Authenticated Tag Length in bytes.
 *          Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_enc(void *ctx, u8 *out,
			const u8 *in, unsigned long plaintext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
/* asmlinkage void aesni_gcm_dec()
 * void *ctx, AES Key schedule. Starts on a 16 byte boundary.
 * u8 *out, Plaintext output. Decrypt in-place is allowed.
 * const u8 *in, Ciphertext input
 * unsigned long ciphertext_len, Length of data in bytes for decryption.
 * u8 *iv, Pre-counter block j0: 4 byte salt (from Security Association)
 *         concatenated with 8 byte Initialisation Vector (from IPSec ESP
 *         Payload) concatenated with 0x00000001. 16-byte aligned pointer.
 * u8 *hash_subkey, the Hash sub key input. Data starts on a 16-byte boundary.
 * const u8 *aad, Additional Authentication Data (AAD)
 * unsigned long aad_len, Length of AAD in bytes. With RFC4106 this is going
 *          to be 8 or 12 bytes
 * u8 *auth_tag, Authenticated Tag output.
 * unsigned long auth_tag_len) Authenticated Tag Length in bytes.
 *          Valid values are 16 (most likely), 12 or 8.
 */
asmlinkage void aesni_gcm_dec(void *ctx, u8 *out,
			const u8 *in, unsigned long ciphertext_len, u8 *iv,
			u8 *hash_subkey, const u8 *aad, unsigned long aad_len,
			u8 *auth_tag, unsigned long auth_tag_len);
static inline struct
aesni_rfc4106_gcm_ctx *aesni_rfc4106_gcm_ctx_get(struct crypto_aead *tfm)
{
	return (struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)
		crypto_tfm_ctx(crypto_aead_tfm(tfm)), AESNI_ALIGN);
}
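
/*
 * The raw_*_ctx buffers above are over-allocated by AESNI_ALIGN - 1 bytes;
 * aes_ctx() returns the first 16-byte aligned crypto_aes_ctx inside such a
 * buffer (or the buffer itself when the tfm context is already sufficiently
 * aligned).
 */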
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
	unsigned long addr = (unsigned long)raw_ctx;
	unsigned long align = AESNI_ALIGN;

	if (align <= crypto_tfm_ctx_alignment())
		align = 1;
	return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
			      const u8 *in_key, unsigned int key_len)
{
	struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
	u32 *flags = &tfm->crt_flags;
	int err;

	if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
	    key_len != AES_KEYSIZE_256) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	if (!irq_fpu_usable())
		err = crypto_aes_expand_key(ctx, in_key, key_len);
	else {
		kernel_fpu_begin();
		err = aesni_set_key(ctx, in_key, key_len);
		kernel_fpu_end();
	}

	return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
		       unsigned int key_len)
{
	return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_encrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_enc(ctx, dst, src);
		kernel_fpu_end();
	}
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	if (!irq_fpu_usable())
		crypto_aes_decrypt_x86(ctx, dst, src);
	else {
		kernel_fpu_begin();
		aesni_dec(ctx, dst, src);
		kernel_fpu_end();
	}
}
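
/*
 * __aes_encrypt()/__aes_decrypt() back the internal "__aes-aesni" cipher and
 * call the AES-NI primitives unconditionally; they are intended to run under
 * templates (e.g. the fpu() wrapper) that already manage FPU state.
 */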
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_enc(ctx, dst, src);
}
static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

	aesni_dec(ctx, dst, src);
}
static int ecb_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int ecb_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_encrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
static int cbc_decrypt(struct blkcipher_desc *desc,
		       struct scatterlist *dst, struct scatterlist *src,
		       unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes)) {
		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	kernel_fpu_end();

	return err;
}
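
/*
 * CTR mode: the final partial block cannot go through the block-wide
 * assembler routine, so encrypt the counter block into a keystream buffer,
 * XOR only the remaining bytes into the destination and bump the counter.
 */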
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
			    struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[AES_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	aesni_enc(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);
	crypto_inc(ctrblk, AES_BLOCK_SIZE);
}
static int ctr_crypt(struct blkcipher_desc *desc,
		     struct scatterlist *dst, struct scatterlist *src,
		     unsigned int nbytes)
{
	struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
		aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes & AES_BLOCK_MASK, walk.iv);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}
	if (walk.nbytes) {
		ctr_crypt_final(ctx, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}
	kernel_fpu_end();

	return err;
}
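
/*
 * The ablk_*_init() helpers bind the user-visible asynchronous ablkcipher
 * algorithms to their internal synchronous "__driver-*-aes-aesni"
 * counterparts through cryptd.
 */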
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ecb-aes-aesni");
}
static int ablk_cbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-cbc-aes-aesni");
}
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "__driver-ctr-aes-aesni");
}
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
	return ablk_init_common(tfm, "fpu(pcbc(__driver-aes-aesni))");
}
static void lrw_xts_encrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_enc(ctx, blks, blks, nbytes);
}
static void lrw_xts_decrypt_callback(void *ctx, u8 *blks, unsigned int nbytes)
{
	aesni_ecb_dec(ctx, blks, blks, nbytes);
}
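
/*
 * LRW keys carry an extra AES_BLOCK_SIZE bytes of tweak material after the
 * AES key proper; the setkey below feeds the first part to the AES key
 * schedule and the trailing block to lrw_init_table().
 */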
static int lrw_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = aes_set_key_common(tfm, ctx->raw_aes_ctx, key,
				 keylen - AES_BLOCK_SIZE);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - AES_BLOCK_SIZE);
}
static void lrw_aesni_exit_tfm(struct crypto_tfm *tfm)
{
	struct aesni_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = aes_ctx(ctx->raw_aes_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}
static int xts_aesni_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct aesni_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = aes_set_key_common(tfm, ctx->raw_crypt_ctx, key, keylen / 2);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return aes_set_key_common(tfm, ctx->raw_tweak_ctx, key + keylen / 2,
				  keylen / 2);
}
static void aesni_xts_tweak(void *ctx, u8 *out, const u8 *in)
{
	aesni_enc(ctx, out, in);
}
#ifdef CONFIG_X86_64

static void aesni_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_enc));
}

static void aesni_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, GLUE_FUNC_CAST(aesni_dec));
}
static void aesni_xts_enc8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, true, (u8 *)iv);
}

static void aesni_xts_dec8(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	aesni_xts_crypt8(ctx, (u8 *)dst, (const u8 *)src, false, (u8 *)iv);
}
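
/*
 * Glue dispatch tables for XTS: the 8-block-wide assembler routine is used
 * when enough contiguous blocks are available, with the single-block helpers
 * as a fallback for the tail.
 */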
static const struct common_glue_ctx aesni_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_enc) }
	} }
};

static const struct common_glue_ctx aesni_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = 1,

	.funcs = { {
		.num_blocks = 8,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec8) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(aesni_xts_dec) }
	} }
};
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&aesni_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(aesni_xts_tweak),
				     aes_ctx(ctx->raw_tweak_ctx),
				     aes_ctx(ctx->raw_crypt_ctx));
}
#else

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct aesni_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[8];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = aes_ctx(ctx->raw_tweak_ctx),
		.tweak_fn = aesni_xts_tweak,
		.crypt_ctx = aes_ctx(ctx->raw_crypt_ctx),
		.crypt_fn = lrw_xts_decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

	kernel_fpu_begin();
	ret = xts_crypt(desc, dst, src, nbytes, &req);
	kernel_fpu_end();

	return ret;
}

#endif
static int rfc4106_init(struct crypto_tfm *tfm)
{
	struct cryptd_aead *cryptd_tfm;
	struct aesni_rfc4106_gcm_ctx *ctx = (struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
	struct crypto_aead *cryptd_child;
	struct aesni_rfc4106_gcm_ctx *child_ctx;

	cryptd_tfm = cryptd_alloc_aead("__driver-gcm-aes-aesni", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);

	cryptd_child = cryptd_aead_child(cryptd_tfm);
	child_ctx = aesni_rfc4106_gcm_ctx_get(cryptd_child);
	memcpy(child_ctx, ctx, sizeof(*ctx));
	ctx->cryptd_tfm = cryptd_tfm;
	tfm->crt_aead.reqsize = sizeof(struct aead_request)
		+ crypto_aead_reqsize(&cryptd_tfm->base);
	return 0;
}
static void rfc4106_exit(struct crypto_tfm *tfm)
{
	struct aesni_rfc4106_gcm_ctx *ctx =
		(struct aesni_rfc4106_gcm_ctx *)
		PTR_ALIGN((u8 *)crypto_tfm_ctx(tfm), AESNI_ALIGN);
	if (!IS_ERR(ctx->cryptd_tfm))
		cryptd_free_aead(ctx->cryptd_tfm);
}
static void
rfc4106_set_hash_subkey_done(struct crypto_async_request *req, int err)
{
	struct aesni_gcm_set_hash_subkey_result *result = req->data;

	if (err == -EINPROGRESS)
		return;
	result->err = err;
	complete(&result->completion);
}
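
/*
 * Derive the GHASH subkey: the hash_subkey buffer is zeroed and then run
 * through a one-off ctr(aes) request keyed with the user key, i.e. the
 * subkey ends up being the encryption of an all-zero block.
 */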
static int
rfc4106_set_hash_subkey(u8 *hash_subkey, const u8 *key, unsigned int key_len)
{
	struct crypto_ablkcipher *ctr_tfm;
	struct ablkcipher_request *req;
	int ret = -EINVAL;
	struct aesni_hash_subkey_req_data *req_data;

	ctr_tfm = crypto_alloc_ablkcipher("ctr(aes)", 0, 0);
	if (IS_ERR(ctr_tfm))
		return PTR_ERR(ctr_tfm);

	crypto_ablkcipher_clear_flags(ctr_tfm, ~0);

	ret = crypto_ablkcipher_setkey(ctr_tfm, key, key_len);
	if (ret)
		goto out_free_ablkcipher;

	ret = -ENOMEM;
	req = ablkcipher_request_alloc(ctr_tfm, GFP_KERNEL);
	if (!req)
		goto out_free_ablkcipher;

	req_data = kmalloc(sizeof(*req_data), GFP_KERNEL);
	if (!req_data)
		goto out_free_request;

	memset(req_data->iv, 0, sizeof(req_data->iv));

	/* Clear the data in the hash sub key container to zero.*/
	/* We want to cipher all zeros to create the hash sub key. */
	memset(hash_subkey, 0, RFC4106_HASH_SUBKEY_SIZE);

	init_completion(&req_data->result.completion);
	sg_init_one(&req_data->sg, hash_subkey, RFC4106_HASH_SUBKEY_SIZE);
	ablkcipher_request_set_tfm(req, ctr_tfm);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_SLEEP |
					CRYPTO_TFM_REQ_MAY_BACKLOG,
					rfc4106_set_hash_subkey_done,
					&req_data->result);

	ablkcipher_request_set_crypt(req, &req_data->sg,
		&req_data->sg, RFC4106_HASH_SUBKEY_SIZE, req_data->iv);

	ret = crypto_ablkcipher_encrypt(req);
	if (ret == -EINPROGRESS || ret == -EBUSY) {
		ret = wait_for_completion_interruptible
			(&req_data->result.completion);
		if (!ret)
			ret = req_data->result.err;
	}
	kfree(req_data);
out_free_request:
	ablkcipher_request_free(req);
out_free_ablkcipher:
	crypto_free_ablkcipher(ctr_tfm);
	return ret;
}
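
/*
 * rfc4106 keys carry a trailing 4-byte nonce (salt) after the AES key; it is
 * stored in the context here, the remaining bytes go through the AES key
 * schedule, the hash subkey is derived, and the result is mirrored into the
 * cryptd child context.
 */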
static int rfc4106_set_key(struct crypto_aead *parent, const u8 *key,
			   unsigned int key_len)
{
	int ret = 0;
	struct crypto_tfm *tfm = crypto_aead_tfm(parent);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);
	struct aesni_rfc4106_gcm_ctx *child_ctx =
				aesni_rfc4106_gcm_ctx_get(cryptd_child);
	u8 *new_key_align, *new_key_mem = NULL;

	if (key_len < 4) {
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}
	/*Account for 4 byte nonce at the end.*/
	key_len -= 4;
	if (key_len != AES_KEYSIZE_128) {
		crypto_tfm_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	memcpy(ctx->nonce, key + key_len, sizeof(ctx->nonce));
	/*This must be on a 16 byte boundary!*/
	if ((unsigned long)(&(ctx->aes_key_expanded.key_enc[0])) % AESNI_ALIGN)
		return -EINVAL;

	if ((unsigned long)key % AESNI_ALIGN) {
		/*key is not aligned: use an auxiliary aligned pointer*/
		new_key_mem = kmalloc(key_len+AESNI_ALIGN, GFP_KERNEL);
		if (!new_key_mem)
			return -ENOMEM;

		new_key_align = PTR_ALIGN(new_key_mem, AESNI_ALIGN);
		memcpy(new_key_align, key, key_len);
		key = new_key_align;
	}

	if (!irq_fpu_usable())
		ret = crypto_aes_expand_key(&(ctx->aes_key_expanded),
					    key, key_len);
	else {
		kernel_fpu_begin();
		ret = aesni_set_key(&(ctx->aes_key_expanded), key, key_len);
		kernel_fpu_end();
	}
	/*This must be on a 16 byte boundary!*/
	if ((unsigned long)(&(ctx->hash_subkey[0])) % AESNI_ALIGN) {
		ret = -EINVAL;
		goto exit;
	}
	ret = rfc4106_set_hash_subkey(ctx->hash_subkey, key, key_len);
	memcpy(child_ctx, ctx, sizeof(*ctx));
exit:
	kfree(new_key_mem);
	return ret;
}
/* This is the Integrity Check Value (aka the authentication tag) length;
 * it can be 8, 12 or 16 bytes long. */
static int rfc4106_set_authsize(struct crypto_aead *parent,
				unsigned int authsize)
{
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(parent);
	struct crypto_aead *cryptd_child = cryptd_aead_child(ctx->cryptd_tfm);

	switch (authsize) {
	case 8:
	case 12:
	case 16:
		break;
	default:
		return -EINVAL;
	}
	crypto_aead_crt(parent)->authsize = authsize;
	crypto_aead_crt(cryptd_child)->authsize = authsize;
	return 0;
}
static int rfc4106_encrypt(struct aead_request *req)
{
	int ret;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

	if (!irq_fpu_usable()) {
		struct aead_request *cryptd_req =
			(struct aead_request *) aead_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_aead_encrypt(cryptd_req);
	} else {
		struct crypto_aead *cryptd_child =
			cryptd_aead_child(ctx->cryptd_tfm);
		kernel_fpu_begin();
		ret = cryptd_child->base.crt_aead.encrypt(req);
		kernel_fpu_end();
	}
	return ret;
}
static int rfc4106_decrypt(struct aead_request *req)
{
	int ret;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);

	if (!irq_fpu_usable()) {
		struct aead_request *cryptd_req =
			(struct aead_request *) aead_request_ctx(req);
		memcpy(cryptd_req, req, sizeof(*req));
		aead_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
		return crypto_aead_decrypt(cryptd_req);
	} else {
		struct crypto_aead *cryptd_child =
			cryptd_aead_child(ctx->cryptd_tfm);
		kernel_fpu_begin();
		ret = cryptd_child->base.crt_aead.decrypt(req);
		kernel_fpu_end();
	}
	return ret;
}
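
/*
 * The __driver_rfc4106_* routines below do the actual GCM work and expect to
 * run with the FPU available; the rfc4106_encrypt()/rfc4106_decrypt() entry
 * points above bounce the request to cryptd instead when the FPU cannot be
 * used in the current context.
 */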
static int __driver_rfc4106_encrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	__be32 counter = cpu_to_be32(1);
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv_tab[16+AESNI_ALIGN];
	u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_tab, AESNI_ALIGN);
	struct scatter_walk src_sg_walk;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers. We need to have the AAD length equal */
	/* to 8 or 12 bytes. */
	if (unlikely(req->assoclen != 8 && req->assoclen != 12))
		return -EINVAL;

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		scatterwalk_start(&assoc_sg_walk, req->assoc);
		src = scatterwalk_map(&src_sg_walk);
		assoc = scatterwalk_map(&assoc_sg_walk);
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk);
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		src = kmalloc(req->cryptlen + auth_tag_len + req->assoclen,
			GFP_ATOMIC);
		if (unlikely(!src))
			return -ENOMEM;
		assoc = (src + req->cryptlen + auth_tag_len);
		scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
		scatterwalk_map_and_copy(assoc, req->assoc, 0,
					 req->assoclen, 0);
		dst = src;
	}

	aesni_gcm_enc(aes_ctx, dst, src, (unsigned long)req->cryptlen, iv,
		ctx->hash_subkey, assoc, (unsigned long)req->assoclen, dst
		+ ((unsigned long)req->cryptlen), auth_tag_len);

	/* The authTag (aka the Integrity Check Value) needs to be written
	 * back to the packet. */
	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst);
			scatterwalk_done(&dst_sg_walk, 0, 0);
		}
		scatterwalk_unmap(src);
		scatterwalk_unmap(assoc);
		scatterwalk_done(&src_sg_walk, 0, 0);
		scatterwalk_done(&assoc_sg_walk, 0, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, 0,
			req->cryptlen + auth_tag_len, 1);
		kfree(src);
	}
	return 0;
}
static int __driver_rfc4106_decrypt(struct aead_request *req)
{
	u8 one_entry_in_sg = 0;
	u8 *src, *dst, *assoc;
	unsigned long tempCipherLen = 0;
	__be32 counter = cpu_to_be32(1);
	int retval = 0;
	struct crypto_aead *tfm = crypto_aead_reqtfm(req);
	struct aesni_rfc4106_gcm_ctx *ctx = aesni_rfc4106_gcm_ctx_get(tfm);
	void *aes_ctx = &(ctx->aes_key_expanded);
	unsigned long auth_tag_len = crypto_aead_authsize(tfm);
	u8 iv_and_authTag[32+AESNI_ALIGN];
	u8 *iv = (u8 *) PTR_ALIGN((u8 *)iv_and_authTag, AESNI_ALIGN);
	u8 *authTag = iv + 16;
	struct scatter_walk src_sg_walk;
	struct scatter_walk assoc_sg_walk;
	struct scatter_walk dst_sg_walk;
	unsigned int i;

	if (unlikely((req->cryptlen < auth_tag_len) ||
		(req->assoclen != 8 && req->assoclen != 12)))
		return -EINVAL;
	/* Assuming we are supporting rfc4106 64-bit extended */
	/* sequence numbers. We need to have the AAD length */
	/* equal to 8 or 12 bytes. */

	tempCipherLen = (unsigned long)(req->cryptlen - auth_tag_len);

	for (i = 0; i < 4; i++)
		*(iv+i) = ctx->nonce[i];
	for (i = 0; i < 8; i++)
		*(iv+4+i) = req->iv[i];
	*((__be32 *)(iv+12)) = counter;

	if ((sg_is_last(req->src)) && (sg_is_last(req->assoc))) {
		one_entry_in_sg = 1;
		scatterwalk_start(&src_sg_walk, req->src);
		scatterwalk_start(&assoc_sg_walk, req->assoc);
		src = scatterwalk_map(&src_sg_walk);
		assoc = scatterwalk_map(&assoc_sg_walk);
		dst = src;
		if (unlikely(req->src != req->dst)) {
			scatterwalk_start(&dst_sg_walk, req->dst);
			dst = scatterwalk_map(&dst_sg_walk);
		}
	} else {
		/* Allocate memory for src, dst, assoc */
		src = kmalloc(req->cryptlen + req->assoclen, GFP_ATOMIC);
		if (!src)
			return -ENOMEM;
		assoc = (src + req->cryptlen + auth_tag_len);
		scatterwalk_map_and_copy(src, req->src, 0, req->cryptlen, 0);
		scatterwalk_map_and_copy(assoc, req->assoc, 0,
					 req->assoclen, 0);
		dst = src;
	}

	aesni_gcm_dec(aes_ctx, dst, src, tempCipherLen, iv,
		ctx->hash_subkey, assoc, (unsigned long)req->assoclen,
		authTag, auth_tag_len);

	/* Compare generated tag with passed in tag. */
	retval = memcmp(src + tempCipherLen, authTag, auth_tag_len) ?
		-EBADMSG : 0;

	if (one_entry_in_sg) {
		if (unlikely(req->src != req->dst)) {
			scatterwalk_unmap(dst);
			scatterwalk_done(&dst_sg_walk, 0, 0);
		}
		scatterwalk_unmap(src);
		scatterwalk_unmap(assoc);
		scatterwalk_done(&src_sg_walk, 0, 0);
		scatterwalk_done(&assoc_sg_walk, 0, 0);
	} else {
		scatterwalk_map_and_copy(dst, req->dst, 0, req->cryptlen, 1);
		kfree(src);
	}
	return retval;
}
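
/*
 * Algorithm registrations. The "__"-prefixed entries are the internal
 * synchronous implementations; the user-visible entries (priority 400) wrap
 * them asynchronously through the cryptd/ablk helpers.
 */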
static struct crypto_alg aesni_algs[] = { {
	.cra_name		= "aes",
	.cra_driver_name	= "aes-aesni",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= aes_encrypt,
			.cia_decrypt		= aes_decrypt
		}
	}
}, {
	.cra_name		= "__aes-aesni",
	.cra_driver_name	= "__driver-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_module		= THIS_MODULE,
	.cra_u	= {
		.cipher	= {
			.cia_min_keysize	= AES_MIN_KEY_SIZE,
			.cia_max_keysize	= AES_MAX_KEY_SIZE,
			.cia_setkey		= aes_set_key,
			.cia_encrypt		= __aes_encrypt,
			.cia_decrypt		= __aes_decrypt
		}
	}
}, {
	.cra_name		= "__ecb-aes-aesni",
	.cra_driver_name	= "__driver-ecb-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-aes-aesni",
	.cra_driver_name	= "__driver-cbc-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(aes)",
	.cra_driver_name	= "ecb-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ecb_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(aes)",
	.cra_driver_name	= "cbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_cbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#ifdef CONFIG_X86_64
}, {
	.cra_name		= "__ctr-aes-aesni",
	.cra_driver_name	= "__driver-ctr-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct crypto_aes_ctx) +
				  AESNI_ALIGN - 1,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= aes_set_key,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "ctr(aes)",
	.cra_driver_name	= "ctr-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_ctr_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
		},
	},
}, {
	.cra_name		= "__gcm-aes-aesni",
	.cra_driver_name	= "__driver-gcm-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_AEAD,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx) +
				  AESNI_ALIGN,
	.cra_type		= &crypto_aead_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.aead = {
			.encrypt	= __driver_rfc4106_encrypt,
			.decrypt	= __driver_rfc4106_decrypt,
		},
	},
}, {
	.cra_name		= "rfc4106(gcm(aes))",
	.cra_driver_name	= "rfc4106-gcm-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_AEAD | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct aesni_rfc4106_gcm_ctx) +
				  AESNI_ALIGN,
	.cra_type		= &crypto_nivaead_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= rfc4106_init,
	.cra_exit		= rfc4106_exit,
	.cra_u = {
		.aead = {
			.setkey		= rfc4106_set_key,
			.setauthsize	= rfc4106_set_authsize,
			.encrypt	= rfc4106_encrypt,
			.decrypt	= rfc4106_decrypt,
			.ivsize		= 8,
			.maxauthsize	= 16,
		},
	},
#endif
#ifdef HAS_PCBC
}, {
	.cra_name		= "pcbc(aes)",
	.cra_driver_name	= "pcbc-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_pcbc_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
#endif
}, {
	.cra_name		= "__lrw-aes-aesni",
	.cra_driver_name	= "__driver-lrw-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_lrw_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_aesni_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= lrw_aesni_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-aes-aesni",
	.cra_driver_name	= "__driver-xts-aes-aesni",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct aesni_xts_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= xts_aesni_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "lrw(aes)",
	.cra_driver_name	= "lrw-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
			.max_keysize	= AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(aes)",
	.cra_driver_name	= "xts-aes-aesni",
	.cra_priority		= 400,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= AES_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= 2 * AES_MIN_KEY_SIZE,
			.max_keysize	= 2 * AES_MAX_KEY_SIZE,
			.ivsize		= AES_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };
static const struct x86_cpu_id aesni_cpu_id[] = {
	X86_FEATURE_MATCH(X86_FEATURE_AES),
	{}
};
MODULE_DEVICE_TABLE(x86cpu, aesni_cpu_id);
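
/*
 * Module init/exit: bail out on CPUs without the AES-NI feature flag,
 * register the FPU blkcipher template helper, then register the whole
 * algorithm table above.
 */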
static int __init aesni_init(void)
{
	int err;

	if (!x86_match_cpu(aesni_cpu_id))
		return -ENODEV;

	err = crypto_fpu_init();
	if (err)
		return err;

	return crypto_register_algs(aesni_algs, ARRAY_SIZE(aesni_algs));
}
static void __exit aesni_exit(void)
{
	crypto_unregister_algs(aesni_algs, ARRAY_SIZE(aesni_algs));

	crypto_fpu_exit();
}
module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");