/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code, the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

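/* Per-transform context of the asynchronous (cryptd backed) algorithms. */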
struct async_aes_ctx {
        struct cryptd_ablkcipher *cryptd_tfm;
};

#define AESNI_ALIGN 16
#define AES_BLOCK_MASK (~(AES_BLOCK_SIZE-1))

asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

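/*
 * The AES key schedule is stored unaligned in the tfm context and aligned
 * by hand to AESNI_ALIGN (16 bytes), since the crypto API only guarantees
 * crypto_tfm_ctx_alignment(). This is why the cipher algorithms below
 * reserve AESNI_ALIGN - 1 extra bytes in cra_ctxsize.
 */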
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

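/*
 * The AES-NI instructions use SSE registers, so they may only run between
 * kernel_fpu_begin() and kernel_fpu_end(). When called from a context where
 * that is not possible (irq_fpu_usable() is false), the code falls back to
 * implementations that do not touch the FPU: crypto_aes_expand_key() and the
 * block functions from <asm/aes.h>.
 */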
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (!irq_fpu_usable())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}

static struct crypto_alg aesni_alg = {
        .cra_name = "aes",
        .cra_driver_name = "aes-aesni",
        .cra_priority = 300,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask = 0,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(aesni_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = AES_MIN_KEY_SIZE,
                        .cia_max_keysize = AES_MAX_KEY_SIZE,
                        .cia_setkey = aes_set_key,
                        .cia_encrypt = aes_encrypt,
                        .cia_decrypt = aes_decrypt
                }
        }
};

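/*
 * "__aes-aesni" is an internal cipher that always uses the AES-NI
 * instructions without checking irq_fpu_usable(). It is only meant to be
 * used by templates (see the fpu(lrw(...)), fpu(pcbc(...)) and
 * fpu(xts(...)) instantiations below) that save and restore the FPU state
 * around the whole operation.
 */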
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}

static struct crypto_alg __aesni_alg = {
        .cra_name = "__aes-aesni",
        .cra_driver_name = "__driver-aes-aesni",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask = 0,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(__aesni_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize = AES_MIN_KEY_SIZE,
                        .cia_max_keysize = AES_MAX_KEY_SIZE,
                        .cia_setkey = aes_set_key,
                        .cia_encrypt = __aes_encrypt,
                        .cia_decrypt = __aes_decrypt
                }
        }
};

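/*
 * Synchronous blkcipher implementations ("__ecb-aes-aesni" and friends).
 * They run the whole scatterlist walk inside kernel_fpu_begin()/
 * kernel_fpu_end() and therefore clear CRYPTO_TFM_REQ_MAY_SLEEP, since
 * preemption is disabled while the FPU is in use. The "ecb(aes)",
 * "cbc(aes)" and "ctr(aes)" algorithms exposed to users are the
 * asynchronous ablkcipher wrappers further below.
 */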
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_ecb_alg = {
        .cra_name = "__ecb-aes-aesni",
        .cra_driver_name = "__driver-ecb-aes-aesni",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = aes_set_key,
                        .encrypt = ecb_encrypt,
                        .decrypt = ecb_decrypt,
                },
        },
};

static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_cbc_alg = {
        .cra_name = "__cbc-aes-aesni",
        .cra_driver_name = "__driver-cbc-aes-aesni",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = aes_set_key,
                        .encrypt = cbc_encrypt,
                        .decrypt = cbc_decrypt,
                },
        },
};

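/*
 * CTR mode behaves as a stream cipher: a trailing partial block is handled
 * by encrypting the counter once more and XORing only the remaining bytes
 * of keystream into the data.
 */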
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                            struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        aesni_enc(ctx, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct blkcipher_desc *desc,
                     struct scatterlist *dst, struct scatterlist *src,
                     unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_ctr_alg = {
        .cra_name = "__ctr-aes-aesni",
        .cra_driver_name = "__driver-ctr-aes-aesni",
        .cra_priority = 0,
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask = 0,
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = aes_set_key,
                        .encrypt = ctr_crypt,
                        .decrypt = ctr_crypt,
                },
        },
};

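/*
 * Asynchronous ablkcipher wrappers. When the FPU is usable the request is
 * handled synchronously by the underlying "__driver-*-aes-aesni"
 * blkcipher; otherwise it is forwarded to cryptd, whose worker thread runs
 * in a context where the FPU can be used.
 */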
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                        unsigned int key_len)
{
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
                                    & CRYPTO_TFM_REQ_MASK);
        err = crypto_ablkcipher_setkey(child, key, key_len);
        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
                                    & CRYPTO_TFM_RES_MASK);

        return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->encrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_decrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->decrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static void ablk_exit(struct crypto_tfm *tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

static void ablk_init_common(struct crypto_tfm *tfm,
                             struct cryptd_ablkcipher *cryptd_tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ecb_alg = {
        .cra_name = "ecb(aes)",
        .cra_driver_name = "ecb-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
        .cra_init = ablk_ecb_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_cbc_alg = {
        .cra_name = "cbc(aes)",
        .cra_driver_name = "cbc-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
        .cra_init = ablk_cbc_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
};

static int ablk_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ctr_alg = {
        .cra_name = "ctr(aes)",
        .cra_driver_name = "ctr-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
        .cra_init = ablk_ctr_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        /* CTR decryption is the same operation as encryption */
                        .decrypt = ablk_encrypt,
                        .geniv = "chainiv",
                },
        },
};

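/*
 * The remaining modes reuse generic templates: rfc3686() around the AES-NI
 * CTR implementation, and lrw()/pcbc()/xts() around the bare
 * "__driver-aes-aesni" cipher wrapped in fpu(), so the template runs with
 * the FPU enabled. They are only built when the corresponding generic mode
 * is configured (HAS_CTR, HAS_LRW, HAS_PCBC, HAS_XTS).
 */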
#ifdef HAS_CTR
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher(
                "rfc3686(__driver-ctr-aes-aesni)", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_rfc3686_ctr_alg = {
        .cra_name = "rfc3686(ctr(aes))",
        .cra_driver_name = "rfc3686-ctr-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
        .cra_init = ablk_rfc3686_ctr_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .ivsize = CTR_RFC3686_IV_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                        .geniv = "seqiv",
                },
        },
};
#endif

#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_lrw_alg = {
        .cra_name = "lrw(aes)",
        .cra_driver_name = "lrw-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
        .cra_init = ablk_lrw_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
        .cra_name = "pcbc(aes)",
        .cra_driver_name = "pcbc-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
        .cra_init = ablk_pcbc_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_xts_alg = {
        .cra_name = "xts(aes)",
        .cra_driver_name = "xts-aes-aesni",
        .cra_priority = 400,
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize = AES_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_aes_ctx),
        .cra_alignmask = 0,
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_list = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
        .cra_init = ablk_xts_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize = 2 * AES_MAX_KEY_SIZE,
                        .ivsize = AES_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
};
#endif

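/*
 * Module init: probe for the AES-NI CPU feature, then register all
 * algorithms. On failure, everything registered so far is unwound in
 * reverse order through the error labels.
 */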
static int __init aesni_init(void)
{
        int err;

        if (!cpu_has_aes) {
                printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
                return -ENODEV;
        }
        if ((err = crypto_register_alg(&aesni_alg)))
                goto aes_err;
        if ((err = crypto_register_alg(&__aesni_alg)))
                goto __aes_err;
        if ((err = crypto_register_alg(&blk_ecb_alg)))
                goto blk_ecb_err;
        if ((err = crypto_register_alg(&blk_cbc_alg)))
                goto blk_cbc_err;
        if ((err = crypto_register_alg(&blk_ctr_alg)))
                goto blk_ctr_err;
        if ((err = crypto_register_alg(&ablk_ecb_alg)))
                goto ablk_ecb_err;
        if ((err = crypto_register_alg(&ablk_cbc_alg)))
                goto ablk_cbc_err;
        if ((err = crypto_register_alg(&ablk_ctr_alg)))
                goto ablk_ctr_err;
#ifdef HAS_CTR
        if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
                goto ablk_rfc3686_ctr_err;
#endif
#ifdef HAS_LRW
        if ((err = crypto_register_alg(&ablk_lrw_alg)))
                goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
        if ((err = crypto_register_alg(&ablk_pcbc_alg)))
                goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
        if ((err = crypto_register_alg(&ablk_xts_alg)))
                goto ablk_xts_err;
#endif

        return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
        crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
        crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
        crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
        crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
        crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
        crypto_unregister_alg(&__aesni_alg);
__aes_err:
        crypto_unregister_alg(&aesni_alg);
aes_err:
        return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
        crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
#endif
        crypto_unregister_alg(&ablk_ctr_alg);
        crypto_unregister_alg(&ablk_cbc_alg);
        crypto_unregister_alg(&ablk_ecb_alg);
        crypto_unregister_alg(&blk_ctr_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
        crypto_unregister_alg(&__aesni_alg);
        crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");