/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in intel-aes_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <asm/i387.h>
#include <asm/aes.h>
#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif
struct async_aes_ctx {
        struct cryptd_ablkcipher *cryptd_tfm;
};
#define AESNI_ALIGN     16
#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))
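
/*
 * Routines implemented in assembly. All of them use SSE registers and
 * therefore may only be called between kernel_fpu_begin() and
 * kernel_fpu_end(), with the context pointer aligned to AESNI_ALIGN.
 */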
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
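
/*
 * Returns true if an interrupt arrived while the FPU was in use
 * (in_interrupt() with CR0.TS clear): in that case the SSE state must
 * not be clobbered, kernel_fpu_begin() cannot be called safely, and
 * callers have to take a non-AES-NI fallback path.
 */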
static inline int kernel_fpu_using(void)
{
        if (in_interrupt() && !(read_cr0() & X86_CR0_TS))
                return 1;
        return 0;
}
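
/*
 * cra_ctxsize reserves AESNI_ALIGN - 1 bytes of slack; aes_ctx() rounds
 * the raw context pointer up so that the key schedule meets the
 * assembly's 16-byte alignment requirement, unless the crypto API
 * already guarantees a sufficient alignment.
 */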
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}
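
/*
 * Key setup runs the assembly routine when the FPU is usable and falls
 * back to the generic crypto_aes_expand_key() otherwise; bad key lengths
 * are flagged on the tfm.
 */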
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (kernel_fpu_using())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}
static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}
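
/*
 * Single-block cipher operations. In interrupt context with the FPU busy
 * they fall back to the x86 assembler implementation of the generic AES
 * cipher; otherwise they use the AES-NI instructions directly.
 */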
static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (kernel_fpu_using())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}
static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (kernel_fpu_using())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}
static struct crypto_alg aesni_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(aesni_alg.cra_list),
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
};
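
/*
 * FPU-less variants reachable only through the internal "__aes-aesni"
 * cipher: the caller (e.g. the fpu() template wrapped around the
 * ctr/lrw/pcbc/xts instantiations below) is responsible for
 * kernel_fpu_begin()/kernel_fpu_end().
 */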
static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}
static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}
static struct crypto_alg __aesni_alg = {
        .cra_name               = "__aes-aesni",
        .cra_driver_name        = "__driver-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(__aesni_alg.cra_list),
        .cra_u  = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = __aes_encrypt,
                        .cia_decrypt            = __aes_decrypt
                }
        }
};
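
/*
 * Synchronous ECB/CBC handlers. blkcipher_walk hands back virtually
 * contiguous chunks; each chunk is processed in one assembly call
 * covering all whole blocks (nbytes & AES_BLOCK_MASK), inside a single
 * kernel_fpu_begin()/kernel_fpu_end() section around the whole walk.
 */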
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static struct crypto_alg blk_ecb_alg = {
        .cra_name               = "__ecb-aes-aesni",
        .cra_driver_name        = "__driver-ecb-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
};
static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}
static struct crypto_alg blk_cbc_alg = {
        .cra_name               = "__cbc-aes-aesni",
        .cra_driver_name        = "__driver-cbc-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
};
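
/*
 * Async wrappers exported as "ecb(aes)", "cbc(aes)", etc. Key material
 * is forwarded to the cryptd child transform, mirroring the request and
 * result flag bits between parent and child.
 */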
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                        unsigned int key_len)
{
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
                                    & CRYPTO_TFM_REQ_MASK);
        err = crypto_ablkcipher_setkey(child, key, key_len);
        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
                                    & CRYPTO_TFM_RES_MASK);
        return err;
}
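
/*
 * If the FPU is currently unusable, bounce the request to the cryptd
 * workqueue (the nested request lives in the extra reqsize reserved by
 * ablk_init_common()); otherwise run the underlying blkcipher
 * synchronously in the caller's context.
 */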
static int ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (kernel_fpu_using()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->encrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}
static int ablk_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (kernel_fpu_using()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_decrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->decrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}
static void ablk_exit(struct crypto_tfm *tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ablkcipher(ctx->cryptd_tfm);
}
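
/*
 * Common init: stash the cryptd transform and reserve enough request
 * context for the nested ablkcipher_request used on the async fallback
 * path in ablk_encrypt()/ablk_decrypt().
 */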
static void ablk_init_common(struct crypto_tfm *tfm,
                             struct cryptd_ablkcipher *cryptd_tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}
static int ablk_ecb_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_ecb_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
        .cra_init               = ablk_ecb_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
static int ablk_cbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_cbc_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
        .cra_init               = ablk_cbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
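
/*
 * The remaining modes reuse generic templates (ctr, lrw, pcbc, xts)
 * instantiated over the internal "__aes-aesni" cipher; the fpu()
 * template wraps each instantiation so the FPU state is saved and
 * restored once per request rather than once per block. Each mode is
 * only provided when the corresponding template is configured, as
 * tested by the HAS_* defines above.
 */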
#ifdef HAS_CTR
static int ablk_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(ctr(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_ctr_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
        .cra_init               = ablk_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_lrw_alg = {
        .cra_name               = "lrw(aes)",
        .cra_driver_name        = "lrw-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
        .cra_init               = ablk_lrw_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_pcbc_alg = {
        .cra_name               = "pcbc(aes)",
        .cra_driver_name        = "pcbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
        .cra_init               = ablk_pcbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}
static struct crypto_alg ablk_xts_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
        .cra_init               = ablk_xts_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif
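
/*
 * Register the algorithms from the innermost building blocks outwards;
 * on failure, unwind in reverse order. The mode wrappers are registered
 * only when the corresponding generic template is configured.
 */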
static int __init aesni_init(void)
{
        int err;

        if (!cpu_has_aes) {
                printk(KERN_ERR "Intel AES-NI instructions are not detected.\n");
                return -ENODEV;
        }
        if ((err = crypto_register_alg(&aesni_alg)))
                goto aes_err;
        if ((err = crypto_register_alg(&__aesni_alg)))
                goto __aes_err;
        if ((err = crypto_register_alg(&blk_ecb_alg)))
                goto blk_ecb_err;
        if ((err = crypto_register_alg(&blk_cbc_alg)))
                goto blk_cbc_err;
        if ((err = crypto_register_alg(&ablk_ecb_alg)))
                goto ablk_ecb_err;
        if ((err = crypto_register_alg(&ablk_cbc_alg)))
                goto ablk_cbc_err;
#ifdef HAS_CTR
        if ((err = crypto_register_alg(&ablk_ctr_alg)))
                goto ablk_ctr_err;
#endif
#ifdef HAS_LRW
        if ((err = crypto_register_alg(&ablk_lrw_alg)))
                goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
        if ((err = crypto_register_alg(&ablk_pcbc_alg)))
                goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
        if ((err = crypto_register_alg(&ablk_xts_alg)))
                goto ablk_xts_err;
#endif

        return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
#endif
        crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
        crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
        crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
        crypto_unregister_alg(&__aesni_alg);
__aes_err:
        crypto_unregister_alg(&aesni_alg);
aes_err:
        return err;
}
static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
        crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_ctr_alg);
#endif
        crypto_unregister_alg(&ablk_cbc_alg);
        crypto_unregister_alg(&ablk_ecb_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
        crypto_unregister_alg(&__aesni_alg);
        crypto_unregister_alg(&aesni_alg);
}
module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
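
/*
 * Usage sketch (illustrative, not part of this driver): a kernel user
 * picks up these implementations through the generic algorithm names;
 * the crypto core prefers them over aes-generic because of their higher
 * cra_priority. The variable names and the 16-byte key length below are
 * arbitrary, and error handling is trimmed:
 *
 *      struct crypto_ablkcipher *tfm;
 *      int err;
 *
 *      tfm = crypto_alloc_ablkcipher("cbc(aes)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *      err = crypto_ablkcipher_setkey(tfm, key, 16);
 *      ...
 *      crypto_free_ablkcipher(tfm);
 */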