arch/x86/crypto/aesni-intel_glue.c
/*
 * Support for Intel AES-NI instructions. This file contains glue
 * code; the real AES implementation is in aesni-intel_asm.S.
 *
 * Copyright (C) 2008, Intel Corp.
 *    Author: Huang Ying <ying.huang@intel.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */

#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/module.h>	/* module_init()/module_exit(), MODULE_*() */
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/aes.h>
#include <crypto/cryptd.h>
#include <crypto/ctr.h>
#include <asm/i387.h>
#include <asm/aes.h>

#if defined(CONFIG_CRYPTO_CTR) || defined(CONFIG_CRYPTO_CTR_MODULE)
#define HAS_CTR
#endif

#if defined(CONFIG_CRYPTO_LRW) || defined(CONFIG_CRYPTO_LRW_MODULE)
#define HAS_LRW
#endif

#if defined(CONFIG_CRYPTO_PCBC) || defined(CONFIG_CRYPTO_PCBC_MODULE)
#define HAS_PCBC
#endif

#if defined(CONFIG_CRYPTO_XTS) || defined(CONFIG_CRYPTO_XTS_MODULE)
#define HAS_XTS
#endif

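/*
 * HAS_CTR/HAS_LRW/HAS_PCBC/HAS_XTS record whether the corresponding generic
 * mode template is available (built in or as a module), so the aesni-backed
 * variants of those modes are only registered when they can actually be used.
 */
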
struct async_aes_ctx {
        struct cryptd_ablkcipher *cryptd_tfm;
};

#define AESNI_ALIGN     16
#define AES_BLOCK_MASK  (~(AES_BLOCK_SIZE-1))

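/*
 * Low-level routines implemented in assembler (aesni-intel_asm.S).  They use
 * the XMM registers, so callers must bracket them with kernel_fpu_begin()
 * and kernel_fpu_end().
 */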
asmlinkage int aesni_set_key(struct crypto_aes_ctx *ctx, const u8 *in_key,
                             unsigned int key_len);
asmlinkage void aesni_enc(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_dec(struct crypto_aes_ctx *ctx, u8 *out,
                          const u8 *in);
asmlinkage void aesni_ecb_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_ecb_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len);
asmlinkage void aesni_cbc_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_cbc_dec(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);
asmlinkage void aesni_ctr_enc(struct crypto_aes_ctx *ctx, u8 *out,
                              const u8 *in, unsigned int len, u8 *iv);

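/*
 * The AES context is allocated AESNI_ALIGN - 1 bytes larger than necessary
 * (see the .cra_ctxsize initialisers below); aes_ctx() rounds the pointer up
 * to the 16-byte alignment required by the SSE instructions.
 */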
static inline struct crypto_aes_ctx *aes_ctx(void *raw_ctx)
{
        unsigned long addr = (unsigned long)raw_ctx;
        unsigned long align = AESNI_ALIGN;

        if (align <= crypto_tfm_ctx_alignment())
                align = 1;
        return (struct crypto_aes_ctx *)ALIGN(addr, align);
}

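/*
 * Key expansion: when the FPU cannot be used (for example in an interrupt
 * that preempted other FPU-using code) fall back to the C key schedule;
 * otherwise expand the key with AES-NI under kernel_fpu_begin()/end().
 */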
static int aes_set_key_common(struct crypto_tfm *tfm, void *raw_ctx,
                              const u8 *in_key, unsigned int key_len)
{
        struct crypto_aes_ctx *ctx = aes_ctx(raw_ctx);
        u32 *flags = &tfm->crt_flags;
        int err;

        if (key_len != AES_KEYSIZE_128 && key_len != AES_KEYSIZE_192 &&
            key_len != AES_KEYSIZE_256) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        if (!irq_fpu_usable())
                err = crypto_aes_expand_key(ctx, in_key, key_len);
        else {
                kernel_fpu_begin();
                err = aesni_set_key(ctx, in_key, key_len);
                kernel_fpu_end();
        }

        return err;
}

static int aes_set_key(struct crypto_tfm *tfm, const u8 *in_key,
                       unsigned int key_len)
{
        return aes_set_key_common(tfm, crypto_tfm_ctx(tfm), in_key, key_len);
}

static void aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_encrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_enc(ctx, dst, src);
                kernel_fpu_end();
        }
}

static void aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        if (!irq_fpu_usable())
                crypto_aes_decrypt_x86(ctx, dst, src);
        else {
                kernel_fpu_begin();
                aesni_dec(ctx, dst, src);
                kernel_fpu_end();
        }
}

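/*
 * Synchronous single-block cipher registered as "aes".  Its priority is
 * higher than the generic C and x86 assembler implementations, so it is
 * selected by default on AES-NI capable CPUs.
 */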
static struct crypto_alg aesni_alg = {
        .cra_name               = "aes",
        .cra_driver_name        = "aes-aesni",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(aesni_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = aes_encrypt,
                        .cia_decrypt            = aes_decrypt
                }
        }
};

static void __aes_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_enc(ctx, dst, src);
}

static void __aes_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_tfm_ctx(tfm));

        aesni_dec(ctx, dst, src);
}

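/*
 * Internal-only cipher ("__aes-aesni", priority 0): encrypt/decrypt use the
 * AES-NI instructions unconditionally, without the irq_fpu_usable() check,
 * so it may only be used by wrappers such as the "fpu" template that manage
 * the FPU state for the whole request.
 */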
static struct crypto_alg __aesni_alg = {
        .cra_name               = "__aes-aesni",
        .cra_driver_name        = "__driver-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(__aesni_alg.cra_list),
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = AES_MIN_KEY_SIZE,
                        .cia_max_keysize        = AES_MAX_KEY_SIZE,
                        .cia_setkey             = aes_set_key,
                        .cia_encrypt            = __aes_encrypt,
                        .cia_decrypt            = __aes_decrypt
                }
        }
};

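/*
 * Block-mode helpers.  CRYPTO_TFM_REQ_MAY_SLEEP is cleared because the data
 * is processed inside a kernel_fpu_begin()/kernel_fpu_end() section; whole
 * blocks are handed to the assembler routines and any sub-block remainder is
 * returned to blkcipher_walk_done().
 */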
static int ecb_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int ecb_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_ecb_alg = {
        .cra_name               = "__ecb-aes-aesni",
        .cra_driver_name        = "__driver-ecb-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
};

static int cbc_encrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static int cbc_decrypt(struct blkcipher_desc *desc,
                       struct scatterlist *dst, struct scatterlist *src,
                       unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes)) {
                aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_cbc_alg = {
        .cra_name               = "__cbc-aes-aesni",
        .cra_driver_name        = "__driver-cbc-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
};

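/*
 * Handle the final, partial block in CTR mode: encrypt the counter block to
 * produce the keystream, XOR it into the remaining bytes, then advance the
 * counter.
 */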
static void ctr_crypt_final(struct crypto_aes_ctx *ctx,
                            struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[AES_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        aesni_enc(ctx, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);
        crypto_inc(ctrblk, AES_BLOCK_SIZE);
}

static int ctr_crypt(struct blkcipher_desc *desc,
                     struct scatterlist *dst, struct scatterlist *src,
                     unsigned int nbytes)
{
        struct crypto_aes_ctx *ctx = aes_ctx(crypto_blkcipher_ctx(desc->tfm));
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;

        kernel_fpu_begin();
        while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
                aesni_ctr_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes & AES_BLOCK_MASK, walk.iv);
                nbytes &= AES_BLOCK_SIZE - 1;
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }
        if (walk.nbytes) {
                ctr_crypt_final(ctx, &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }
        kernel_fpu_end();

        return err;
}

static struct crypto_alg blk_ctr_alg = {
        .cra_name               = "__ctr-aes-aesni",
        .cra_driver_name        = "__driver-ctr-aes-aesni",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct crypto_aes_ctx)+AESNI_ALIGN-1,
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = aes_set_key,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
};

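/*
 * Asynchronous ("ablk") wrappers exported to users: when the FPU is usable
 * the request is handled synchronously through the underlying blkcipher;
 * otherwise it is handed to cryptd, whose worker runs in process context
 * where kernel_fpu_begin() is always available.
 */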
static int ablk_set_key(struct crypto_ablkcipher *tfm, const u8 *key,
                        unsigned int key_len)
{
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);
        struct crypto_ablkcipher *child = &ctx->cryptd_tfm->base;
        int err;

        crypto_ablkcipher_clear_flags(child, CRYPTO_TFM_REQ_MASK);
        crypto_ablkcipher_set_flags(child, crypto_ablkcipher_get_flags(tfm)
                                    & CRYPTO_TFM_REQ_MASK);
        err = crypto_ablkcipher_setkey(child, key, key_len);
        crypto_ablkcipher_set_flags(tfm, crypto_ablkcipher_get_flags(child)
                                    & CRYPTO_TFM_RES_MASK);
        return err;
}

static int ablk_encrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_encrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->encrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static int ablk_decrypt(struct ablkcipher_request *req)
{
        struct crypto_ablkcipher *tfm = crypto_ablkcipher_reqtfm(req);
        struct async_aes_ctx *ctx = crypto_ablkcipher_ctx(tfm);

        if (!irq_fpu_usable()) {
                struct ablkcipher_request *cryptd_req =
                        ablkcipher_request_ctx(req);
                memcpy(cryptd_req, req, sizeof(*req));
                ablkcipher_request_set_tfm(cryptd_req, &ctx->cryptd_tfm->base);
                return crypto_ablkcipher_decrypt(cryptd_req);
        } else {
                struct blkcipher_desc desc;
                desc.tfm = cryptd_ablkcipher_child(ctx->cryptd_tfm);
                desc.info = req->info;
                desc.flags = 0;
                return crypto_blkcipher_crt(desc.tfm)->decrypt(
                        &desc, req->dst, req->src, req->nbytes);
        }
}

static void ablk_exit(struct crypto_tfm *tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        cryptd_free_ablkcipher(ctx->cryptd_tfm);
}

static void ablk_init_common(struct crypto_tfm *tfm,
                             struct cryptd_ablkcipher *cryptd_tfm)
{
        struct async_aes_ctx *ctx = crypto_tfm_ctx(tfm);

        ctx->cryptd_tfm = cryptd_tfm;
        tfm->crt_ablkcipher.reqsize = sizeof(struct ablkcipher_request) +
                crypto_ablkcipher_reqsize(&cryptd_tfm->base);
}

static int ablk_ecb_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ecb-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ecb_alg = {
        .cra_name               = "ecb(aes)",
        .cra_driver_name        = "ecb-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ecb_alg.cra_list),
        .cra_init               = ablk_ecb_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};

static int ablk_cbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-cbc-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_cbc_alg = {
        .cra_name               = "cbc(aes)",
        .cra_driver_name        = "cbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_cbc_alg.cra_list),
        .cra_init               = ablk_cbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};

static int ablk_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("__driver-ctr-aes-aesni", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_ctr_alg = {
        .cra_name               = "ctr(aes)",
        .cra_driver_name        = "ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_ctr_alg.cra_list),
        .cra_init               = ablk_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_encrypt,
                        .geniv          = "chainiv",
                },
        },
};

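/*
 * Note: in ablk_ctr_alg above, .decrypt points at ablk_encrypt.  That is
 * correct for CTR mode, where decryption performs exactly the same
 * keystream-XOR operation as encryption.
 */
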
#ifdef HAS_CTR
static int ablk_rfc3686_ctr_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher(
                "rfc3686(__driver-ctr-aes-aesni)", 0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_rfc3686_ctr_alg = {
        .cra_name               = "rfc3686(ctr(aes))",
        .cra_driver_name        = "rfc3686-ctr-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_rfc3686_ctr_alg.cra_list),
        .cra_init               = ablk_rfc3686_ctr_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = AES_MIN_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .max_keysize = AES_MAX_KEY_SIZE+CTR_RFC3686_NONCE_SIZE,
                        .ivsize      = CTR_RFC3686_IV_SIZE,
                        .setkey      = ablk_set_key,
                        .encrypt     = ablk_encrypt,
                        .decrypt     = ablk_decrypt,
                        .geniv       = "seqiv",
                },
        },
};
#endif

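/*
 * lrw(aes), pcbc(aes) and xts(aes) are built from the generic mode templates
 * around the internal "__driver-aes-aesni" cipher, wrapped in the "fpu"
 * template so the whole request runs under a single kernel_fpu_begin()/end()
 * pair.
 */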
#ifdef HAS_LRW
static int ablk_lrw_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(lrw(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_lrw_alg = {
        .cra_name               = "lrw(aes)",
        .cra_driver_name        = "lrw-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_lrw_alg.cra_list),
        .cra_init               = ablk_lrw_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE + AES_BLOCK_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE + AES_BLOCK_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_PCBC
static int ablk_pcbc_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(pcbc(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_pcbc_alg = {
        .cra_name               = "pcbc(aes)",
        .cra_driver_name        = "pcbc-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_pcbc_alg.cra_list),
        .cra_init               = ablk_pcbc_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = AES_MIN_KEY_SIZE,
                        .max_keysize    = AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

#ifdef HAS_XTS
static int ablk_xts_init(struct crypto_tfm *tfm)
{
        struct cryptd_ablkcipher *cryptd_tfm;

        cryptd_tfm = cryptd_alloc_ablkcipher("fpu(xts(__driver-aes-aesni))",
                                             0, 0);
        if (IS_ERR(cryptd_tfm))
                return PTR_ERR(cryptd_tfm);
        ablk_init_common(tfm, cryptd_tfm);
        return 0;
}

static struct crypto_alg ablk_xts_alg = {
        .cra_name               = "xts(aes)",
        .cra_driver_name        = "xts-aes-aesni",
        .cra_priority           = 400,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER|CRYPTO_ALG_ASYNC,
        .cra_blocksize          = AES_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_aes_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(ablk_xts_alg.cra_list),
        .cra_init               = ablk_xts_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = 2 * AES_MIN_KEY_SIZE,
                        .max_keysize    = 2 * AES_MAX_KEY_SIZE,
                        .ivsize         = AES_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
};
#endif

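/*
 * Module init: bail out when the CPU does not advertise the AES-NI feature
 * flag, then register the algorithms in dependency order.  On failure the
 * labels below unwind exactly the registrations that already succeeded.
 */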
static int __init aesni_init(void)
{
        int err;

        if (!cpu_has_aes) {
                printk(KERN_INFO "Intel AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if ((err = crypto_register_alg(&aesni_alg)))
                goto aes_err;
        if ((err = crypto_register_alg(&__aesni_alg)))
                goto __aes_err;
        if ((err = crypto_register_alg(&blk_ecb_alg)))
                goto blk_ecb_err;
        if ((err = crypto_register_alg(&blk_cbc_alg)))
                goto blk_cbc_err;
        if ((err = crypto_register_alg(&blk_ctr_alg)))
                goto blk_ctr_err;
        if ((err = crypto_register_alg(&ablk_ecb_alg)))
                goto ablk_ecb_err;
        if ((err = crypto_register_alg(&ablk_cbc_alg)))
                goto ablk_cbc_err;
        if ((err = crypto_register_alg(&ablk_ctr_alg)))
                goto ablk_ctr_err;
#ifdef HAS_CTR
        if ((err = crypto_register_alg(&ablk_rfc3686_ctr_alg)))
                goto ablk_rfc3686_ctr_err;
#endif
#ifdef HAS_LRW
        if ((err = crypto_register_alg(&ablk_lrw_alg)))
                goto ablk_lrw_err;
#endif
#ifdef HAS_PCBC
        if ((err = crypto_register_alg(&ablk_pcbc_alg)))
                goto ablk_pcbc_err;
#endif
#ifdef HAS_XTS
        if ((err = crypto_register_alg(&ablk_xts_alg)))
                goto ablk_xts_err;
#endif

        return err;

#ifdef HAS_XTS
ablk_xts_err:
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
ablk_pcbc_err:
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
ablk_lrw_err:
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
ablk_rfc3686_ctr_err:
#endif
        crypto_unregister_alg(&ablk_ctr_alg);
ablk_ctr_err:
        crypto_unregister_alg(&ablk_cbc_alg);
ablk_cbc_err:
        crypto_unregister_alg(&ablk_ecb_alg);
ablk_ecb_err:
        crypto_unregister_alg(&blk_ctr_alg);
blk_ctr_err:
        crypto_unregister_alg(&blk_cbc_alg);
blk_cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
blk_ecb_err:
        crypto_unregister_alg(&__aesni_alg);
__aes_err:
        crypto_unregister_alg(&aesni_alg);
aes_err:
        return err;
}

static void __exit aesni_exit(void)
{
#ifdef HAS_XTS
        crypto_unregister_alg(&ablk_xts_alg);
#endif
#ifdef HAS_PCBC
        crypto_unregister_alg(&ablk_pcbc_alg);
#endif
#ifdef HAS_LRW
        crypto_unregister_alg(&ablk_lrw_alg);
#endif
#ifdef HAS_CTR
        crypto_unregister_alg(&ablk_rfc3686_ctr_alg);
#endif
        crypto_unregister_alg(&ablk_ctr_alg);
        crypto_unregister_alg(&ablk_cbc_alg);
        crypto_unregister_alg(&ablk_ecb_alg);
        crypto_unregister_alg(&blk_ctr_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
        crypto_unregister_alg(&__aesni_alg);
        crypto_unregister_alg(&aesni_alg);
}

module_init(aesni_init);
module_exit(aesni_exit);

MODULE_DESCRIPTION("Rijndael (AES) Cipher Algorithm, Intel AES-NI instructions optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("aes");