arch/x86/crypto/twofish_avx_glue.c (blob 2047a562f6b3f0f8729d95442e1769a35fa2bad6)
/*
 * Glue Code for AVX assembler version of Twofish Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/i387.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/twofish.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/scatterwalk.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>

#define TWOFISH_PARALLEL_BLOCKS 8
/* 8-way parallel cipher functions */
asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
EXPORT_SYMBOL_GPL(twofish_ecb_enc_8way);

asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
EXPORT_SYMBOL_GPL(twofish_ecb_dec_8way);

asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
EXPORT_SYMBOL_GPL(twofish_cbc_dec_8way);

asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
				 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(twofish_ctr_8way);

asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(twofish_xts_enc_8way);

asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(twofish_xts_dec_8way);
static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, false);
}

void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(twofish_enc_blk));
}
EXPORT_SYMBOL_GPL(twofish_xts_enc);

void twofish_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(twofish_dec_blk));
}
EXPORT_SYMBOL_GPL(twofish_xts_dec);
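
/*
 * Dispatch tables for the common glue helper: each entry pairs a batch
 * size (8-way AVX, 3-way, or single block) with the routine that
 * handles it, ordered largest batch first so the helper uses the
 * widest path that still fits the remaining data.
 */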
static const struct common_glue_ctx twofish_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
	} }
};
static const struct common_glue_ctx twofish_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_ctr_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr) }
	} }
};
static const struct common_glue_ctx twofish_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc) }
	} }
};
static const struct common_glue_ctx twofish_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
	} }
};
static const struct common_glue_ctx twofish_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_cbc_dec_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk_cbc_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk) }
	} }
};
static const struct common_glue_ctx twofish_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec) }
	} }
};
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(twofish_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&twofish_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);
}

static inline bool twofish_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(TF_BLOCK_SIZE, TWOFISH_PARALLEL_BLOCKS, NULL,
			      fpu_enabled, nbytes);
}

static inline void twofish_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}
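
/*
 * Per-request state passed to the LRW callbacks below; fpu_enabled
 * records whether the FPU/AVX context has already been claimed so it
 * is acquired at most once and released after the whole walk.
 */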
struct crypt_priv {
	struct twofish_ctx *ctx;
	bool fpu_enabled;
};
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
		twofish_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
		twofish_enc_blk_3way(ctx->ctx, srcdst, srcdst);

	nbytes %= bsize * 3;

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
		twofish_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
		twofish_dec_blk_3way(ctx->ctx, srcdst, srcdst);

	nbytes %= bsize * 3;

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_dec_blk(ctx->ctx, srcdst, srcdst);
}
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->twofish_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->twofish_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&twofish_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(twofish_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&twofish_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(twofish_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
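
/*
 * Ten algorithm definitions: five internal synchronous "__driver-*"
 * blkciphers (ECB, CBC, CTR, LRW, XTS) plus five CRYPTO_ALG_ASYNC
 * ablkcipher front ends built on the ablk_helper/cryptd wrappers,
 * which is what user-visible requests actually resolve to.
 */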
static struct crypto_alg twofish_algs[10] = { {
	.cra_name = "__ecb-twofish-avx",
	.cra_driver_name = "__driver-ecb-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "__cbc-twofish-avx",
	.cra_driver_name = "__driver-cbc-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	.cra_name = "__ctr-twofish-avx",
	.cra_driver_name = "__driver-ctr-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	.cra_name = "__lrw-twofish-avx",
	.cra_driver_name = "__driver-lrw-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_twofish_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = lrw_twofish_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	.cra_name = "__xts-twofish-avx",
	.cra_driver_name = "__driver-xts-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = xts_twofish_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	.cra_name = "ecb(twofish)",
	.cra_driver_name = "ecb-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "cbc(twofish)",
	.cra_driver_name = "cbc-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "ctr(twofish)",
	.cra_driver_name = "ctr-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt,
			.geniv = "chainiv",
		},
	},
}, {
	.cra_name = "lrw(twofish)",
	.cra_driver_name = "lrw-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "xts(twofish)",
	.cra_driver_name = "xts-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };
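
/*
 * Module init: bail out unless the CPU advertises AVX and OSXSAVE and
 * the OS has enabled SSE and YMM state saving in XCR0, since the 8-way
 * assembler paths depend on usable AVX registers.
 */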
static int __init twofish_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		printk(KERN_INFO "AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		printk(KERN_INFO "AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

static void __exit twofish_exit(void)
{
	crypto_unregister_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

module_init(twofish_init);
module_exit(twofish_exit);
MODULE_DESCRIPTION("Twofish Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("twofish");
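
/*
 * Illustrative only, not part of this file: a minimal sketch of how
 * another kernel module could drive the async "cbc(twofish)" transform
 * registered above, assuming the ablkcipher API of this kernel
 * generation.  The names example_result, example_complete and
 * example_cbc_twofish_encrypt are hypothetical; error handling is kept
 * to the minimum needed to be correct.
 */
#include <linux/completion.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/scatterlist.h>

struct example_result {
	struct completion completion;
	int err;
};

/* Called by the crypto layer when an asynchronous request finishes. */
static void example_complete(struct crypto_async_request *req, int err)
{
	struct example_result *res = req->data;

	if (err == -EINPROGRESS)
		return;

	res->err = err;
	complete(&res->completion);
}

/*
 * Encrypt @len bytes of @buf in place with CBC-Twofish; @len must be a
 * multiple of TF_BLOCK_SIZE and @iv points to a TF_BLOCK_SIZE-byte IV.
 */
static int example_cbc_twofish_encrypt(u8 *buf, unsigned int len,
				       const u8 *key, unsigned int keylen,
				       u8 *iv)
{
	struct crypto_ablkcipher *tfm;
	struct ablkcipher_request *req;
	struct example_result res;
	struct scatterlist sg;
	int err;

	/* Resolves to "cbc-twofish-avx" (priority 400) once loaded. */
	tfm = crypto_alloc_ablkcipher("cbc(twofish)", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_ablkcipher_setkey(tfm, key, keylen);
	if (err)
		goto out_free_tfm;

	req = ablkcipher_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		err = -ENOMEM;
		goto out_free_tfm;
	}

	init_completion(&res.completion);
	sg_init_one(&sg, buf, len);
	ablkcipher_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
					example_complete, &res);
	ablkcipher_request_set_crypt(req, &sg, &sg, len, iv);

	err = crypto_ablkcipher_encrypt(req);
	if (err == -EINPROGRESS || err == -EBUSY) {
		/* Request was queued to cryptd; wait for the callback. */
		wait_for_completion(&res.completion);
		err = res.err;
	}

	ablkcipher_request_free(req);
out_free_tfm:
	crypto_free_ablkcipher(tfm);
	return err;
}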