/*
 * Glue Code for AVX assembler version of Twofish Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 *
 */
#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>		/* xgetbv(), XCR_XFEATURE_ENABLED_MASK */
#include <asm/xsave.h>
#include <asm/crypto/twofish.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/scatterwalk.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
#define TWOFISH_PARALLEL_BLOCKS 8
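/*
 * Number of blocks handled in one pass of the AVX code; shorter requests
 * fall back to the 3-way and single-block code paths below.
 */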
/* 8-way parallel cipher functions */
asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
                                     const u8 *src);
asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
                                     const u8 *src);

asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
                                     const u8 *src);
asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
                                 const u8 *src, le128 *iv);

asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst,
                                     const u8 *src, le128 *iv);
asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst,
                                     const u8 *src, le128 *iv);
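/*
 * The _8way routines above are implemented in the accompanying AVX assembly;
 * the C glue below wires them into the crypto API and provides the narrower
 * 3-way and single-block fallbacks.
 */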
static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
                                        const u8 *src)
{
        __twofish_enc_blk_3way(ctx, dst, src, false);
}
static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(twofish_enc_blk));
}

static void twofish_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(twofish_dec_blk));
}
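/*
 * Dispatch tables for the shared glue_helper code.  Entries are listed in
 * decreasing num_blocks order; the helper uses the widest routine that still
 * fits the remaining data and only enables the FPU when at least
 * fpu_blocks_limit blocks are available.
 */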
static const struct common_glue_ctx twofish_enc = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
        } }
};

static const struct common_glue_ctx twofish_ctr = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_ctr_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr) }
        } }
};

static const struct common_glue_ctx twofish_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc) }
        } }
};

static const struct common_glue_ctx twofish_dec = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
        } }
};

static const struct common_glue_ctx twofish_dec_cbc = {
        .num_funcs = 3,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_cbc_dec_8way) }
        }, {
                .num_blocks = 3,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk_cbc_3way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk) }
        } }
};

static const struct common_glue_ctx twofish_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = TWOFISH_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec_8way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec) }
        } }
};
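/*
 * blkcipher entry points: ECB, CBC and CTR are handled entirely by the
 * generic 128-bit glue code using the dispatch tables above.
 */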
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(twofish_enc_blk), desc,
                                       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_decrypt_128bit(&twofish_dec_cbc, desc, dst, src,
                                       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);
}
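/*
 * FPU handling for the LRW callback path below: the FPU is requested only
 * when at least TWOFISH_PARALLEL_BLOCKS blocks are being processed, and the
 * resulting state is carried in struct crypt_priv.
 */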
static inline bool twofish_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
        return glue_fpu_begin(TF_BLOCK_SIZE, TWOFISH_PARALLEL_BLOCKS, NULL,
                              fpu_enabled, nbytes);
}

static inline void twofish_fpu_end(bool fpu_enabled)
{
        glue_fpu_end(fpu_enabled);
}
struct crypt_priv {
        struct twofish_ctx *ctx;
        bool fpu_enabled;
};
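/*
 * Callbacks used by lrw_crypt(): process as many 8-way chunks as possible,
 * then 3-way chunks, then the remaining blocks one at a time.
 */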
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = TF_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
                twofish_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
                twofish_enc_blk_3way(ctx->ctx, srcdst, srcdst);

        nbytes %= bsize * 3;

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                twofish_enc_blk(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = TF_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
                twofish_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
                twofish_dec_blk_3way(ctx->ctx, srcdst, srcdst);

        nbytes %= bsize * 3;

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                twofish_dec_blk(ctx->ctx, srcdst, srcdst);
}
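/*
 * LRW entry points: lrw_crypt() calls back into encrypt_callback() and
 * decrypt_callback() for the bulk work.  CRYPTO_TFM_REQ_MAY_SLEEP is cleared
 * because sleeping is not allowed while the kernel FPU is held.
 */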
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[TWOFISH_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->twofish_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = encrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        twofish_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[TWOFISH_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->twofish_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = decrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        twofish_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}
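/*
 * XTS entry points.  The tweak is always computed with the encryption
 * direction of the cipher, which is why twofish_enc_blk is passed as the
 * tweak function for both encryption and decryption.
 */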
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&twofish_enc_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(twofish_enc_blk),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&twofish_dec_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(twofish_enc_blk),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
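/*
 * Algorithm registrations: the "__" prefixed blkcipher entries are internal
 * synchronous helpers; the plain ecb/cbc/ctr/lrw/xts(twofish) entries are the
 * async wrappers provided by ablk_helper (via cryptd) that users of the
 * crypto API actually see.
 */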
static struct crypto_alg twofish_algs[10] = { {
        .cra_name = "__ecb-twofish-avx",
        .cra_driver_name = "__driver-ecb-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .setkey = twofish_setkey,
                        .encrypt = ecb_encrypt,
                        .decrypt = ecb_decrypt,
                },
        },
}, {
        .cra_name = "__cbc-twofish-avx",
        .cra_driver_name = "__driver-cbc-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .setkey = twofish_setkey,
                        .encrypt = cbc_encrypt,
                        .decrypt = cbc_decrypt,
                },
        },
}, {
        .cra_name = "__ctr-twofish-avx",
        .cra_driver_name = "__driver-ctr-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct twofish_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = twofish_setkey,
                        .encrypt = ctr_crypt,
                        .decrypt = ctr_crypt,
                },
        },
}, {
        .cra_name = "__lrw-twofish-avx",
        .cra_driver_name = "__driver-lrw-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_lrw_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_exit = lrw_twofish_exit_tfm,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = lrw_twofish_setkey,
                        .encrypt = lrw_encrypt,
                        .decrypt = lrw_decrypt,
                },
        },
}, {
        .cra_name = "__xts-twofish-avx",
        .cra_driver_name = "__driver-xts-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct twofish_xts_ctx),
        .cra_type = &crypto_blkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE * 2,
                        .max_keysize = TF_MAX_KEY_SIZE * 2,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = xts_twofish_setkey,
                        .encrypt = xts_encrypt,
                        .decrypt = xts_decrypt,
                },
        },
}, {
        .cra_name = "ecb(twofish)",
        .cra_driver_name = "ecb-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "cbc(twofish)",
        .cra_driver_name = "cbc-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = __ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "ctr(twofish)",
        .cra_driver_name = "ctr-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = 1,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_encrypt, /* CTR decryption == encryption */
                },
        },
}, {
        .cra_name = "lrw(twofish)",
        .cra_driver_name = "lrw-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .max_keysize = TF_MAX_KEY_SIZE +
                                       TF_BLOCK_SIZE,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
}, {
        .cra_name = "xts(twofish)",
        .cra_driver_name = "xts-twofish-avx",
        .cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize = TF_BLOCK_SIZE,
        .cra_ctxsize = sizeof(struct async_helper_ctx),
        .cra_type = &crypto_ablkcipher_type,
        .cra_module = THIS_MODULE,
        .cra_init = ablk_init,
        .cra_exit = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize = TF_MIN_KEY_SIZE * 2,
                        .max_keysize = TF_MAX_KEY_SIZE * 2,
                        .ivsize = TF_BLOCK_SIZE,
                        .setkey = ablk_set_key,
                        .encrypt = ablk_encrypt,
                        .decrypt = ablk_decrypt,
                },
        },
} };
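/*
 * Module init: require AVX and OSXSAVE, and verify via XCR0 that the OS
 * actually saves/restores SSE and YMM state before registering the
 * algorithms.
 */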
static int __init twofish_init(void)
{
        u64 xcr0;

        if (!cpu_has_avx || !cpu_has_osxsave) {
                printk(KERN_INFO "AVX instructions are not detected.\n");
                return -ENODEV;
        }

        xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
        if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
                printk(KERN_INFO "AVX detected but unusable.\n");
                return -ENODEV;
        }

        return crypto_register_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}
static void __exit twofish_exit(void)
{
        crypto_unregister_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

module_init(twofish_init);
module_exit(twofish_exit);

MODULE_DESCRIPTION("Twofish Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("twofish");