arch/x86/crypto/twofish_avx_glue.c
/*
 * Glue Code for AVX assembler version of Twofish Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/i387.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/twofish.h>
#include <asm/crypto/ablk_helper.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/scatterwalk.h>
#include <linux/workqueue.h>
#include <linux/spinlock.h>
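/*
 * The AVX assembler routines below process eight 16-byte Twofish blocks per
 * call, so that is the unit used for the FPU usage decisions in this glue
 * code.
 */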
#define TWOFISH_PARALLEL_BLOCKS 8

/* 8-way parallel cipher functions */
asmlinkage void twofish_ecb_enc_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
asmlinkage void twofish_ecb_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);

asmlinkage void twofish_cbc_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
asmlinkage void twofish_ctr_8way(struct twofish_ctx *ctx, u8 *dst,
				 const u8 *src, le128 *iv);

asmlinkage void twofish_xts_enc_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
asmlinkage void twofish_xts_dec_8way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src, le128 *iv);
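/*
 * Thin C wrappers that adapt the generic one-block and three-block Twofish
 * implementations to the function-pointer types expected by the glue_helper
 * dispatch tables below.
 */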
static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, false);
}

static void twofish_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(twofish_enc_blk));
}

static void twofish_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(twofish_dec_blk));
}
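/*
 * Dispatch tables for the shared glue_helper code: functions are listed in
 * decreasing order of blocks handled per call, and the helper picks the
 * largest variant that still fits the remaining data.  The FPU/AVX state is
 * only touched once at least fpu_blocks_limit blocks are available.
 */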
static const struct common_glue_ctx twofish_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_enc_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_enc_blk) }
	} }
};
static const struct common_glue_ctx twofish_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_ctr_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(twofish_enc_blk_ctr) }
	} }
};
static const struct common_glue_ctx twofish_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_enc) }
	} }
};
static const struct common_glue_ctx twofish_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_ecb_dec_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(twofish_dec_blk) }
	} }
};
static const struct common_glue_ctx twofish_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_cbc_dec_8way) }
	}, {
		.num_blocks = 3,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk_cbc_3way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(twofish_dec_blk) }
	} }
};
static const struct common_glue_ctx twofish_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = TWOFISH_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = TWOFISH_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(twofish_xts_dec) }
	} }
};
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&twofish_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&twofish_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(twofish_enc_blk), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&twofish_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&twofish_ctr, desc, dst, src, nbytes);
}
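/*
 * FPU section helpers: twofish_fpu_begin() only enters an FPU/AVX section
 * when at least TWOFISH_PARALLEL_BLOCKS worth of data is pending, and
 * twofish_fpu_end() leaves it again once the walk is finished.
 */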
static inline bool twofish_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(TF_BLOCK_SIZE, TWOFISH_PARALLEL_BLOCKS, NULL,
			      fpu_enabled, nbytes);
}

static inline void twofish_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

struct crypt_priv {
	struct twofish_ctx *ctx;
	bool fpu_enabled;
};
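/*
 * In-place callbacks used by the generic LRW code: each invocation encrypts
 * or decrypts nbytes of contiguous data in srcdst, preferring the 8-way AVX
 * path, then the 3-way path, and finally single blocks for the remainder.
 */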
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
		twofish_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
		twofish_enc_blk_3way(ctx->ctx, srcdst, srcdst);

	nbytes %= bsize * 3;

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_enc_blk(ctx->ctx, srcdst, srcdst);
}
static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = twofish_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * TWOFISH_PARALLEL_BLOCKS) {
		twofish_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / (bsize * 3); i++, srcdst += bsize * 3)
		twofish_dec_blk_3way(ctx->ctx, srcdst, srcdst);

	nbytes %= bsize * 3;

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_dec_blk(ctx->ctx, srcdst, srcdst);
}
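/*
 * LRW entry points: lrw_crypt() walks the scatterlists and invokes the
 * callbacks above with a stack tweak buffer.  CRYPTO_TFM_REQ_MAY_SLEEP is
 * cleared because the callbacks may run with the FPU section open, where
 * sleeping is not allowed.
 */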
static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->twofish_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[TWOFISH_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->twofish_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	twofish_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}
static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&twofish_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(twofish_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&twofish_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(twofish_enc_blk),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}
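/*
 * Algorithm registrations.  The first five "__"-prefixed entries are the
 * internal, synchronous blkcipher helpers (priority 0); the last five are
 * the user-visible asynchronous ablkcipher wrappers (priority 400), which
 * defer to cryptd via the ablk helper when the FPU cannot be used directly.
 */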
static struct crypto_alg twofish_algs[10] = { {
	.cra_name = "__ecb-twofish-avx",
	.cra_driver_name = "__driver-ecb-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ecb_encrypt,
			.decrypt = ecb_decrypt,
		},
	},
}, {
	.cra_name = "__cbc-twofish-avx",
	.cra_driver_name = "__driver-cbc-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = twofish_setkey,
			.encrypt = cbc_encrypt,
			.decrypt = cbc_decrypt,
		},
	},
}, {
	.cra_name = "__ctr-twofish-avx",
	.cra_driver_name = "__driver-ctr-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct twofish_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = twofish_setkey,
			.encrypt = ctr_crypt,
			.decrypt = ctr_crypt,
		},
	},
}, {
	.cra_name = "__lrw-twofish-avx",
	.cra_driver_name = "__driver-lrw-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_lrw_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_exit = lrw_twofish_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = lrw_twofish_setkey,
			.encrypt = lrw_encrypt,
			.decrypt = lrw_decrypt,
		},
	},
}, {
	.cra_name = "__xts-twofish-avx",
	.cra_driver_name = "__driver-xts-twofish-avx",
	.cra_priority = 0,
	.cra_flags = CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct twofish_xts_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_blkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = xts_twofish_setkey,
			.encrypt = xts_encrypt,
			.decrypt = xts_decrypt,
		},
	},
}, {
	.cra_name = "ecb(twofish)",
	.cra_driver_name = "ecb-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "cbc(twofish)",
	.cra_driver_name = "cbc-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = __ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "ctr(twofish)",
	.cra_driver_name = "ctr-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = 1,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_encrypt, /* CTR decryption is the same operation as encryption */
			.geniv = "chainiv",
		},
	},
}, {
	.cra_name = "lrw(twofish)",
	.cra_driver_name = "lrw-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.max_keysize = TF_MAX_KEY_SIZE +
				       TF_BLOCK_SIZE,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
}, {
	.cra_name = "xts(twofish)",
	.cra_driver_name = "xts-twofish-avx",
	.cra_priority = 400,
	.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize = TF_BLOCK_SIZE,
	.cra_ctxsize = sizeof(struct async_helper_ctx),
	.cra_alignmask = 0,
	.cra_type = &crypto_ablkcipher_type,
	.cra_module = THIS_MODULE,
	.cra_init = ablk_init,
	.cra_exit = ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize = TF_MIN_KEY_SIZE * 2,
			.max_keysize = TF_MAX_KEY_SIZE * 2,
			.ivsize = TF_BLOCK_SIZE,
			.setkey = ablk_set_key,
			.encrypt = ablk_encrypt,
			.decrypt = ablk_decrypt,
		},
	},
} };
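/*
 * Module init: register the algorithms only when the CPU advertises AVX and
 * OSXSAVE and the OS has enabled both SSE and YMM state saving in XCR0;
 * otherwise the AVX code paths could not be used safely.
 */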
static int __init twofish_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		printk(KERN_INFO "AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		printk(KERN_INFO "AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}
static void __exit twofish_exit(void)
{
	crypto_unregister_algs(twofish_algs, ARRAY_SIZE(twofish_algs));
}

module_init(twofish_init);
module_exit(twofish_exit);

MODULE_DESCRIPTION("Twofish Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS("twofish");