arch/x86/crypto/twofish_glue_3way.c

/*
 * Glue Code for 3-way parallel assembler optimized version of Twofish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */

#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/b128ops.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>

/* regular block cipher functions from twofish_x86_64 module */
asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
				const u8 *src);
asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
				const u8 *src);

/* 3-way parallel cipher functions */
asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
				       const u8 *src, bool xor);
asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst,
				     const u8 *src);
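
/*
 * The 3-way encryption routine takes an extra 'xor' flag: when false it
 * writes the three encrypted blocks to dst, when true it XORs the encrypted
 * blocks into dst.  The two wrappers below select between these modes; the
 * XOR variant is what CTR mode uses to combine the keystream with the data.
 */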
static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, false);
}

static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst,
					    const u8 *src)
{
	__twofish_enc_blk_3way(ctx, dst, src, true);
}
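
/*
 * Generic ECB walker: blocks are consumed in batches of three through the
 * 3-way routine for as long as at least three blocks remain, and any tail
 * is finished one block at a time with the single-block routine.
 */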
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
		     void (*fn)(struct twofish_ctx *, u8 *, const u8 *),
		     void (*fn_3way)(struct twofish_ctx *, u8 *, const u8 *))
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes;
	int err;

	err = blkcipher_walk_virt(desc, walk);

	while ((nbytes = walk->nbytes)) {
		u8 *wsrc = walk->src.virt.addr;
		u8 *wdst = walk->dst.virt.addr;

		/* Process three block batch */
		if (nbytes >= bsize * 3) {
			do {
				fn_3way(ctx, wdst, wsrc);

				wsrc += bsize * 3;
				wdst += bsize * 3;
				nbytes -= bsize * 3;
			} while (nbytes >= bsize * 3);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = blkcipher_walk_done(desc, walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, twofish_enc_blk, twofish_enc_blk_3way);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way);
}

static struct crypto_alg blk_ecb_alg = {
	.cra_name		= "ecb(twofish)",
	.cra_driver_name	= "ecb-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= TF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct twofish_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ecb_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE,
			.setkey		= twofish_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
};
static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 *iv = (u128 *)walk->iv;

	do {
		u128_xor(dst, src, iv);
		twofish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	u128_xor((u128 *)walk->iv, (u128 *)walk->iv, iv);
	return nbytes;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_encrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}
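
/*
 * CBC decryption runs back to front: starting from the last block lets the
 * preceding ciphertext blocks, still untouched in src, serve directly as
 * the chaining values, so in-place decryption only needs the small ivs[]
 * scratch area to save the ciphertexts that a 3-way batch overwrites.
 */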
static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 ivs[3 - 1];
	u128 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			nbytes -= bsize * (3 - 1);
			src -= 3 - 1;
			dst -= 3 - 1;

			ivs[0] = src[0];
			ivs[1] = src[1];

			twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

			u128_xor(dst + 1, dst + 1, ivs + 0);
			u128_xor(dst + 2, dst + 2, ivs + 1);

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			u128_xor(dst, dst, src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	for (;;) {
		twofish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		u128_xor(dst, dst, src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	u128_xor(dst, dst, (u128 *)walk->iv);
	*(u128 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_decrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

static struct crypto_alg blk_cbc_alg = {
	.cra_name		= "cbc(twofish)",
	.cra_driver_name	= "cbc-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= TF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct twofish_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_cbc_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE,
			.ivsize		= TF_BLOCK_SIZE,
			.setkey		= twofish_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
};
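
/*
 * Helpers for CTR mode: the IV is kept as a native-endian u128 counter
 * while walking, converted to big-endian block layout just before
 * encryption, and incremented with a 128-bit carry.
 */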
static inline void u128_to_be128(be128 *dst, const u128 *src)
{
	dst->a = cpu_to_be64(src->a);
	dst->b = cpu_to_be64(src->b);
}

static inline void be128_to_u128(u128 *dst, const be128 *src)
{
	dst->a = be64_to_cpu(src->a);
	dst->b = be64_to_cpu(src->b);
}

static inline void u128_inc(u128 *i)
{
	i->b++;
	if (!i->b)
		i->a++;
}
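
/*
 * Encrypt the final partial block: generate one keystream block from the
 * current counter and XOR only the remaining bytes, so CTR can handle
 * data lengths that are not a multiple of the block size.
 */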
static void ctr_crypt_final(struct blkcipher_desc *desc,
			    struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	u8 *ctrblk = walk->iv;
	u8 keystream[TF_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	twofish_enc_blk(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, TF_BLOCK_SIZE);
}

static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = TF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u128 *src = (u128 *)walk->src.virt.addr;
	u128 *dst = (u128 *)walk->dst.virt.addr;
	u128 ctrblk;
	be128 ctrblocks[3];

	be128_to_u128(&ctrblk, (be128 *)walk->iv);

	/* Process three block batch */
	if (nbytes >= bsize * 3) {
		do {
			if (dst != src) {
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
			}

			/* create ctrblks for parallel encrypt */
			u128_to_be128(&ctrblocks[0], &ctrblk);
			u128_inc(&ctrblk);
			u128_to_be128(&ctrblocks[1], &ctrblk);
			u128_inc(&ctrblk);
			u128_to_be128(&ctrblocks[2], &ctrblk);
			u128_inc(&ctrblk);

			twofish_enc_blk_xor_3way(ctx, (u8 *)dst,
						 (u8 *)ctrblocks);

			src += 3;
			dst += 3;
			nbytes -= bsize * 3;
		} while (nbytes >= bsize * 3);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		if (dst != src)
			*dst = *src;

		u128_to_be128(&ctrblocks[0], &ctrblk);
		u128_inc(&ctrblk);

		twofish_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);
		u128_xor(dst, dst, (u128 *)ctrblocks);

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

done:
	u128_to_be128((be128 *)walk->iv, &ctrblk);
	return nbytes;
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, TF_BLOCK_SIZE);

	while ((nbytes = walk.nbytes) >= TF_BLOCK_SIZE) {
		nbytes = __ctr_crypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		ctr_crypt_final(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

static struct crypto_alg blk_ctr_alg = {
	.cra_name		= "ctr(twofish)",
	.cra_driver_name	= "ctr-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct twofish_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_ctr_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE,
			.ivsize		= TF_BLOCK_SIZE,
			.setkey		= twofish_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
};
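
/*
 * Callbacks used by the generic LRW/XTS helpers: they hand us a run of
 * whole blocks to encrypt or decrypt in place (tweak handling is done by
 * the generic code), three blocks at a time when possible.
 */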
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct twofish_ctx *ctx = priv;
	int i;

	if (nbytes == 3 * bsize) {
		twofish_enc_blk_3way(ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_enc_blk(ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = TF_BLOCK_SIZE;
	struct twofish_ctx *ctx = priv;
	int i;

	if (nbytes == 3 * bsize) {
		twofish_dec_blk_3way(ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		twofish_dec_blk(ctx, srcdst, srcdst);
}

struct twofish_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct twofish_ctx twofish_ctx;
};
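
/*
 * For LRW the supplied key is the Twofish key followed by one extra block
 * that is used to build the tweak multiplication table.
 */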
static int lrw_twofish_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct twofish_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __twofish_setkey(&ctx->twofish_ctx, key, keylen - TF_BLOCK_SIZE,
			       &tfm->crt_flags);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen - TF_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[3];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &ctx->twofish_ctx,
		.crypt_fn = encrypt_callback,
	};

	return lrw_crypt(desc, dst, src, nbytes, &req);
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[3];
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &ctx->twofish_ctx,
		.crypt_fn = decrypt_callback,
	};

	return lrw_crypt(desc, dst, src, nbytes, &req);
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct twofish_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}

static struct crypto_alg blk_lrw_alg = {
	.cra_name		= "lrw(twofish)",
	.cra_driver_name	= "lrw-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= TF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct twofish_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_lrw_alg.cra_list),
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE + TF_BLOCK_SIZE,
			.max_keysize	= TF_MAX_KEY_SIZE + TF_BLOCK_SIZE,
			.ivsize		= TF_BLOCK_SIZE,
			.setkey		= lrw_twofish_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
};
struct twofish_xts_ctx {
	struct twofish_ctx tweak_ctx;
	struct twofish_ctx crypt_ctx;
};
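
/*
 * XTS uses two independent Twofish keys of equal length: the first half of
 * the supplied key is the data encryption key, the second half is the key
 * used to encrypt the tweak.
 */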
static int xts_twofish_setkey(struct crypto_tfm *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct twofish_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __twofish_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __twofish_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
				flags);
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[3];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(twofish_enc_blk),
		.crypt_ctx = &ctx->crypt_ctx,
		.crypt_fn = encrypt_callback,
	};

	return xts_crypt(desc, dst, src, nbytes, &req);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct twofish_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[3];
	struct xts_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),

		.tweak_ctx = &ctx->tweak_ctx,
		.tweak_fn = XTS_TWEAK_CAST(twofish_enc_blk),
		.crypt_ctx = &ctx->crypt_ctx,
		.crypt_fn = decrypt_callback,
	};

	return xts_crypt(desc, dst, src, nbytes, &req);
}

static struct crypto_alg blk_xts_alg = {
	.cra_name		= "xts(twofish)",
	.cra_driver_name	= "xts-twofish-3way",
	.cra_priority		= 300,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= TF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct twofish_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_list		= LIST_HEAD_INIT(blk_xts_alg.cra_list),
	.cra_u = {
		.blkcipher = {
			.min_keysize	= TF_MIN_KEY_SIZE * 2,
			.max_keysize	= TF_MAX_KEY_SIZE * 2,
			.ivsize		= TF_BLOCK_SIZE,
			.setkey		= xts_twofish_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
};

int __init init(void)
{
	int err;

	err = crypto_register_alg(&blk_ecb_alg);
	if (err)
		goto ecb_err;
	err = crypto_register_alg(&blk_cbc_alg);
	if (err)
		goto cbc_err;
	err = crypto_register_alg(&blk_ctr_alg);
	if (err)
		goto ctr_err;
	err = crypto_register_alg(&blk_lrw_alg);
	if (err)
		goto blk_lrw_err;
	err = crypto_register_alg(&blk_xts_alg);
	if (err)
		goto blk_xts_err;

	return 0;

	crypto_unregister_alg(&blk_xts_alg);
blk_xts_err:
	crypto_unregister_alg(&blk_lrw_alg);
blk_lrw_err:
	crypto_unregister_alg(&blk_ctr_alg);
ctr_err:
	crypto_unregister_alg(&blk_cbc_alg);
cbc_err:
	crypto_unregister_alg(&blk_ecb_alg);
ecb_err:
	return err;
}

void __exit fini(void)
{
	crypto_unregister_alg(&blk_xts_alg);
	crypto_unregister_alg(&blk_lrw_alg);
	crypto_unregister_alg(&blk_ctr_alg);
	crypto_unregister_alg(&blk_cbc_alg);
	crypto_unregister_alg(&blk_ecb_alg);
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Twofish Cipher Algorithm, 3-way parallel asm optimized");
MODULE_ALIAS("twofish");
MODULE_ALIAS("twofish-asm");