/*
 * Glue Code for 3-way parallel assembler optimized version of Twofish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 * USA
 */
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/algapi.h>
#include <crypto/twofish.h>
#include <crypto/b128ops.h>
/* regular block cipher functions from twofish_x86_64 module */
asmlinkage void twofish_enc_blk(struct twofish_ctx *ctx, u8 *dst,
                                const u8 *src);
asmlinkage void twofish_dec_blk(struct twofish_ctx *ctx, u8 *dst,
                                const u8 *src);
/* 3-way parallel cipher functions */
asmlinkage void __twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
                                       const u8 *src, bool xor);
asmlinkage void twofish_dec_blk_3way(struct twofish_ctx *ctx, u8 *dst,
                                     const u8 *src);
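/*
 * Note: the 3-way routines operate on three consecutive 16-byte blocks per
 * call. When the 'xor' flag of __twofish_enc_blk_3way() is set, the
 * encryption result is XORed into the destination instead of overwriting it;
 * the CTR code below relies on this to apply the keystream in place.
 */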
static inline void twofish_enc_blk_3way(struct twofish_ctx *ctx, u8 *dst,
                                        const u8 *src)
{
        __twofish_enc_blk_3way(ctx, dst, src, false);
}
static inline void twofish_enc_blk_xor_3way(struct twofish_ctx *ctx, u8 *dst,
                                            const u8 *src)
{
        __twofish_enc_blk_3way(ctx, dst, src, true);
}
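/*
 * ECB has no chaining, so both directions can use the 3-way batch freely:
 * ecb_crypt() walks each mapped segment, consumes as many three-block
 * batches as possible with fn_3way(), and finishes the remaining whole
 * blocks one at a time with fn().
 */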
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
                     void (*fn)(struct twofish_ctx *, u8 *, const u8 *),
                     void (*fn_3way)(struct twofish_ctx *, u8 *, const u8 *))
{
        struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = TF_BLOCK_SIZE;
        unsigned int nbytes;
        int err;

        err = blkcipher_walk_virt(desc, walk);
        while ((nbytes = walk->nbytes)) {
                u8 *wsrc = walk->src.virt.addr;
                u8 *wdst = walk->dst.virt.addr;
                /* Process three block batch */
                if (nbytes >= bsize * 3) {
                        do {
                                fn_3way(ctx, wdst, wsrc);

                                wsrc += bsize * 3;
                                wdst += bsize * 3;
                                nbytes -= bsize * 3;
                        } while (nbytes >= bsize * 3);

                        if (nbytes < bsize)
                                goto done;
                }
                /* Handle leftovers */
                do {
                        fn(ctx, wdst, wsrc);

                        wsrc += bsize;
                        wdst += bsize;
                        nbytes -= bsize;
                } while (nbytes >= bsize);

done:
                err = blkcipher_walk_done(desc, walk, nbytes);
        }

        return err;
}
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_crypt(desc, &walk, twofish_enc_blk, twofish_enc_blk_3way);
}
static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_crypt(desc, &walk, twofish_dec_blk, twofish_dec_blk_3way);
}
static struct crypto_alg blk_ecb_alg = {
        .cra_name               = "ecb(twofish)",
        .cra_driver_name        = "ecb-twofish-3way",
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = TF_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct twofish_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ecb_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = TF_MIN_KEY_SIZE,
                        .max_keysize    = TF_MAX_KEY_SIZE,
                        .setkey         = twofish_setkey,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
};
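/*
 * CBC encryption is inherently sequential: every block is XORed with the
 * previous ciphertext block before it is encrypted, so __cbc_encrypt()
 * cannot use the 3-way batch and processes one block per iteration.
 */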
static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
                                  struct blkcipher_walk *walk)
{
        struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = TF_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u128 *src = (u128 *)walk->src.virt.addr;
        u128 *dst = (u128 *)walk->dst.virt.addr;
        u128 *iv = (u128 *)walk->iv;
        do {
                u128_xor(dst, src, iv);
                twofish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
                iv = dst;

                src += 1;
                dst += 1;
                nbytes -= bsize;
        } while (nbytes >= bsize);
        u128_xor((u128 *)walk->iv, (u128 *)walk->iv, iv);

        return nbytes;
}
static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                nbytes = __cbc_encrypt(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}
static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
                                  struct blkcipher_walk *walk)
{
        struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = TF_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u128 *src = (u128 *)walk->src.virt.addr;
        u128 *dst = (u128 *)walk->dst.virt.addr;
        u128 ivs[3 - 1];
        u128 last_iv;
        /* Start of the last block. */
        src += nbytes / bsize - 1;
        dst += nbytes / bsize - 1;

        last_iv = *src;
        /* Process three block batch */
        if (nbytes >= bsize * 3) {
                do {
                        nbytes -= bsize * (3 - 1);
                        src -= 3 - 1;
                        dst -= 3 - 1;

                        ivs[0] = src[0];
                        ivs[1] = src[1];

                        twofish_dec_blk_3way(ctx, (u8 *)dst, (u8 *)src);

                        u128_xor(dst + 1, dst + 1, ivs + 0);
                        u128_xor(dst + 2, dst + 2, ivs + 1);

                        nbytes -= bsize;
                        if (nbytes < bsize)
                                goto done;

                        u128_xor(dst, dst, src - 1);
                        src -= 1;
                        dst -= 1;
                } while (nbytes >= bsize * 3);

                if (nbytes < bsize)
                        goto done;
        }
        /* Handle leftovers */
        for (;;) {
                twofish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

                nbytes -= bsize;
                if (nbytes < bsize)
                        break;

                u128_xor(dst, dst, src - 1);
                src -= 1;
                dst -= 1;
        }

done:
        u128_xor(dst, dst, (u128 *)walk->iv);
        *(u128 *)walk->iv = last_iv;

        return nbytes;
}
static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                nbytes = __cbc_decrypt(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}
static struct crypto_alg blk_cbc_alg = {
        .cra_name               = "cbc(twofish)",
        .cra_driver_name        = "cbc-twofish-3way",
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = TF_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct twofish_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_cbc_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = TF_MIN_KEY_SIZE,
                        .max_keysize    = TF_MAX_KEY_SIZE,
                        .ivsize         = TF_BLOCK_SIZE,
                        .setkey         = twofish_setkey,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
};
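/*
 * CTR helpers: the running counter is kept as a CPU-endian u128 so that
 * incrementing it is cheap, and is converted to the big-endian be128 layout
 * expected by the cipher only when a counter block is actually encrypted.
 */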
static inline void u128_to_be128(be128 *dst, const u128 *src)
{
        dst->a = cpu_to_be64(src->a);
        dst->b = cpu_to_be64(src->b);
}
static inline void be128_to_u128(u128 *dst, const be128 *src)
{
        dst->a = be64_to_cpu(src->a);
        dst->b = be64_to_cpu(src->b);
}
static inline void u128_inc(u128 *i)
{
        i->b++;
        if (!i->b)
                i->a++;
}
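/*
 * ctr_crypt_final() handles a trailing partial block: the counter block is
 * encrypted into a local keystream buffer and only the remaining nbytes are
 * XORed into the output, which is why ctr(twofish) can process requests
 * that are not a multiple of the block size.
 */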
static void ctr_crypt_final(struct blkcipher_desc *desc,
                            struct blkcipher_walk *walk)
{
        struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        u8 *ctrblk = walk->iv;
        u8 keystream[TF_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        twofish_enc_blk(ctx, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);

        crypto_inc(ctrblk, TF_BLOCK_SIZE);
}
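/*
 * __ctr_crypt() builds three consecutive counter blocks per batch and feeds
 * them to twofish_enc_blk_xor_3way(), which XORs the resulting keystream
 * directly onto the plaintext already sitting in dst.
 */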
static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
                                struct blkcipher_walk *walk)
{
        struct twofish_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = TF_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u128 *src = (u128 *)walk->src.virt.addr;
        u128 *dst = (u128 *)walk->dst.virt.addr;
        u128 ctrblk;
        be128 ctrblocks[3];

        be128_to_u128(&ctrblk, (be128 *)walk->iv);
        /* Process three block batch */
        if (nbytes >= bsize * 3) {
                do {
                        if (dst != src) {
                                dst[0] = src[0];
                                dst[1] = src[1];
                                dst[2] = src[2];
                        }

                        /* create ctrblks for parallel encrypt */
                        u128_to_be128(&ctrblocks[0], &ctrblk);
                        u128_inc(&ctrblk);
                        u128_to_be128(&ctrblocks[1], &ctrblk);
                        u128_inc(&ctrblk);
                        u128_to_be128(&ctrblocks[2], &ctrblk);
                        u128_inc(&ctrblk);

                        twofish_enc_blk_xor_3way(ctx, (u8 *)dst,
                                                 (u8 *)ctrblocks);

                        src += 3;
                        dst += 3;
                        nbytes -= bsize * 3;
                } while (nbytes >= bsize * 3);

                if (nbytes < bsize)
                        goto done;
        }
        /* Handle leftovers */
        do {
                if (dst != src)
                        *dst = *src;

                u128_to_be128(&ctrblocks[0], &ctrblk);
                u128_inc(&ctrblk);

                twofish_enc_blk(ctx, (u8 *)ctrblocks, (u8 *)ctrblocks);
                u128_xor(dst, dst, (u128 *)ctrblocks);

                src += 1;
                dst += 1;
                nbytes -= bsize;
        } while (nbytes >= bsize);

done:
        u128_to_be128((be128 *)walk->iv, &ctrblk);

        return nbytes;
}
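/*
 * ctr_crypt() asks the walker for at least TF_BLOCK_SIZE bytes per segment
 * (blkcipher_walk_virt_block), so only the final partial block of the whole
 * request falls through to ctr_crypt_final().
 */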
static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, TF_BLOCK_SIZE);

        while ((nbytes = walk.nbytes) >= TF_BLOCK_SIZE) {
                nbytes = __ctr_crypt(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        if (walk.nbytes) {
                ctr_crypt_final(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }

        return err;
}
static struct crypto_alg blk_ctr_alg = {
        .cra_name               = "ctr(twofish)",
        .cra_driver_name        = "ctr-twofish-3way",
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_ctxsize            = sizeof(struct twofish_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_list               = LIST_HEAD_INIT(blk_ctr_alg.cra_list),
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = TF_MIN_KEY_SIZE,
                        .max_keysize    = TF_MAX_KEY_SIZE,
                        .ivsize         = TF_BLOCK_SIZE,
                        .setkey         = twofish_setkey,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
};
int __init init(void)
{
        int err;

        err = crypto_register_alg(&blk_ecb_alg);
        if (err)
                goto ecb_err;
        err = crypto_register_alg(&blk_cbc_alg);
        if (err)
                goto cbc_err;
        err = crypto_register_alg(&blk_ctr_alg);
        if (err)
                goto ctr_err;

        return 0;

ctr_err:
        crypto_unregister_alg(&blk_cbc_alg);
cbc_err:
        crypto_unregister_alg(&blk_ecb_alg);
ecb_err:
        return err;
}
void __exit fini(void)
{
        crypto_unregister_alg(&blk_ctr_alg);
        crypto_unregister_alg(&blk_cbc_alg);
        crypto_unregister_alg(&blk_ecb_alg);
}
module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Twofish Cipher Algorithm, 3-way parallel asm optimized");
MODULE_ALIAS("twofish");
MODULE_ALIAS("twofish-asm");
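/*
 * A minimal usage sketch (not part of this module): once these algorithms
 * are registered, other kernel code can reach them through the generic
 * blkcipher API, and the crypto core will prefer the highest-priority
 * implementation registered under the same name. The identifiers key,
 * keylen, iv, buf and len below are placeholders, not symbols from this
 * file; <linux/scatterlist.h> and <linux/err.h> are assumed to be included.
 *
 *	struct crypto_blkcipher *tfm;
 *	struct blkcipher_desc desc;
 *	struct scatterlist sg;
 *
 *	tfm = crypto_alloc_blkcipher("ctr(twofish)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *
 *	crypto_blkcipher_setkey(tfm, key, keylen);
 *	crypto_blkcipher_set_iv(tfm, iv, crypto_blkcipher_ivsize(tfm));
 *
 *	desc.tfm = tfm;
 *	desc.flags = 0;
 *	sg_init_one(&sg, buf, len);
 *	crypto_blkcipher_encrypt(&desc, &sg, &sg, len);
 *
 *	crypto_free_blkcipher(tfm);
 */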