/*
 * Glue Code for assembler optimized version of Blowfish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */

#include <asm/processor.h>
#include <crypto/blowfish.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/algapi.h>

/* regular block cipher functions */
asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
				   bool xor);
asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);

/* 4-way parallel cipher functions */
asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					const u8 *src, bool xor);
asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
				      const u8 *src);

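/*
 * The __blowfish_enc_blk*() assembler routines take a trailing 'xor' flag:
 * false writes the encrypted block(s) to dst, true XORs the encrypted
 * block(s) into dst (used by CTR mode below).  The inline wrappers that
 * follow simply fix that flag.
 */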
static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor(struct bf_ctx *ctx, u8 *dst,
					const u8 *src)
{
	__blowfish_enc_blk(ctx, dst, src, true);
}

static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
					 const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor_4way(struct bf_ctx *ctx, u8 *dst,
					     const u8 *src)
{
	__blowfish_enc_blk_4way(ctx, dst, src, true);
}

static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
	blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}

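/*
 * Generic ECB walk helper: 'fn' processes a single block, 'fn_4way'
 * processes four blocks at a time.  ecb_encrypt() and ecb_decrypt() below
 * share this loop and differ only in which pair of functions they pass in.
 */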
static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
		     void (*fn)(struct bf_ctx *, u8 *, const u8 *),
		     void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes;
	int err;

	err = blkcipher_walk_virt(desc, walk);

	while ((nbytes = walk->nbytes)) {
		u8 *wsrc = walk->src.virt.addr;
		u8 *wdst = walk->dst.virt.addr;

		/* Process four block batch */
		if (nbytes >= bsize * 4) {
			do {
				fn_4way(ctx, wdst, wsrc);

				wsrc += bsize * 4;
				wdst += bsize * 4;
				nbytes -= bsize * 4;
			} while (nbytes >= bsize * 4);

			if (nbytes < bsize)
				goto done;
		}

		/* Handle leftovers */
		do {
			fn(ctx, wdst, wsrc);

			wsrc += bsize;
			wdst += bsize;
			nbytes -= bsize;
		} while (nbytes >= bsize);

done:
		err = blkcipher_walk_done(desc, walk, nbytes);
	}

	return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	return ecb_crypt(desc, &walk, blowfish_dec_blk, blowfish_dec_blk_4way);
}

static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 *iv = (u64 *)walk->iv;

	/* CBC encryption is inherently serial: each block is XORed with the
	 * previous ciphertext block before encryption. */
	do {
		*dst = *src ^ *iv;
		blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
		iv = dst;

		src += 1;
		dst += 1;
		nbytes -= bsize;
	} while (nbytes >= bsize);

	*(u64 *)walk->iv = *iv;
	return nbytes;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_encrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

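/*
 * CBC decryption walks the data backwards, starting from the last block, so
 * that four ciphertext blocks can be decrypted with one 4-way call and then
 * XORed with the preceding ciphertext blocks in place, even when src == dst.
 * The original last ciphertext block is saved in last_iv and becomes the
 * new IV once this walk segment is done.
 */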
static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
				  struct blkcipher_walk *walk)
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ivs[4 - 1];
	u64 last_iv;

	/* Start of the last block. */
	src += nbytes / bsize - 1;
	dst += nbytes / bsize - 1;

	last_iv = *src;

	/* Process four block batch */
	if (nbytes >= bsize * 4) {
		do {
			nbytes -= bsize * 4 - bsize;
			src -= 4 - 1;
			dst -= 4 - 1;

			/* Save ciphertext blocks needed as chaining values. */
			ivs[0] = src[0];
			ivs[1] = src[1];
			ivs[2] = src[2];

			blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);

			dst[1] ^= ivs[0];
			dst[2] ^= ivs[1];
			dst[3] ^= ivs[2];

			nbytes -= bsize;
			if (nbytes < bsize)
				goto done;

			*dst ^= *(src - 1);
			src -= 1;
			dst -= 1;
		} while (nbytes >= bsize * 4);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	for (;;) {
		blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

		nbytes -= bsize;
		if (nbytes < bsize)
			break;

		*dst ^= *(src - 1);
		src -= 1;
		dst -= 1;
	}

done:
	*dst ^= *(u64 *)walk->iv;
	*(u64 *)walk->iv = last_iv;

	return nbytes;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		nbytes = __cbc_decrypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}

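/*
 * CTR mode: the 64-bit counter in walk->iv is kept as a host-endian u64
 * inside __ctr_crypt() and converted to big-endian for each generated
 * block.  A trailing partial block is handled by ctr_crypt_final(), which
 * encrypts one counter block into a keystream buffer and XORs only the
 * remaining bytes into the output.
 */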
static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk)
{
	u8 *ctrblk = walk->iv;
	u8 keystream[BF_BLOCK_SIZE];
	u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	blowfish_enc_blk(ctx, keystream, ctrblk);
	crypto_xor(keystream, src, nbytes);
	memcpy(dst, keystream, nbytes);

	crypto_inc(ctrblk, BF_BLOCK_SIZE);
}

static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
				struct blkcipher_walk *walk)
{
	struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	unsigned int bsize = BF_BLOCK_SIZE;
	unsigned int nbytes = walk->nbytes;
	u64 *src = (u64 *)walk->src.virt.addr;
	u64 *dst = (u64 *)walk->dst.virt.addr;
	u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
	__be64 ctrblocks[4];

	/* Process four block batch */
	if (nbytes >= bsize * 4) {
		do {
			if (dst != src) {
				dst[0] = src[0];
				dst[1] = src[1];
				dst[2] = src[2];
				dst[3] = src[3];
			}

			/* create ctrblks for parallel encrypt */
			ctrblocks[0] = cpu_to_be64(ctrblk++);
			ctrblocks[1] = cpu_to_be64(ctrblk++);
			ctrblocks[2] = cpu_to_be64(ctrblk++);
			ctrblocks[3] = cpu_to_be64(ctrblk++);

			blowfish_enc_blk_xor_4way(ctx, (u8 *)dst,
						  (u8 *)ctrblocks);

			src += 4;
			dst += 4;
		} while ((nbytes -= bsize * 4) >= bsize * 4);

		if (nbytes < bsize)
			goto done;
	}

	/* Handle leftovers */
	do {
		if (dst != src)
			*dst = *src;

		ctrblocks[0] = cpu_to_be64(ctrblk++);

		blowfish_enc_blk_xor(ctx, (u8 *)dst, (u8 *)ctrblocks);

		src += 1;
		dst += 1;
	} while ((nbytes -= bsize) >= bsize);

done:
	*(__be64 *)walk->iv = cpu_to_be64(ctrblk);
	return nbytes;
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt_block(desc, &walk, BF_BLOCK_SIZE);

	while ((nbytes = walk.nbytes) >= BF_BLOCK_SIZE) {
		nbytes = __ctr_crypt(desc, &walk);
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	if (walk.nbytes) {
		ctr_crypt_final(crypto_blkcipher_ctx(desc->tfm), &walk);
		err = blkcipher_walk_done(desc, &walk, 0);
	}

	return err;
}

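/*
 * Four algorithm instances are registered: the bare "blowfish" cipher plus
 * ECB, CBC and CTR blkcipher modes, all backed by the assembler routines.
 */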
static struct crypto_alg bf_algs[4] = { {
	.cra_name		= "blowfish",
	.cra_driver_name	= "blowfish-asm",
	.cra_flags		= CRYPTO_ALG_TYPE_CIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.cipher = {
			.cia_min_keysize	= BF_MIN_KEY_SIZE,
			.cia_max_keysize	= BF_MAX_KEY_SIZE,
			.cia_setkey		= blowfish_setkey,
			.cia_encrypt		= blowfish_encrypt,
			.cia_decrypt		= blowfish_decrypt,
		}
	}
}, {
	.cra_name		= "ecb(blowfish)",
	.cra_driver_name	= "ecb-blowfish-asm",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= BF_MIN_KEY_SIZE,
			.max_keysize	= BF_MAX_KEY_SIZE,
			.setkey		= blowfish_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		}
	}
}, {
	.cra_name		= "cbc(blowfish)",
	.cra_driver_name	= "cbc-blowfish-asm",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= BF_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= BF_MIN_KEY_SIZE,
			.max_keysize	= BF_MAX_KEY_SIZE,
			.ivsize		= BF_BLOCK_SIZE,
			.setkey		= blowfish_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		}
	}
}, {
	.cra_name		= "ctr(blowfish)",
	.cra_driver_name	= "ctr-blowfish-asm",
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct bf_ctx),
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= BF_MIN_KEY_SIZE,
			.max_keysize	= BF_MAX_KEY_SIZE,
			.ivsize		= BF_BLOCK_SIZE,
			.setkey		= blowfish_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		}
	}
} };

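/*
 * Example use (one possible caller sketch): a kernel user would typically
 * request one of these by name through the crypto API, e.g.
 * crypto_alloc_blkcipher("cbc(blowfish)", 0, 0), then call
 * crypto_blkcipher_setkey() and crypto_blkcipher_encrypt(); requesting the
 * "cbc-blowfish-asm" driver name instead selects this implementation
 * explicitly.
 */
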
static bool is_blacklisted_cpu(void)
{
	if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
		return false;

	if (boot_cpu_data.x86 == 0x0f) {
		/*
		 * On Pentium 4, blowfish-x86_64 is slower than the generic C
		 * implementation because it uses 64-bit rotates, which are
		 * really slow on P4. Therefore blacklist P4s.
		 */
		return true;
	}

	return false;
}

static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");

static int __init init(void)
{
	if (!force && is_blacklisted_cpu()) {
		printk(KERN_INFO
			"blowfish-x86_64: performance on this CPU "
			"would be suboptimal: disabling "
			"blowfish-x86_64.\n");
		return -ENODEV;
	}

	return crypto_register_algs(bf_algs, ARRAY_SIZE(bf_algs));
}

static void __exit fini(void)
{
	crypto_unregister_algs(bf_algs, ARRAY_SIZE(bf_algs));
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Blowfish Cipher Algorithm, asm optimized");
MODULE_ALIAS_CRYPTO("blowfish");
MODULE_ALIAS_CRYPTO("blowfish-asm");