/*
 * Glue Code for assembler optimized version of Blowfish
 *
 * Copyright (c) 2011 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * CBC & ECB parts based on code (crypto/cbc.c,ecb.c) by:
 *   Copyright (c) 2006 Herbert Xu <herbert@gondor.apana.org.au>
 * CTR part based on code (crypto/ctr.c) by:
 *   (C) Copyright IBM Corp. 2007 - Joy Latten <latten@us.ibm.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
 * USA
 */

#include <asm/processor.h>
#include <crypto/blowfish.h>
#include <linux/crypto.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/algapi.h>

/* regular block cipher functions */
asmlinkage void __blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src,
                                   bool xor);
asmlinkage void blowfish_dec_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src);

/* 4-way parallel cipher functions */
asmlinkage void __blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
                                        const u8 *src, bool xor);
asmlinkage void blowfish_dec_blk_4way(struct bf_ctx *ctx, u8 *dst,
                                      const u8 *src);
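
/*
 * Thin C wrappers around the assembler entry points.  The *_xor variants
 * ask the assembler code to XOR the encrypted block into *dst instead of
 * overwriting it; the CTR code below relies on this to combine the
 * encrypted counter blocks directly with the data already copied to *dst.
 */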

static inline void blowfish_enc_blk(struct bf_ctx *ctx, u8 *dst, const u8 *src)
{
        __blowfish_enc_blk(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor(struct bf_ctx *ctx, u8 *dst,
                                        const u8 *src)
{
        __blowfish_enc_blk(ctx, dst, src, true);
}

static inline void blowfish_enc_blk_4way(struct bf_ctx *ctx, u8 *dst,
                                         const u8 *src)
{
        __blowfish_enc_blk_4way(ctx, dst, src, false);
}

static inline void blowfish_enc_blk_xor_4way(struct bf_ctx *ctx, u8 *dst,
                                             const u8 *src)
{
        __blowfish_enc_blk_4way(ctx, dst, src, true);
}

static void blowfish_encrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        blowfish_enc_blk(crypto_tfm_ctx(tfm), dst, src);
}

static void blowfish_decrypt(struct crypto_tfm *tfm, u8 *dst, const u8 *src)
{
        blowfish_dec_blk(crypto_tfm_ctx(tfm), dst, src);
}
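
/*
 * Shared ECB worker for ecb_encrypt()/ecb_decrypt(): walk the scatterlists,
 * push four blocks at a time through the 4-way assembler routine, then
 * finish any remaining blocks with the single-block one.
 */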

static int ecb_crypt(struct blkcipher_desc *desc, struct blkcipher_walk *walk,
                     void (*fn)(struct bf_ctx *, u8 *, const u8 *),
                     void (*fn_4way)(struct bf_ctx *, u8 *, const u8 *))
{
        struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = BF_BLOCK_SIZE;
        unsigned int nbytes;
        int err;

        err = blkcipher_walk_virt(desc, walk);

        while ((nbytes = walk->nbytes)) {
                u8 *wsrc = walk->src.virt.addr;
                u8 *wdst = walk->dst.virt.addr;

                /* Process four block batch */
                if (nbytes >= bsize * 4) {
                        do {
                                fn_4way(ctx, wdst, wsrc);

                                wsrc += bsize * 4;
                                wdst += bsize * 4;
                                nbytes -= bsize * 4;
                        } while (nbytes >= bsize * 4);

                        if (nbytes < bsize)
                                goto done;
                }

                /* Handle leftovers */
                do {
                        fn(ctx, wdst, wsrc);

                        wsrc += bsize;
                        wdst += bsize;
                        nbytes -= bsize;
                } while (nbytes >= bsize);

done:
                err = blkcipher_walk_done(desc, walk, nbytes);
        }

        return err;
}

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_crypt(desc, &walk, blowfish_enc_blk, blowfish_enc_blk_4way);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        return ecb_crypt(desc, &walk, blowfish_dec_blk, blowfish_dec_blk_4way);
}
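
/*
 * CBC encryption is inherently serial (each block is chained to the
 * previous ciphertext block), so only the single-block routine is used
 * on the encrypt side.
 */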

static unsigned int __cbc_encrypt(struct blkcipher_desc *desc,
                                  struct blkcipher_walk *walk)
{
        struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = BF_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u64 *src = (u64 *)walk->src.virt.addr;
        u64 *dst = (u64 *)walk->dst.virt.addr;
        u64 *iv = (u64 *)walk->iv;

        do {
                *dst = *src ^ *iv;
                blowfish_enc_blk(ctx, (u8 *)dst, (u8 *)dst);
                iv = dst;

                src += 1;
                dst += 1;
                nbytes -= bsize;
        } while (nbytes >= bsize);

        *(u64 *)walk->iv = *iv;
        return nbytes;
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                nbytes = __cbc_encrypt(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}
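
/*
 * CBC decryption parallelizes well because the ciphertext blocks are
 * independent decryptor inputs.  The walk runs from the last block
 * backwards so the work can be done in place, saving the ciphertext
 * words still needed as chaining values before they are overwritten.
 */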

static unsigned int __cbc_decrypt(struct blkcipher_desc *desc,
                                  struct blkcipher_walk *walk)
{
        struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = BF_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u64 *src = (u64 *)walk->src.virt.addr;
        u64 *dst = (u64 *)walk->dst.virt.addr;
        u64 ivs[4 - 1];
        u64 last_iv;

        /* Start of the last block. */
        src += nbytes / bsize - 1;
        dst += nbytes / bsize - 1;

        last_iv = *src;

        /* Process four block batch */
        if (nbytes >= bsize * 4) {
                do {
                        nbytes -= bsize * 4 - bsize;
                        src -= 4 - 1;
                        dst -= 4 - 1;

                        ivs[0] = src[0];
                        ivs[1] = src[1];
                        ivs[2] = src[2];

                        blowfish_dec_blk_4way(ctx, (u8 *)dst, (u8 *)src);

                        dst[1] ^= ivs[0];
                        dst[2] ^= ivs[1];
                        dst[3] ^= ivs[2];

                        nbytes -= bsize;
                        if (nbytes < bsize)
                                goto done;

                        *dst ^= *(src - 1);
                        src -= 1;
                        dst -= 1;
                } while (nbytes >= bsize * 4);

                if (nbytes < bsize)
                        goto done;
        }

        /* Handle leftovers */
        for (;;) {
                blowfish_dec_blk(ctx, (u8 *)dst, (u8 *)src);

                nbytes -= bsize;
                if (nbytes < bsize)
                        break;

                *dst ^= *(src - 1);
                src -= 1;
                dst -= 1;
        }

done:
        *dst ^= *(u64 *)walk->iv;
        *(u64 *)walk->iv = last_iv;

        return nbytes;
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt(desc, &walk);

        while ((nbytes = walk.nbytes)) {
                nbytes = __cbc_decrypt(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        return err;
}
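
/*
 * CTR mode: the cipher only ever encrypts counter blocks, turning it
 * into a stream cipher.  A trailing partial block is handled here by
 * encrypting the counter into a local keystream buffer and XORing just
 * the remaining nbytes into the output.
 */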

static void ctr_crypt_final(struct bf_ctx *ctx, struct blkcipher_walk *walk)
{
        u8 *ctrblk = walk->iv;
        u8 keystream[BF_BLOCK_SIZE];
        u8 *src = walk->src.virt.addr;
        u8 *dst = walk->dst.virt.addr;
        unsigned int nbytes = walk->nbytes;

        blowfish_enc_blk(ctx, keystream, ctrblk);
        crypto_xor(keystream, src, nbytes);
        memcpy(dst, keystream, nbytes);

        crypto_inc(ctrblk, BF_BLOCK_SIZE);
}
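
/*
 * Full CTR blocks.  The 64-bit counter is kept in host byte order and
 * converted with cpu_to_be64() for each block, matching the big-endian
 * counter layout expected for ctr(blowfish); the updated counter is
 * written back to walk->iv when the segment is done.
 */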

static unsigned int __ctr_crypt(struct blkcipher_desc *desc,
                                struct blkcipher_walk *walk)
{
        struct bf_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        unsigned int bsize = BF_BLOCK_SIZE;
        unsigned int nbytes = walk->nbytes;
        u64 *src = (u64 *)walk->src.virt.addr;
        u64 *dst = (u64 *)walk->dst.virt.addr;
        u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
        __be64 ctrblocks[4];

        /* Process four block batch */
        if (nbytes >= bsize * 4) {
                do {
                        if (dst != src) {
                                dst[0] = src[0];
                                dst[1] = src[1];
                                dst[2] = src[2];
                                dst[3] = src[3];
                        }

                        /* create ctrblks for parallel encrypt */
                        ctrblocks[0] = cpu_to_be64(ctrblk++);
                        ctrblocks[1] = cpu_to_be64(ctrblk++);
                        ctrblocks[2] = cpu_to_be64(ctrblk++);
                        ctrblocks[3] = cpu_to_be64(ctrblk++);

                        blowfish_enc_blk_xor_4way(ctx, (u8 *)dst,
                                                  (u8 *)ctrblocks);

                        src += 4;
                        dst += 4;
                } while ((nbytes -= bsize * 4) >= bsize * 4);

                if (nbytes < bsize)
                        goto done;
        }

        /* Handle leftovers */
        do {
                if (dst != src)
                        *dst = *src;

                ctrblocks[0] = cpu_to_be64(ctrblk++);

                blowfish_enc_blk_xor(ctx, (u8 *)dst, (u8 *)ctrblocks);

                src += 1;
                dst += 1;
        } while ((nbytes -= bsize) >= bsize);

done:
        *(__be64 *)walk->iv = cpu_to_be64(ctrblk);
        return nbytes;
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        struct blkcipher_walk walk;
        int err;

        blkcipher_walk_init(&walk, dst, src, nbytes);
        err = blkcipher_walk_virt_block(desc, &walk, BF_BLOCK_SIZE);

        while ((nbytes = walk.nbytes) >= BF_BLOCK_SIZE) {
                nbytes = __ctr_crypt(desc, &walk);
                err = blkcipher_walk_done(desc, &walk, nbytes);
        }

        if (walk.nbytes) {
                ctr_crypt_final(crypto_blkcipher_ctx(desc->tfm), &walk);
                err = blkcipher_walk_done(desc, &walk, 0);
        }

        return err;
}
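
/*
 * A single crypto_register_algs() call below registers the bare cipher
 * plus the ECB, CBC and CTR blkcipher implementations defined above.
 */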

static struct crypto_alg bf_algs[4] = { {
        .cra_name               = "blowfish",
        .cra_driver_name        = "blowfish-asm",
        .cra_priority           = 200,
        .cra_flags              = CRYPTO_ALG_TYPE_CIPHER,
        .cra_blocksize          = BF_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct bf_ctx),
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .cipher = {
                        .cia_min_keysize        = BF_MIN_KEY_SIZE,
                        .cia_max_keysize        = BF_MAX_KEY_SIZE,
                        .cia_setkey             = blowfish_setkey,
                        .cia_encrypt            = blowfish_encrypt,
                        .cia_decrypt            = blowfish_decrypt,
                }
        }
}, {
        .cra_name               = "ecb(blowfish)",
        .cra_driver_name        = "ecb-blowfish-asm",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = BF_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct bf_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = BF_MIN_KEY_SIZE,
                        .max_keysize    = BF_MAX_KEY_SIZE,
                        .setkey         = blowfish_setkey,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                }
        }
}, {
        .cra_name               = "cbc(blowfish)",
        .cra_driver_name        = "cbc-blowfish-asm",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = BF_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct bf_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = BF_MIN_KEY_SIZE,
                        .max_keysize    = BF_MAX_KEY_SIZE,
                        .ivsize         = BF_BLOCK_SIZE,
                        .setkey         = blowfish_setkey,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                }
        }
}, {
        .cra_name               = "ctr(blowfish)",
        .cra_driver_name        = "ctr-blowfish-asm",
        .cra_priority           = 300,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct bf_ctx),
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = BF_MIN_KEY_SIZE,
                        .max_keysize    = BF_MAX_KEY_SIZE,
                        .ivsize         = BF_BLOCK_SIZE,
                        .setkey         = blowfish_setkey,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                }
        }
} };

static bool is_blacklisted_cpu(void)
{
        if (boot_cpu_data.x86_vendor != X86_VENDOR_INTEL)
                return false;

        if (boot_cpu_data.x86 == 0x0f) {
                /*
                 * On Pentium 4, blowfish-x86_64 is slower than the generic C
                 * implementation because it uses 64-bit rotates, which are
                 * really slow on P4. Therefore blacklist P4s.
                 */
                return true;
        }

        return false;
}

static int force;
module_param(force, int, 0);
MODULE_PARM_DESC(force, "Force module load, ignore CPU blacklist");

static int __init init(void)
{
        if (!force && is_blacklisted_cpu()) {
                printk(KERN_INFO
                        "blowfish-x86_64: performance on this CPU "
                        "would be suboptimal: disabling "
                        "blowfish-x86_64.\n");
                return -ENODEV;
        }

        return crypto_register_algs(bf_algs, ARRAY_SIZE(bf_algs));
}

static void __exit fini(void)
{
        crypto_unregister_algs(bf_algs, ARRAY_SIZE(bf_algs));
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Blowfish Cipher Algorithm, asm optimized");
MODULE_ALIAS("blowfish");
MODULE_ALIAS("blowfish-asm");
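
/*
 * Usage note (illustrative sketch only, not part of this module): other
 * kernel code reaches these implementations through the generic crypto
 * API rather than by calling the functions above directly.  The key, iv,
 * buf and buflen names below are made up for illustration and error
 * handling is trimmed to the bare minimum:
 *
 *      struct crypto_blkcipher *tfm;
 *      struct blkcipher_desc desc;
 *      struct scatterlist sg;
 *
 *      tfm = crypto_alloc_blkcipher("cbc(blowfish)", 0, 0);
 *      if (IS_ERR(tfm))
 *              return PTR_ERR(tfm);
 *
 *      crypto_blkcipher_setkey(tfm, key, keylen);
 *      crypto_blkcipher_set_iv(tfm, iv, BF_BLOCK_SIZE);
 *
 *      desc.tfm = tfm;
 *      desc.flags = 0;
 *      sg_init_one(&sg, buf, buflen);
 *      crypto_blkcipher_encrypt(&desc, &sg, &sg, buflen);
 *
 *      crypto_free_blkcipher(tfm);
 */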