/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/xcr.h>
#include <asm/xsave.h>
#include <asm/crypto/glue_helper.h>

#define CAST6_PARALLEL_BLOCKS 8

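/*
 * 8-way parallel cipher routines, implemented in AVX assembler
 * (cast6-avx-x86_64-asm_64.S). Each call processes
 * CAST6_PARALLEL_BLOCKS (8) consecutive 16-byte blocks.
 */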
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);

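/*
 * Single-block XTS helpers used when fewer than 8 blocks remain;
 * they wrap the generic C implementation via the glue helper.
 */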
static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_decrypt));
}

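/*
 * Scalar CTR fallback: encrypt the big-endian counter block, XOR it
 * into the source block, then advance the little-endian IV by one.
 */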
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}

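/*
 * Dispatch tables for the glue helper. The helper walks funcs[] and
 * picks the widest num_blocks that still fits the remaining data, so
 * bulk data takes the 8-way AVX path and the tail falls back to the
 * one-block C implementation. fpu_blocks_limit sets the minimum batch
 * size worth entering an FPU/AVX section for.
 */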
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
	} }
};

static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
	} }
};

static const struct common_glue_ctx cast6_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
	} }
};

static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
	} }
};

static const struct common_glue_ctx cast6_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
	} }
};

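/* blkcipher entry points; all real work happens in the glue helper. */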
static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_ecb_crypt_128bit(&cast6_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__cast6_encrypt), desc,
				       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	return glue_cbc_decrypt_128bit(&cast6_dec_cbc, desc, dst, src,
				       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		     struct scatterlist *src, unsigned int nbytes)
{
	return glue_ctr_crypt_128bit(&cast6_ctr, desc, dst, src, nbytes);
}

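/*
 * FPU section helpers: glue_fpu_begin() only enters an FPU section
 * once at least a full 8-block batch is pending, so short requests
 * never pay the XSAVE/XRSTOR cost.
 */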
static inline bool cast6_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	return glue_fpu_begin(CAST6_BLOCK_SIZE, CAST6_PARALLEL_BLOCKS,
			      NULL, fpu_enabled, nbytes);
}

static inline void cast6_fpu_end(bool fpu_enabled)
{
	glue_fpu_end(fpu_enabled);
}

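/* Carries the key context and FPU state across LRW crypt callbacks. */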
struct crypt_priv {
	struct cast6_ctx *ctx;
	bool fpu_enabled;
};

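/*
 * In-place callbacks for lrw_crypt(): a full batch takes the 8-way
 * AVX path, anything shorter is processed block by block in C.
 */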
static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_enc_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
	const unsigned int bsize = CAST6_BLOCK_SIZE;
	struct crypt_priv *ctx = priv;
	int i;

	ctx->fpu_enabled = cast6_fpu_begin(ctx->fpu_enabled, nbytes);

	if (nbytes == bsize * CAST6_PARALLEL_BLOCKS) {
		cast6_ecb_dec_8way(ctx->ctx, srcdst, srcdst);
		return;
	}

	for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
		__cast6_decrypt(ctx->ctx, srcdst, srcdst);
}

struct cast6_lrw_ctx {
	struct lrw_table_ctx lrw_table;
	struct cast6_ctx cast6_ctx;
};

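/*
 * LRW expects the cipher key concatenated with a CAST6_BLOCK_SIZE
 * tweak key; the tweak part is peeled off the end of the key here.
 */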
static int lrw_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
	int err;

	err = __cast6_setkey(&ctx->cast6_ctx, key, keylen - CAST6_BLOCK_SIZE,
			     &tfm->crt_flags);
	if (err)
		return err;

	return lrw_init_table(&ctx->lrw_table, key + keylen -
			      CAST6_BLOCK_SIZE);
}

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),
		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = encrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
	be128 buf[CAST6_PARALLEL_BLOCKS];
	struct crypt_priv crypt_ctx = {
		.ctx = &ctx->cast6_ctx,
		.fpu_enabled = false,
	};
	struct lrw_crypt_req req = {
		.tbuf = buf,
		.tbuflen = sizeof(buf),
		.table_ctx = &ctx->lrw_table,
		.crypt_ctx = &crypt_ctx,
		.crypt_fn = decrypt_callback,
	};
	int ret;

	desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
	ret = lrw_crypt(desc, dst, src, nbytes, &req);
	cast6_fpu_end(crypt_ctx.fpu_enabled);

	return ret;
}

static void lrw_exit_tfm(struct crypto_tfm *tfm)
{
	struct cast6_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

	lrw_free_table(&ctx->lrw_table);
}

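/* XTS uses two independent keys: one for the tweak, one for the data. */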
struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};

static int xts_cast6_setkey(struct crypto_tfm *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_tfm_ctx(tfm);
	u32 *flags = &tfm->crt_flags;
	int err;

	/* key consists of keys of equal size concatenated, therefore
	 * the length must be even
	 */
	if (keylen % 2) {
		*flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
		return -EINVAL;
	}

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
			      flags);
}

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&cast6_enc_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__cast6_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
		       struct scatterlist *src, unsigned int nbytes)
{
	struct cast6_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

	return glue_xts_crypt_128bit(&cast6_dec_xts, desc, dst, src, nbytes,
				     XTS_TWEAK_CAST(__cast6_encrypt),
				     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

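/*
 * Ten algorithm instances: the first five are the synchronous
 * "__"-prefixed helpers (marked CRYPTO_ALG_INTERNAL, so not directly
 * user-visible), the last five are the async ablkcipher wrappers that
 * dispatch to them through cryptd. Users reach these through the
 * normal API, e.g. (sketch; error handling omitted):
 *
 *	struct crypto_ablkcipher *tfm =
 *		crypto_alloc_ablkcipher("xts(cast6)", 0, 0);
 */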
static struct crypto_alg cast6_algs[10] = { {
	.cra_name		= "__ecb-cast6-avx",
	.cra_driver_name	= "__driver-ecb-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= ecb_encrypt,
			.decrypt	= ecb_decrypt,
		},
	},
}, {
	.cra_name		= "__cbc-cast6-avx",
	.cra_driver_name	= "__driver-cbc-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= cbc_encrypt,
			.decrypt	= cbc_decrypt,
		},
	},
}, {
	.cra_name		= "__ctr-cast6-avx",
	.cra_driver_name	= "__driver-ctr-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct cast6_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= cast6_setkey,
			.encrypt	= ctr_crypt,
			.decrypt	= ctr_crypt,
		},
	},
}, {
	.cra_name		= "__lrw-cast6-avx",
	.cra_driver_name	= "__driver-lrw-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_lrw_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_exit		= lrw_exit_tfm,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= lrw_cast6_setkey,
			.encrypt	= lrw_encrypt,
			.decrypt	= lrw_decrypt,
		},
	},
}, {
	.cra_name		= "__xts-cast6-avx",
	.cra_driver_name	= "__driver-xts-cast6-avx",
	.cra_priority		= 0,
	.cra_flags		= CRYPTO_ALG_TYPE_BLKCIPHER |
				  CRYPTO_ALG_INTERNAL,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct cast6_xts_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_blkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_u = {
		.blkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE * 2,
			.max_keysize	= CAST6_MAX_KEY_SIZE * 2,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= xts_cast6_setkey,
			.encrypt	= xts_encrypt,
			.decrypt	= xts_decrypt,
		},
	},
}, {
	.cra_name		= "ecb(cast6)",
	.cra_driver_name	= "ecb-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "cbc(cast6)",
	.cra_driver_name	= "cbc-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= __ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "ctr(cast6)",
	.cra_driver_name	= "ctr-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= 1,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_encrypt,
		},
	},
}, {
	.cra_name		= "lrw(cast6)",
	.cra_driver_name	= "lrw-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.max_keysize	= CAST6_MAX_KEY_SIZE +
					  CAST6_BLOCK_SIZE,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
}, {
	.cra_name		= "xts(cast6)",
	.cra_driver_name	= "xts-cast6-avx",
	.cra_priority		= 200,
	.cra_flags		= CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
	.cra_blocksize		= CAST6_BLOCK_SIZE,
	.cra_ctxsize		= sizeof(struct async_helper_ctx),
	.cra_alignmask		= 0,
	.cra_type		= &crypto_ablkcipher_type,
	.cra_module		= THIS_MODULE,
	.cra_init		= ablk_init,
	.cra_exit		= ablk_exit,
	.cra_u = {
		.ablkcipher = {
			.min_keysize	= CAST6_MIN_KEY_SIZE * 2,
			.max_keysize	= CAST6_MAX_KEY_SIZE * 2,
			.ivsize		= CAST6_BLOCK_SIZE,
			.setkey		= ablk_set_key,
			.encrypt	= ablk_encrypt,
			.decrypt	= ablk_decrypt,
		},
	},
} };

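/*
 * Registration gates on both the AVX CPUID bit and OSXSAVE, then
 * verifies via XCR0 that the OS actually saves SSE and YMM state.
 */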
static int __init cast6_init(void)
{
	u64 xcr0;

	if (!cpu_has_avx || !cpu_has_osxsave) {
		pr_info("AVX instructions are not detected.\n");
		return -ENODEV;
	}

	xcr0 = xgetbv(XCR_XFEATURE_ENABLED_MASK);
	if ((xcr0 & (XSTATE_SSE | XSTATE_YMM)) != (XSTATE_SSE | XSTATE_YMM)) {
		pr_info("AVX detected but unusable.\n");
		return -ENODEV;
	}

	return crypto_register_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

static void __exit cast6_exit(void)
{
	crypto_unregister_algs(cast6_algs, ARRAY_SIZE(cast6_algs));
}

module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");