/*
 * Glue Code for AVX assembler versions of Serpent Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */

#include <linux/module.h>
#include <linux/hardirq.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/ablk_helper.h>
#include <crypto/algapi.h>
#include <crypto/serpent.h>
#include <crypto/cryptd.h>
#include <crypto/b128ops.h>
#include <crypto/ctr.h>
#include <crypto/lrw.h>
#include <crypto/xts.h>
#include <asm/fpu/api.h>
#include <asm/crypto/serpent-avx.h>
#include <asm/crypto/glue_helper.h>

/* 8-way parallel cipher functions */
asmlinkage void serpent_ecb_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
                                         const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
                                         const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
                                         const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(struct serpent_ctx *ctx, u8 *dst,
                                     const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(struct serpent_ctx *ctx, u8 *dst,
                                         const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(struct serpent_ctx *ctx, u8 *dst,
                                         const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);

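/*
 * One-block CTR fallback used for the tail the 8-way path cannot
 * process: encode the little-endian counter as big-endian, advance the
 * counter, encrypt it and XOR the keystream block into the data.
 */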
void __serpent_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        be128 ctrblk;

        le128_to_be128(&ctrblk, iv);
        le128_inc(iv);

        __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
        u128_xor(dst, src, (u128 *)&ctrblk);
}
EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);

void serpent_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(__serpent_encrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);

void serpent_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
        glue_xts_crypt_128bit_one(ctx, dst, src, iv,
                                  GLUE_FUNC_CAST(__serpent_decrypt));
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);

static const struct common_glue_ctx serpent_enc = {
        .num_funcs = 2,
        .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = SERPENT_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_enc_8way_avx) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_encrypt) }
        } }
};

static const struct common_glue_ctx serpent_ctr = {
        .num_funcs = 2,
        .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = SERPENT_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(serpent_ctr_8way_avx) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(__serpent_crypt_ctr) }
        } }
};

static const struct common_glue_ctx serpent_enc_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = SERPENT_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc_8way_avx) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_enc) }
        } }
};

static const struct common_glue_ctx serpent_dec = {
        .num_funcs = 2,
        .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = SERPENT_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(serpent_ecb_dec_8way_avx) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(__serpent_decrypt) }
        } }
};

static const struct common_glue_ctx serpent_dec_cbc = {
        .num_funcs = 2,
        .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = SERPENT_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(serpent_cbc_dec_8way_avx) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__serpent_decrypt) }
        } }
};

static const struct common_glue_ctx serpent_dec_xts = {
        .num_funcs = 2,
        .fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = SERPENT_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec_8way_avx) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(serpent_xts_dec) }
        } }
};

static int ecb_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&serpent_enc, desc, dst, src, nbytes);
}

static int ecb_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_ecb_crypt_128bit(&serpent_dec, desc, dst, src, nbytes);
}

static int cbc_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_encrypt_128bit(GLUE_FUNC_CAST(__serpent_encrypt), desc,
                                       dst, src, nbytes);
}

static int cbc_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        return glue_cbc_decrypt_128bit(&serpent_dec_cbc, desc, dst, src,
                                       nbytes);
}

static int ctr_crypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                     struct scatterlist *src, unsigned int nbytes)
{
        return glue_ctr_crypt_128bit(&serpent_ctr, desc, dst, src, nbytes);
}

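/*
 * Taking the FPU is costly, so the glue helper only enables it when at
 * least SERPENT_PARALLEL_BLOCKS worth of data is to be processed;
 * smaller requests stay on the scalar implementation.
 */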
static inline bool serpent_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
        return glue_fpu_begin(SERPENT_BLOCK_SIZE, SERPENT_PARALLEL_BLOCKS,
                              NULL, fpu_enabled, nbytes);
}

static inline void serpent_fpu_end(bool fpu_enabled)
{
        glue_fpu_end(fpu_enabled);
}

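/*
 * Per-request state shared with the LRW callbacks below: it carries the
 * serpent key schedule and whether the FPU is currently enabled, so an
 * FPU section can span multiple callback invocations.
 */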
struct crypt_priv {
        struct serpent_ctx *ctx;
        bool fpu_enabled;
};

static void encrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = SERPENT_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
                serpent_ecb_enc_8way_avx(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                __serpent_encrypt(ctx->ctx, srcdst, srcdst);
}

static void decrypt_callback(void *priv, u8 *srcdst, unsigned int nbytes)
{
        const unsigned int bsize = SERPENT_BLOCK_SIZE;
        struct crypt_priv *ctx = priv;
        int i;

        ctx->fpu_enabled = serpent_fpu_begin(ctx->fpu_enabled, nbytes);

        if (nbytes == bsize * SERPENT_PARALLEL_BLOCKS) {
                serpent_ecb_dec_8way_avx(ctx->ctx, srcdst, srcdst);
                return;
        }

        for (i = 0; i < nbytes / bsize; i++, srcdst += bsize)
                __serpent_decrypt(ctx->ctx, srcdst, srcdst);
}

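/*
 * For LRW the supplied key is the serpent key followed by one cipher
 * block of tweak key material, hence the SERPENT_BLOCK_SIZE offsets in
 * the setkey below.
 */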
int lrw_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
                       unsigned int keylen)
{
        struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);
        int err;

        err = __serpent_setkey(&ctx->serpent_ctx, key, keylen -
                                                        SERPENT_BLOCK_SIZE);
        if (err)
                return err;

        return lrw_init_table(&ctx->lrw_table, key + keylen -
                              SERPENT_BLOCK_SIZE);
}
EXPORT_SYMBOL_GPL(lrw_serpent_setkey);

static int lrw_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[SERPENT_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->serpent_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = encrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        serpent_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

static int lrw_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct serpent_lrw_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);
        be128 buf[SERPENT_PARALLEL_BLOCKS];
        struct crypt_priv crypt_ctx = {
                .ctx = &ctx->serpent_ctx,
                .fpu_enabled = false,
        };
        struct lrw_crypt_req req = {
                .tbuf = buf,
                .tbuflen = sizeof(buf),

                .table_ctx = &ctx->lrw_table,
                .crypt_ctx = &crypt_ctx,
                .crypt_fn = decrypt_callback,
        };
        int ret;

        desc->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
        ret = lrw_crypt(desc, dst, src, nbytes, &req);
        serpent_fpu_end(crypt_ctx.fpu_enabled);

        return ret;
}

void lrw_serpent_exit_tfm(struct crypto_tfm *tfm)
{
        struct serpent_lrw_ctx *ctx = crypto_tfm_ctx(tfm);

        lrw_free_table(&ctx->lrw_table);
}
EXPORT_SYMBOL_GPL(lrw_serpent_exit_tfm);

int xts_serpent_setkey(struct crypto_tfm *tfm, const u8 *key,
                       unsigned int keylen)
{
        struct serpent_xts_ctx *ctx = crypto_tfm_ctx(tfm);
        u32 *flags = &tfm->crt_flags;
        int err;

        /* key consists of keys of equal size concatenated, therefore
         * the length must be even
         */
        if (keylen % 2) {
                *flags |= CRYPTO_TFM_RES_BAD_KEY_LEN;
                return -EINVAL;
        }

        /* first half of xts-key is for crypt */
        err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
        if (err)
                return err;

        /* second half of xts-key is for tweak */
        return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
}
EXPORT_SYMBOL_GPL(xts_serpent_setkey);

static int xts_encrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&serpent_enc_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(__serpent_encrypt),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct blkcipher_desc *desc, struct scatterlist *dst,
                       struct scatterlist *src, unsigned int nbytes)
{
        struct serpent_xts_ctx *ctx = crypto_blkcipher_ctx(desc->tfm);

        return glue_xts_crypt_128bit(&serpent_dec_xts, desc, dst, src, nbytes,
                                     XTS_TWEAK_CAST(__serpent_encrypt),
                                     &ctx->tweak_ctx, &ctx->crypt_ctx);
}

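/*
 * Ten algorithm definitions: five CRYPTO_ALG_INTERNAL "__*-serpent-avx"
 * blkciphers that may only run with the FPU available, wrapped by five
 * async ablkcipher helpers (backed by cryptd) that expose
 * ecb/cbc/ctr/lrw/xts(serpent) to the rest of the kernel.
 */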
static struct crypto_alg serpent_algs[10] = { {
        .cra_name               = "__ecb-serpent-avx",
        .cra_driver_name        = "__driver-ecb-serpent-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct serpent_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE,
                        .setkey         = serpent_setkey,
                        .encrypt        = ecb_encrypt,
                        .decrypt        = ecb_decrypt,
                },
        },
}, {
        .cra_name               = "__cbc-serpent-avx",
        .cra_driver_name        = "__driver-cbc-serpent-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct serpent_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE,
                        .setkey         = serpent_setkey,
                        .encrypt        = cbc_encrypt,
                        .decrypt        = cbc_decrypt,
                },
        },
}, {
        .cra_name               = "__ctr-serpent-avx",
        .cra_driver_name        = "__driver-ctr-serpent-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct serpent_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = serpent_setkey,
                        .encrypt        = ctr_crypt,
                        .decrypt        = ctr_crypt,
                },
        },
}, {
        .cra_name               = "__lrw-serpent-avx",
        .cra_driver_name        = "__driver-lrw-serpent-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct serpent_lrw_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_exit               = lrw_serpent_exit_tfm,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE +
                                          SERPENT_BLOCK_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE +
                                          SERPENT_BLOCK_SIZE,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = lrw_serpent_setkey,
                        .encrypt        = lrw_encrypt,
                        .decrypt        = lrw_decrypt,
                },
        },
}, {
        .cra_name               = "__xts-serpent-avx",
        .cra_driver_name        = "__driver-xts-serpent-avx",
        .cra_priority           = 0,
        .cra_flags              = CRYPTO_ALG_TYPE_BLKCIPHER |
                                  CRYPTO_ALG_INTERNAL,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct serpent_xts_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_blkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_u = {
                .blkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE * 2,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE * 2,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = xts_serpent_setkey,
                        .encrypt        = xts_encrypt,
                        .decrypt        = xts_decrypt,
                },
        },
}, {
        .cra_name               = "ecb(serpent)",
        .cra_driver_name        = "ecb-serpent-avx",
        .cra_priority           = 500,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
}, {
        .cra_name               = "cbc(serpent)",
        .cra_driver_name        = "cbc-serpent-avx",
        .cra_priority           = 500,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = __ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
}, {
        .cra_name               = "ctr(serpent)",
        .cra_driver_name        = "ctr-serpent-avx",
        .cra_priority           = 500,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = 1,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_encrypt,
                        .geniv          = "chainiv",
                },
        },
}, {
        .cra_name               = "lrw(serpent)",
        .cra_driver_name        = "lrw-serpent-avx",
        .cra_priority           = 500,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE +
                                          SERPENT_BLOCK_SIZE,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE +
                                          SERPENT_BLOCK_SIZE,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
}, {
        .cra_name               = "xts(serpent)",
        .cra_driver_name        = "xts-serpent-avx",
        .cra_priority           = 500,
        .cra_flags              = CRYPTO_ALG_TYPE_ABLKCIPHER | CRYPTO_ALG_ASYNC,
        .cra_blocksize          = SERPENT_BLOCK_SIZE,
        .cra_ctxsize            = sizeof(struct async_helper_ctx),
        .cra_alignmask          = 0,
        .cra_type               = &crypto_ablkcipher_type,
        .cra_module             = THIS_MODULE,
        .cra_init               = ablk_init,
        .cra_exit               = ablk_exit,
        .cra_u = {
                .ablkcipher = {
                        .min_keysize    = SERPENT_MIN_KEY_SIZE * 2,
                        .max_keysize    = SERPENT_MAX_KEY_SIZE * 2,
                        .ivsize         = SERPENT_BLOCK_SIZE,
                        .setkey         = ablk_set_key,
                        .encrypt        = ablk_encrypt,
                        .decrypt        = ablk_decrypt,
                },
        },
} };

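/*
 * Illustrative usage sketch, not part of this module: after a
 * successful serpent_init(), callers reach these implementations
 * through the generic crypto API, e.g.
 *
 *      struct crypto_ablkcipher *tfm;
 *
 *      tfm = crypto_alloc_ablkcipher("ctr(serpent)", 0, 0);
 *      if (!IS_ERR(tfm))
 *              crypto_ablkcipher_setkey(tfm, key, keylen);
 *
 * The module itself only registers the algorithms when the CPU and OS
 * support the required SSE/YMM xsave state, as checked below.
 */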
static int __init serpent_init(void)
{
        const char *feature_name;

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return crypto_register_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

static void __exit serpent_exit(void)
{
        crypto_unregister_algs(serpent_algs, ARRAY_SIZE(serpent_algs));
}

module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("serpent");