/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
26 #include <linux/module.h>
27 #include <linux/types.h>
28 #include <linux/crypto.h>
29 #include <linux/err.h>
30 #include <crypto/algapi.h>
31 #include <crypto/cast6.h>
32 #include <crypto/internal/simd.h>
33 #include <crypto/xts.h>
34 #include <asm/crypto/glue_helper.h>
36 #define CAST6_PARALLEL_BLOCKS 8
38 asmlinkage
void cast6_ecb_enc_8way(struct cast6_ctx
*ctx
, u8
*dst
,
40 asmlinkage
void cast6_ecb_dec_8way(struct cast6_ctx
*ctx
, u8
*dst
,
43 asmlinkage
void cast6_cbc_dec_8way(struct cast6_ctx
*ctx
, u8
*dst
,
45 asmlinkage
void cast6_ctr_8way(struct cast6_ctx
*ctx
, u8
*dst
, const u8
*src
,
48 asmlinkage
void cast6_xts_enc_8way(struct cast6_ctx
*ctx
, u8
*dst
,
49 const u8
*src
, le128
*iv
);
50 asmlinkage
void cast6_xts_dec_8way(struct cast6_ctx
*ctx
, u8
*dst
,
51 const u8
*src
, le128
*iv
);
53 static int cast6_setkey_skcipher(struct crypto_skcipher
*tfm
,
54 const u8
*key
, unsigned int keylen
)
56 return cast6_setkey(&tfm
->base
, key
, keylen
);
59 static void cast6_xts_enc(void *ctx
, u128
*dst
, const u128
*src
, le128
*iv
)
61 glue_xts_crypt_128bit_one(ctx
, dst
, src
, iv
,
62 GLUE_FUNC_CAST(__cast6_encrypt
));
65 static void cast6_xts_dec(void *ctx
, u128
*dst
, const u128
*src
, le128
*iv
)
67 glue_xts_crypt_128bit_one(ctx
, dst
, src
, iv
,
68 GLUE_FUNC_CAST(__cast6_decrypt
));
71 static void cast6_crypt_ctr(void *ctx
, u128
*dst
, const u128
*src
, le128
*iv
)
75 le128_to_be128(&ctrblk
, iv
);
78 __cast6_encrypt(ctx
, (u8
*)&ctrblk
, (u8
*)&ctrblk
);
79 u128_xor(dst
, src
, (u128
*)&ctrblk
);
82 static const struct common_glue_ctx cast6_enc
= {
84 .fpu_blocks_limit
= CAST6_PARALLEL_BLOCKS
,
87 .num_blocks
= CAST6_PARALLEL_BLOCKS
,
88 .fn_u
= { .ecb
= GLUE_FUNC_CAST(cast6_ecb_enc_8way
) }
91 .fn_u
= { .ecb
= GLUE_FUNC_CAST(__cast6_encrypt
) }
95 static const struct common_glue_ctx cast6_ctr
= {
97 .fpu_blocks_limit
= CAST6_PARALLEL_BLOCKS
,
100 .num_blocks
= CAST6_PARALLEL_BLOCKS
,
101 .fn_u
= { .ctr
= GLUE_CTR_FUNC_CAST(cast6_ctr_8way
) }
104 .fn_u
= { .ctr
= GLUE_CTR_FUNC_CAST(cast6_crypt_ctr
) }
108 static const struct common_glue_ctx cast6_enc_xts
= {
110 .fpu_blocks_limit
= CAST6_PARALLEL_BLOCKS
,
113 .num_blocks
= CAST6_PARALLEL_BLOCKS
,
114 .fn_u
= { .xts
= GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way
) }
117 .fn_u
= { .xts
= GLUE_XTS_FUNC_CAST(cast6_xts_enc
) }
121 static const struct common_glue_ctx cast6_dec
= {
123 .fpu_blocks_limit
= CAST6_PARALLEL_BLOCKS
,
126 .num_blocks
= CAST6_PARALLEL_BLOCKS
,
127 .fn_u
= { .ecb
= GLUE_FUNC_CAST(cast6_ecb_dec_8way
) }
130 .fn_u
= { .ecb
= GLUE_FUNC_CAST(__cast6_decrypt
) }
134 static const struct common_glue_ctx cast6_dec_cbc
= {
136 .fpu_blocks_limit
= CAST6_PARALLEL_BLOCKS
,
139 .num_blocks
= CAST6_PARALLEL_BLOCKS
,
140 .fn_u
= { .cbc
= GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way
) }
143 .fn_u
= { .cbc
= GLUE_CBC_FUNC_CAST(__cast6_decrypt
) }
147 static const struct common_glue_ctx cast6_dec_xts
= {
149 .fpu_blocks_limit
= CAST6_PARALLEL_BLOCKS
,
152 .num_blocks
= CAST6_PARALLEL_BLOCKS
,
153 .fn_u
= { .xts
= GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way
) }
156 .fn_u
= { .xts
= GLUE_XTS_FUNC_CAST(cast6_xts_dec
) }
160 static int ecb_encrypt(struct skcipher_request
*req
)
162 return glue_ecb_req_128bit(&cast6_enc
, req
);
165 static int ecb_decrypt(struct skcipher_request
*req
)
167 return glue_ecb_req_128bit(&cast6_dec
, req
);
170 static int cbc_encrypt(struct skcipher_request
*req
)
172 return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__cast6_encrypt
),
176 static int cbc_decrypt(struct skcipher_request
*req
)
178 return glue_cbc_decrypt_req_128bit(&cast6_dec_cbc
, req
);
181 static int ctr_crypt(struct skcipher_request
*req
)
183 return glue_ctr_req_128bit(&cast6_ctr
, req
);
186 struct cast6_xts_ctx
{
187 struct cast6_ctx tweak_ctx
;
188 struct cast6_ctx crypt_ctx
;
191 static int xts_cast6_setkey(struct crypto_skcipher
*tfm
, const u8
*key
,
194 struct cast6_xts_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
195 u32
*flags
= &tfm
->base
.crt_flags
;
198 err
= xts_verify_key(tfm
, key
, keylen
);
202 /* first half of xts-key is for crypt */
203 err
= __cast6_setkey(&ctx
->crypt_ctx
, key
, keylen
/ 2, flags
);
207 /* second half of xts-key is for tweak */
208 return __cast6_setkey(&ctx
->tweak_ctx
, key
+ keylen
/ 2, keylen
/ 2,
212 static int xts_encrypt(struct skcipher_request
*req
)
214 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
215 struct cast6_xts_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
217 return glue_xts_req_128bit(&cast6_enc_xts
, req
,
218 XTS_TWEAK_CAST(__cast6_encrypt
),
219 &ctx
->tweak_ctx
, &ctx
->crypt_ctx
);
222 static int xts_decrypt(struct skcipher_request
*req
)
224 struct crypto_skcipher
*tfm
= crypto_skcipher_reqtfm(req
);
225 struct cast6_xts_ctx
*ctx
= crypto_skcipher_ctx(tfm
);
227 return glue_xts_req_128bit(&cast6_dec_xts
, req
,
228 XTS_TWEAK_CAST(__cast6_encrypt
),
229 &ctx
->tweak_ctx
, &ctx
->crypt_ctx
);
232 static struct skcipher_alg cast6_algs
[] = {
234 .base
.cra_name
= "__ecb(cast6)",
235 .base
.cra_driver_name
= "__ecb-cast6-avx",
236 .base
.cra_priority
= 200,
237 .base
.cra_flags
= CRYPTO_ALG_INTERNAL
,
238 .base
.cra_blocksize
= CAST6_BLOCK_SIZE
,
239 .base
.cra_ctxsize
= sizeof(struct cast6_ctx
),
240 .base
.cra_module
= THIS_MODULE
,
241 .min_keysize
= CAST6_MIN_KEY_SIZE
,
242 .max_keysize
= CAST6_MAX_KEY_SIZE
,
243 .setkey
= cast6_setkey_skcipher
,
244 .encrypt
= ecb_encrypt
,
245 .decrypt
= ecb_decrypt
,
247 .base
.cra_name
= "__cbc(cast6)",
248 .base
.cra_driver_name
= "__cbc-cast6-avx",
249 .base
.cra_priority
= 200,
250 .base
.cra_flags
= CRYPTO_ALG_INTERNAL
,
251 .base
.cra_blocksize
= CAST6_BLOCK_SIZE
,
252 .base
.cra_ctxsize
= sizeof(struct cast6_ctx
),
253 .base
.cra_module
= THIS_MODULE
,
254 .min_keysize
= CAST6_MIN_KEY_SIZE
,
255 .max_keysize
= CAST6_MAX_KEY_SIZE
,
256 .ivsize
= CAST6_BLOCK_SIZE
,
257 .setkey
= cast6_setkey_skcipher
,
258 .encrypt
= cbc_encrypt
,
259 .decrypt
= cbc_decrypt
,
261 .base
.cra_name
= "__ctr(cast6)",
262 .base
.cra_driver_name
= "__ctr-cast6-avx",
263 .base
.cra_priority
= 200,
264 .base
.cra_flags
= CRYPTO_ALG_INTERNAL
,
265 .base
.cra_blocksize
= 1,
266 .base
.cra_ctxsize
= sizeof(struct cast6_ctx
),
267 .base
.cra_module
= THIS_MODULE
,
268 .min_keysize
= CAST6_MIN_KEY_SIZE
,
269 .max_keysize
= CAST6_MAX_KEY_SIZE
,
270 .ivsize
= CAST6_BLOCK_SIZE
,
271 .chunksize
= CAST6_BLOCK_SIZE
,
272 .setkey
= cast6_setkey_skcipher
,
273 .encrypt
= ctr_crypt
,
274 .decrypt
= ctr_crypt
,
276 .base
.cra_name
= "__xts(cast6)",
277 .base
.cra_driver_name
= "__xts-cast6-avx",
278 .base
.cra_priority
= 200,
279 .base
.cra_flags
= CRYPTO_ALG_INTERNAL
,
280 .base
.cra_blocksize
= CAST6_BLOCK_SIZE
,
281 .base
.cra_ctxsize
= sizeof(struct cast6_xts_ctx
),
282 .base
.cra_module
= THIS_MODULE
,
283 .min_keysize
= 2 * CAST6_MIN_KEY_SIZE
,
284 .max_keysize
= 2 * CAST6_MAX_KEY_SIZE
,
285 .ivsize
= CAST6_BLOCK_SIZE
,
286 .setkey
= xts_cast6_setkey
,
287 .encrypt
= xts_encrypt
,
288 .decrypt
= xts_decrypt
,
/* simd wrapper handles, filled in by simd_register_skciphers_compat(). */
static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];
294 static int __init
cast6_init(void)
296 const char *feature_name
;
298 if (!cpu_has_xfeatures(XFEATURE_MASK_SSE
| XFEATURE_MASK_YMM
,
300 pr_info("CPU feature '%s' is not supported.\n", feature_name
);
304 return simd_register_skciphers_compat(cast6_algs
,
305 ARRAY_SIZE(cast6_algs
),
309 static void __exit
cast6_exit(void)
311 simd_unregister_skciphers(cast6_algs
, ARRAY_SIZE(cast6_algs
),
module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");