arch/x86/crypto/cast6_avx_glue.c
/*
 * Glue Code for the AVX assembler implementation of the Cast6 Cipher
 *
 * Copyright (C) 2012 Johannes Goetzfried
 *     <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307
 * USA
 */
#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/cast6.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>
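
/*
 * The AVX assembler routines below process eight 128-bit blocks per
 * call, so this is both the batch size and the FPU-usage threshold for
 * the glue helper.
 */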
#define CAST6_PARALLEL_BLOCKS 8
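
/*
 * 8-way AVX implementations, provided by the assembler part of this
 * module (cast6-avx-x86_64-asm_64.S). They may only be called while the
 * FPU is usable, i.e. between kernel_fpu_begin()/kernel_fpu_end(); the
 * glue helper takes care of that.
 */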
asmlinkage void cast6_ecb_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ecb_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);

asmlinkage void cast6_cbc_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src);
asmlinkage void cast6_ctr_8way(struct cast6_ctx *ctx, u8 *dst, const u8 *src,
			       le128 *iv);

asmlinkage void cast6_xts_enc_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
asmlinkage void cast6_xts_dec_8way(struct cast6_ctx *ctx, u8 *dst,
				   const u8 *src, le128 *iv);
static int cast6_setkey_skcipher(struct crypto_skcipher *tfm,
				 const u8 *key, unsigned int keylen)
{
	return cast6_setkey(&tfm->base, key, keylen);
}
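
/*
 * One-block XTS helpers: adapt the generic single-block CAST6 routines
 * to the signature expected by the glue helper, which applies the tweak
 * around the call.
 */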
static void cast6_xts_enc(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_encrypt));
}

static void cast6_xts_dec(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv,
				  GLUE_FUNC_CAST(__cast6_decrypt));
}
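
/*
 * One-block CTR fallback: encrypt the big-endian counter block, XOR the
 * result into the source block, and advance the (little-endian) counter.
 */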
static void cast6_crypt_ctr(void *ctx, u128 *dst, const u128 *src, le128 *iv)
{
	be128 ctrblk;

	le128_to_be128(&ctrblk, iv);
	le128_inc(iv);

	__cast6_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
	u128_xor(dst, src, (u128 *)&ctrblk);
}
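
/*
 * Dispatch tables for the glue helper. The entries are tried in order:
 * while at least CAST6_PARALLEL_BLOCKS blocks remain, the 8-way AVX
 * routine is used (with the FPU enabled); any tail is handled one block
 * at a time by the generic routine.
 */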
static const struct common_glue_ctx cast6_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_encrypt) }
	} }
};
static const struct common_glue_ctx cast6_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_ctr_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = GLUE_CTR_FUNC_CAST(cast6_crypt_ctr) }
	} }
};
static const struct common_glue_ctx cast6_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_enc) }
	} }
};
static const struct common_glue_ctx cast6_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .ecb = GLUE_FUNC_CAST(cast6_ecb_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = GLUE_FUNC_CAST(__cast6_decrypt) }
	} }
};
static const struct common_glue_ctx cast6_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(cast6_cbc_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = GLUE_CBC_FUNC_CAST(__cast6_decrypt) }
	} }
};
static const struct common_glue_ctx cast6_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = CAST6_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = CAST6_PARALLEL_BLOCKS,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec_8way) }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = GLUE_XTS_FUNC_CAST(cast6_xts_dec) }
	} }
};
static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&cast6_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&cast6_dec, req);
}
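
/*
 * CBC encryption is inherently serial (each ciphertext block feeds into
 * the next), so only the one-block routine is used here; CBC decryption
 * can run eight blocks in parallel via cast6_cbc_dec_8way.
 */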
static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(__cast6_encrypt),
					   req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&cast6_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&cast6_ctr, req);
}
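
/*
 * XTS uses two independent CAST6 keys: tweak_ctx encrypts the IV into
 * the initial tweak, crypt_ctx processes the data blocks.
 */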
struct cast6_xts_ctx {
	struct cast6_ctx tweak_ctx;
	struct cast6_ctx crypt_ctx;
};
static int xts_cast6_setkey(struct crypto_skcipher *tfm, const u8 *key,
			    unsigned int keylen)
{
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 *flags = &tfm->base.crt_flags;
	int err;

	err = xts_verify_key(tfm, key, keylen);
	if (err)
		return err;

	/* first half of xts-key is for crypt */
	err = __cast6_setkey(&ctx->crypt_ctx, key, keylen / 2, flags);
	if (err)
		return err;

	/* second half of xts-key is for tweak */
	return __cast6_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2,
			      flags);
}
static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&cast6_enc_xts, req,
				   XTS_TWEAK_CAST(__cast6_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct cast6_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&cast6_dec_xts, req,
				   XTS_TWEAK_CAST(__cast6_encrypt),
				   &ctx->tweak_ctx, &ctx->crypt_ctx);
}
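
/*
 * All algorithms below are CRYPTO_ALG_INTERNAL ("__"-prefixed): they
 * must not be used directly, since they assume a usable FPU. The SIMD
 * wrappers registered at init expose the usual names ("ecb(cast6)" and
 * so on) and defer to cryptd when the FPU is not available.
 */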
static struct skcipher_alg cast6_algs[] = {
	{
		.base.cra_name		= "__ecb(cast6)",
		.base.cra_driver_name	= "__ecb-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(cast6)",
		.base.cra_driver_name	= "__cbc-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(cast6)",
		.base.cra_driver_name	= "__ctr-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct cast6_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= CAST6_MIN_KEY_SIZE,
		.max_keysize		= CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.chunksize		= CAST6_BLOCK_SIZE,
		.setkey			= cast6_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(cast6)",
		.base.cra_driver_name	= "__xts-cast6-avx",
		.base.cra_priority	= 200,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= CAST6_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct cast6_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * CAST6_MIN_KEY_SIZE,
		.max_keysize		= 2 * CAST6_MAX_KEY_SIZE,
		.ivsize			= CAST6_BLOCK_SIZE,
		.setkey			= xts_cast6_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};
static struct simd_skcipher_alg *cast6_simd_algs[ARRAY_SIZE(cast6_algs)];
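
/*
 * AVX state is managed through XSAVE, so verify that the kernel enabled
 * the SSE and YMM xfeature state components before registering;
 * otherwise the AVX assembler routines cannot be used.
 */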
static int __init cast6_init(void)
{
	const char *feature_name;

	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(cast6_algs,
					      ARRAY_SIZE(cast6_algs),
					      cast6_simd_algs);
}
static void __exit cast6_exit(void)
{
	simd_unregister_skciphers(cast6_algs, ARRAY_SIZE(cast6_algs),
				  cast6_simd_algs);
}
module_init(cast6_init);
module_exit(cast6_exit);

MODULE_DESCRIPTION("Cast6 Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
MODULE_ALIAS_CRYPTO("cast6");
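
/*
 * Hypothetical usage sketch (not part of this file): the "__"-prefixed
 * algorithms above are internal-only, so an in-kernel user would request
 * the plain name and transparently get the SIMD wrapper, e.g.:
 *
 *	struct crypto_skcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("xts(cast6)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, keylen);
 *	...
 *	crypto_free_skcipher(tfm);
 */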