arch/x86/crypto/camellia_aesni_avx2_glue.c
/*
 * Glue Code for x86_64/AVX2/AES-NI assembler optimized version of Camellia
 *
 * Copyright © 2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <asm/crypto/camellia.h>
#include <asm/crypto/glue_helper.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/xts.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <linux/module.h>
#include <linux/types.h>
#define CAMELLIA_AESNI_PARALLEL_BLOCKS 16
#define CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS 32
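/*
 * Blocks processed per invocation: 16 for the shared AVX/AES-NI helpers,
 * 32 for the AVX2 routines declared below.
 */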
/* 32-way AVX2/AES-NI parallel cipher functions */
asmlinkage void camellia_ecb_enc_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
asmlinkage void camellia_ecb_dec_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);

asmlinkage void camellia_cbc_dec_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src);
asmlinkage void camellia_ctr_32way(struct camellia_ctx *ctx, u8 *dst,
                                   const u8 *src, le128 *iv);

asmlinkage void camellia_xts_enc_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
asmlinkage void camellia_xts_dec_32way(struct camellia_ctx *ctx, u8 *dst,
                                       const u8 *src, le128 *iv);
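/*
 * Each common_glue_ctx lists cipher variants from the widest to the
 * narrowest block count; the glue helper uses the widest variant that
 * still fits the data remaining in the walk and defers kernel_fpu_begin()
 * until at least fpu_blocks_limit blocks are pending.
 */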
static const struct common_glue_ctx camellia_enc = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_enc_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_enc_blk) }
        } }
};
static const struct common_glue_ctx camellia_ctr = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_ctr_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ctr = GLUE_CTR_FUNC_CAST(camellia_crypt_ctr) }
        } }
};
static const struct common_glue_ctx camellia_enc_xts = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_enc) }
        } }
};
static const struct common_glue_ctx camellia_dec = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_ecb_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .ecb = GLUE_FUNC_CAST(camellia_dec_blk) }
        } }
};
static const struct common_glue_ctx camellia_dec_cbc = {
        .num_funcs = 4,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_cbc_dec_16way) }
        }, {
                .num_blocks = 2,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_decrypt_cbc_2way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .cbc = GLUE_CBC_FUNC_CAST(camellia_dec_blk) }
        } }
};
static const struct common_glue_ctx camellia_dec_xts = {
        .num_funcs = 3,
        .fpu_blocks_limit = CAMELLIA_AESNI_PARALLEL_BLOCKS,

        .funcs = { {
                .num_blocks = CAMELLIA_AESNI_AVX2_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_32way) }
        }, {
                .num_blocks = CAMELLIA_AESNI_PARALLEL_BLOCKS,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec_16way) }
        }, {
                .num_blocks = 1,
                .fn_u = { .xts = GLUE_XTS_FUNC_CAST(camellia_xts_dec) }
        } }
};
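/* Key setup is shared with the plain x86_64 Camellia implementation. */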
static int camellia_setkey(struct crypto_skcipher *tfm, const u8 *key,
                           unsigned int keylen)
{
        return __camellia_setkey(crypto_skcipher_ctx(tfm), key, keylen,
                                 &tfm->base.crt_flags);
}
static int ecb_encrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
        return glue_ecb_req_128bit(&camellia_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
        return glue_cbc_encrypt_req_128bit(GLUE_FUNC_CAST(camellia_enc_blk),
                                           req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
        return glue_cbc_decrypt_req_128bit(&camellia_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
        return glue_ctr_req_128bit(&camellia_ctr, req);
}
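/*
 * XTS uses two independent Camellia keys: ->crypt_ctx encrypts the data
 * blocks while ->tweak_ctx (via camellia_enc_blk) encrypts the tweak.
 */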
static int xts_encrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_enc_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}

static int xts_decrypt(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct camellia_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

        return glue_xts_req_128bit(&camellia_dec_xts, req,
                                   XTS_TWEAK_CAST(camellia_enc_blk),
                                   &ctx->tweak_ctx, &ctx->crypt_ctx);
}
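/*
 * These algorithms are marked CRYPTO_ALG_INTERNAL (and given the "__"
 * prefix) because they must only run where the FPU is usable.  The simd
 * wrappers registered below expose them under the plain names and fall
 * back to cryptd when SIMD cannot be used in the calling context.
 */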
static struct skcipher_alg camellia_algs[] = {
        {
                .base.cra_name = "__ecb(camellia)",
                .base.cra_driver_name = "__ecb-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct camellia_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = CAMELLIA_MAX_KEY_SIZE,
                .setkey = camellia_setkey,
                .encrypt = ecb_encrypt,
                .decrypt = ecb_decrypt,
        }, {
                .base.cra_name = "__cbc(camellia)",
                .base.cra_driver_name = "__cbc-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct camellia_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = CAMELLIA_MAX_KEY_SIZE,
                .ivsize = CAMELLIA_BLOCK_SIZE,
                .setkey = camellia_setkey,
                .encrypt = cbc_encrypt,
                .decrypt = cbc_decrypt,
        }, {
                .base.cra_name = "__ctr(camellia)",
                .base.cra_driver_name = "__ctr-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = 1,
                .base.cra_ctxsize = sizeof(struct camellia_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = CAMELLIA_MAX_KEY_SIZE,
                .ivsize = CAMELLIA_BLOCK_SIZE,
                .chunksize = CAMELLIA_BLOCK_SIZE,
                .setkey = camellia_setkey,
                .encrypt = ctr_crypt,
                .decrypt = ctr_crypt,
        }, {
                .base.cra_name = "__xts(camellia)",
                .base.cra_driver_name = "__xts-camellia-aesni-avx2",
                .base.cra_priority = 500,
                .base.cra_flags = CRYPTO_ALG_INTERNAL,
                .base.cra_blocksize = CAMELLIA_BLOCK_SIZE,
                .base.cra_ctxsize = sizeof(struct camellia_xts_ctx),
                .base.cra_module = THIS_MODULE,
                .min_keysize = 2 * CAMELLIA_MIN_KEY_SIZE,
                .max_keysize = 2 * CAMELLIA_MAX_KEY_SIZE,
                .ivsize = CAMELLIA_BLOCK_SIZE,
                .setkey = xts_camellia_setkey,
                .encrypt = xts_encrypt,
                .decrypt = xts_decrypt,
        },
};
static struct simd_skcipher_alg *camellia_simd_algs[ARRAY_SIZE(camellia_algs)];
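/*
 * Registration requires AVX2, AES-NI and OS-enabled XSAVE of the SSE/YMM
 * state; the compat helper registers the internal algorithms together
 * with their simd wrappers.
 */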
static int __init camellia_aesni_init(void)
{
        const char *feature_name;

        if (!boot_cpu_has(X86_FEATURE_AVX) ||
            !boot_cpu_has(X86_FEATURE_AVX2) ||
            !boot_cpu_has(X86_FEATURE_AES) ||
            !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
                pr_info("AVX2 or AES-NI instructions are not detected.\n");
                return -ENODEV;
        }

        if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
                               &feature_name)) {
                pr_info("CPU feature '%s' is not supported.\n", feature_name);
                return -ENODEV;
        }

        return simd_register_skciphers_compat(camellia_algs,
                                              ARRAY_SIZE(camellia_algs),
                                              camellia_simd_algs);
}
static void __exit camellia_aesni_fini(void)
{
        simd_unregister_skciphers(camellia_algs, ARRAY_SIZE(camellia_algs),
                                  camellia_simd_algs);
}
module_init(camellia_aesni_init);
module_exit(camellia_aesni_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Camellia Cipher Algorithm, AES-NI/AVX2 optimized");
MODULE_ALIAS_CRYPTO("camellia");
MODULE_ALIAS_CRYPTO("camellia-asm");