Merge tag 'sched-urgent-2020-12-27' of git://git.kernel.org/pub/scm/linux/kernel...
[linux/fpc-iii.git] / arch / x86 / crypto / serpent_avx_glue.c
blob7806d1cbe8541e8f35936e9cf44814c3ea2a0712
1 // SPDX-License-Identifier: GPL-2.0-or-later
2 /*
3 * Glue Code for AVX assembler versions of Serpent Cipher
5 * Copyright (C) 2012 Johannes Goetzfried
6 * <Johannes.Goetzfried@informatik.stud.uni-erlangen.de>
8 * Copyright © 2011-2013 Jussi Kivilinna <jussi.kivilinna@iki.fi>
9 */
11 #include <linux/module.h>
12 #include <linux/types.h>
13 #include <linux/crypto.h>
14 #include <linux/err.h>
15 #include <crypto/algapi.h>
16 #include <crypto/internal/simd.h>
17 #include <crypto/serpent.h>
18 #include <crypto/xts.h>
19 #include <asm/crypto/glue_helper.h>
20 #include <asm/crypto/serpent-avx.h>
/*
 * 8-way parallel cipher functions, implemented in AVX assembler
 * (serpent-avx-x86_64-asm_64.S).  All take an expanded-key context,
 * a destination buffer and a source buffer; the CTR/XTS variants also
 * take a 128-bit IV/tweak.  Exported so the AVX2 glue module can reuse
 * them as its narrower fallback path.
 */
asmlinkage void serpent_ecb_enc_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_enc_8way_avx);

asmlinkage void serpent_ecb_dec_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_ecb_dec_8way_avx);

asmlinkage void serpent_cbc_dec_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src);
EXPORT_SYMBOL_GPL(serpent_cbc_dec_8way_avx);

asmlinkage void serpent_ctr_8way_avx(const void *ctx, u8 *dst, const u8 *src,
				     le128 *iv);
EXPORT_SYMBOL_GPL(serpent_ctr_8way_avx);

asmlinkage void serpent_xts_enc_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_enc_8way_avx);

asmlinkage void serpent_xts_dec_8way_avx(const void *ctx, u8 *dst,
					 const u8 *src, le128 *iv);
EXPORT_SYMBOL_GPL(serpent_xts_dec_8way_avx);
47 void __serpent_crypt_ctr(const void *ctx, u8 *d, const u8 *s, le128 *iv)
49 be128 ctrblk;
50 u128 *dst = (u128 *)d;
51 const u128 *src = (const u128 *)s;
53 le128_to_be128(&ctrblk, iv);
54 le128_inc(iv);
56 __serpent_encrypt(ctx, (u8 *)&ctrblk, (u8 *)&ctrblk);
57 u128_xor(dst, src, (u128 *)&ctrblk);
59 EXPORT_SYMBOL_GPL(__serpent_crypt_ctr);
/* One-block XTS encryption fallback used when fewer than 8 blocks remain. */
void serpent_xts_enc(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __serpent_encrypt);
}
EXPORT_SYMBOL_GPL(serpent_xts_enc);
/* One-block XTS decryption fallback used when fewer than 8 blocks remain. */
void serpent_xts_dec(const void *ctx, u8 *dst, const u8 *src, le128 *iv)
{
	glue_xts_crypt_128bit_one(ctx, dst, src, iv, __serpent_decrypt);
}
EXPORT_SYMBOL_GPL(serpent_xts_dec);
/* skcipher ->setkey adapter: expand the key into the tfm's context. */
static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
				   const u8 *key, unsigned int keylen)
{
	return __serpent_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}
79 int xts_serpent_setkey(struct crypto_skcipher *tfm, const u8 *key,
80 unsigned int keylen)
82 struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
83 int err;
85 err = xts_verify_key(tfm, key, keylen);
86 if (err)
87 return err;
89 /* first half of xts-key is for crypt */
90 err = __serpent_setkey(&ctx->crypt_ctx, key, keylen / 2);
91 if (err)
92 return err;
94 /* second half of xts-key is for tweak */
95 return __serpent_setkey(&ctx->tweak_ctx, key + keylen / 2, keylen / 2);
97 EXPORT_SYMBOL_GPL(xts_serpent_setkey);
/* ECB encryption dispatch: 8-way AVX path, falling back to one scalar
 * block at a time once fewer than SERPENT_PARALLEL_BLOCKS remain. */
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = serpent_ecb_enc_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = __serpent_encrypt }
	} }
};
/* CTR dispatch: 8-way AVX path plus the one-block __serpent_crypt_ctr
 * fallback defined above. */
static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ctr = serpent_ctr_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = __serpent_crypt_ctr }
	} }
};
/* XTS encryption dispatch: 8-way AVX path with one-block fallback. */
static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = serpent_xts_enc_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = serpent_xts_enc }
	} }
};
/* ECB decryption dispatch: 8-way AVX path with one-block fallback. */
static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .ecb = serpent_ecb_dec_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = __serpent_decrypt }
	} }
};
/* CBC decryption dispatch: 8-way AVX path with one-block fallback.
 * (CBC encryption is inherently serial and uses __serpent_encrypt
 * directly in cbc_encrypt() below.) */
static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .cbc = serpent_cbc_dec_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = __serpent_decrypt }
	} }
};
/* XTS decryption dispatch: 8-way AVX path with one-block fallback. */
static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 2,
	.fpu_blocks_limit = SERPENT_PARALLEL_BLOCKS,

	.funcs = { {
		.num_blocks = SERPENT_PARALLEL_BLOCKS,
		.fn_u = { .xts = serpent_xts_dec_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = serpent_xts_dec }
	} }
};
/* skcipher ->encrypt for ecb(serpent). */
static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_enc, req);
}
/* skcipher ->decrypt for ecb(serpent). */
static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_dec, req);
}
/* skcipher ->encrypt for cbc(serpent); serial, so no AVX dispatch table. */
static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(__serpent_encrypt, req);
}
/* skcipher ->decrypt for cbc(serpent). */
static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&serpent_dec_cbc, req);
}
/* skcipher ->encrypt/->decrypt for ctr(serpent); CTR is symmetric. */
static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&serpent_ctr, req);
}
202 static int xts_encrypt(struct skcipher_request *req)
204 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
205 struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
207 return glue_xts_req_128bit(&serpent_enc_xts, req,
208 __serpent_encrypt, &ctx->tweak_ctx,
209 &ctx->crypt_ctx, false);
212 static int xts_decrypt(struct skcipher_request *req)
214 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
215 struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);
217 return glue_xts_req_128bit(&serpent_dec_xts, req,
218 __serpent_encrypt, &ctx->tweak_ctx,
219 &ctx->crypt_ctx, true);
/*
 * Internal (CRYPTO_ALG_INTERNAL) skcipher algorithms.  The "__" prefix
 * marks them as usable only under kernel_fpu_begin(); user-visible
 * variants are the simd wrappers registered from serpent_init().
 */
static struct skcipher_alg serpent_algs[] = {
	{
		.base.cra_name		= "__ecb(serpent)",
		.base.cra_driver_name	= "__ecb-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(serpent)",
		.base.cra_driver_name	= "__cbc-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		/* CTR is a stream mode: blocksize 1, chunked per cipher block. */
		.base.cra_name		= "__ctr(serpent)",
		.base.cra_driver_name	= "__ctr-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.chunksize		= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		/* XTS takes a double-length key (crypt half + tweak half). */
		.base.cra_name		= "__xts(serpent)",
		.base.cra_driver_name	= "__xts-serpent-avx",
		.base.cra_priority	= 500,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * SERPENT_MIN_KEY_SIZE,
		.max_keysize		= 2 * SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= xts_serpent_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};

/* simd wrapper handles, filled in by simd_register_skciphers_compat(). */
static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];
284 static int __init serpent_init(void)
286 const char *feature_name;
288 if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
289 &feature_name)) {
290 pr_info("CPU feature '%s' is not supported.\n", feature_name);
291 return -ENODEV;
294 return simd_register_skciphers_compat(serpent_algs,
295 ARRAY_SIZE(serpent_algs),
296 serpent_simd_algs);
/* Module exit: unregister the algorithms and their simd wrappers. */
static void __exit serpent_exit(void)
{
	simd_unregister_skciphers(serpent_algs, ARRAY_SIZE(serpent_algs),
				  serpent_simd_algs);
}
module_init(serpent_init);
module_exit(serpent_exit);

MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX optimized");
MODULE_LICENSE("GPL");
/* Allow auto-loading when a "serpent" implementation is requested. */
MODULE_ALIAS_CRYPTO("serpent");