arch/x86/crypto/serpent_avx2_glue.c

// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Glue Code for x86_64/AVX2 assembler optimized version of Serpent
 *
 * Copyright © 2012-2013 Jussi Kivilinna <jussi.kivilinna@mbnet.fi>
 */

#include <linux/module.h>
#include <linux/types.h>
#include <linux/crypto.h>
#include <linux/err.h>
#include <crypto/algapi.h>
#include <crypto/internal/simd.h>
#include <crypto/serpent.h>
#include <crypto/xts.h>
#include <asm/crypto/glue_helper.h>
#include <asm/crypto/serpent-avx.h>

#define SERPENT_AVX2_PARALLEL_BLOCKS 16

/* 16-way AVX2 parallel cipher functions */
asmlinkage void serpent_ecb_enc_16way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void serpent_ecb_dec_16way(const void *ctx, u8 *dst, const u8 *src);
asmlinkage void serpent_cbc_dec_16way(const void *ctx, u8 *dst, const u8 *src);

asmlinkage void serpent_ctr_16way(const void *ctx, u8 *dst, const u8 *src,
				  le128 *iv);
asmlinkage void serpent_xts_enc_16way(const void *ctx, u8 *dst, const u8 *src,
				      le128 *iv);
asmlinkage void serpent_xts_dec_16way(const void *ctx, u8 *dst, const u8 *src,
				      le128 *iv);

static int serpent_setkey_skcipher(struct crypto_skcipher *tfm,
				   const u8 *key, unsigned int keylen)
{
	return __serpent_setkey(crypto_skcipher_ctx(tfm), key, keylen);
}

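/*
 * Dispatch tables for the glue_helper layer.  Entries are tried in order
 * of decreasing .num_blocks, so the 16-way AVX2 path is preferred, then
 * the 8-way AVX path, and finally the generic one-block C implementation;
 * .fpu_blocks_limit is the block-count threshold the glue helper uses to
 * decide whether entering a kernel FPU/SIMD section is worthwhile.
 */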
static const struct common_glue_ctx serpent_enc = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ecb = serpent_ecb_enc_16way }
	}, {
		.num_blocks = 8,
		.fn_u = { .ecb = serpent_ecb_enc_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = __serpent_encrypt }
	} }
};

static const struct common_glue_ctx serpent_ctr = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ctr = serpent_ctr_16way }
	}, {
		.num_blocks = 8,
		.fn_u = { .ctr = serpent_ctr_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .ctr = __serpent_crypt_ctr }
	} }
};

static const struct common_glue_ctx serpent_enc_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .xts = serpent_xts_enc_16way }
	}, {
		.num_blocks = 8,
		.fn_u = { .xts = serpent_xts_enc_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = serpent_xts_enc }
	} }
};

static const struct common_glue_ctx serpent_dec = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .ecb = serpent_ecb_dec_16way }
	}, {
		.num_blocks = 8,
		.fn_u = { .ecb = serpent_ecb_dec_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .ecb = __serpent_decrypt }
	} }
};

static const struct common_glue_ctx serpent_dec_cbc = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .cbc = serpent_cbc_dec_16way }
	}, {
		.num_blocks = 8,
		.fn_u = { .cbc = serpent_cbc_dec_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .cbc = __serpent_decrypt }
	} }
};

static const struct common_glue_ctx serpent_dec_xts = {
	.num_funcs = 3,
	.fpu_blocks_limit = 8,

	.funcs = { {
		.num_blocks = 16,
		.fn_u = { .xts = serpent_xts_dec_16way }
	}, {
		.num_blocks = 8,
		.fn_u = { .xts = serpent_xts_dec_8way_avx }
	}, {
		.num_blocks = 1,
		.fn_u = { .xts = serpent_xts_dec }
	} }
};

static int ecb_encrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_enc, req);
}

static int ecb_decrypt(struct skcipher_request *req)
{
	return glue_ecb_req_128bit(&serpent_dec, req);
}

static int cbc_encrypt(struct skcipher_request *req)
{
	return glue_cbc_encrypt_req_128bit(__serpent_encrypt, req);
}

static int cbc_decrypt(struct skcipher_request *req)
{
	return glue_cbc_decrypt_req_128bit(&serpent_dec_cbc, req);
}

static int ctr_crypt(struct skcipher_request *req)
{
	return glue_ctr_req_128bit(&serpent_ctr, req);
}

static int xts_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_enc_xts, req,
				   __serpent_encrypt, &ctx->tweak_ctx,
				   &ctx->crypt_ctx, false);
}

static int xts_decrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct serpent_xts_ctx *ctx = crypto_skcipher_ctx(tfm);

	return glue_xts_req_128bit(&serpent_dec_xts, req,
				   __serpent_encrypt, &ctx->tweak_ctx,
				   &ctx->crypt_ctx, true);
}

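/*
 * The "__" prefix and CRYPTO_ALG_INTERNAL mark these skciphers as
 * internal-only: they rely on FPU/AVX2 state and are reached through the
 * SIMD wrapper algorithms registered in init() rather than directly.
 */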
static struct skcipher_alg serpent_algs[] = {
	{
		.base.cra_name		= "__ecb(serpent)",
		.base.cra_driver_name	= "__ecb-serpent-avx2",
		.base.cra_priority	= 600,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ecb_encrypt,
		.decrypt		= ecb_decrypt,
	}, {
		.base.cra_name		= "__cbc(serpent)",
		.base.cra_driver_name	= "__cbc-serpent-avx2",
		.base.cra_priority	= 600,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= cbc_encrypt,
		.decrypt		= cbc_decrypt,
	}, {
		.base.cra_name		= "__ctr(serpent)",
		.base.cra_driver_name	= "__ctr-serpent-avx2",
		.base.cra_priority	= 600,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct serpent_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= SERPENT_MIN_KEY_SIZE,
		.max_keysize		= SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.chunksize		= SERPENT_BLOCK_SIZE,
		.setkey			= serpent_setkey_skcipher,
		.encrypt		= ctr_crypt,
		.decrypt		= ctr_crypt,
	}, {
		.base.cra_name		= "__xts(serpent)",
		.base.cra_driver_name	= "__xts-serpent-avx2",
		.base.cra_priority	= 600,
		.base.cra_flags		= CRYPTO_ALG_INTERNAL,
		.base.cra_blocksize	= SERPENT_BLOCK_SIZE,
		.base.cra_ctxsize	= sizeof(struct serpent_xts_ctx),
		.base.cra_module	= THIS_MODULE,
		.min_keysize		= 2 * SERPENT_MIN_KEY_SIZE,
		.max_keysize		= 2 * SERPENT_MAX_KEY_SIZE,
		.ivsize			= SERPENT_BLOCK_SIZE,
		.setkey			= xts_serpent_setkey,
		.encrypt		= xts_encrypt,
		.decrypt		= xts_decrypt,
	},
};

static struct simd_skcipher_alg *serpent_simd_algs[ARRAY_SIZE(serpent_algs)];

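/*
 * simd_register_skciphers_compat() creates a SIMD wrapper for each
 * internal "__..." algorithm above (dropping the "__" prefix for the
 * user-visible name) and defers to cryptd when the FPU cannot be used
 * in the calling context.
 */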
static int __init init(void)
{
	const char *feature_name;

	if (!boot_cpu_has(X86_FEATURE_AVX2) || !boot_cpu_has(X86_FEATURE_OSXSAVE)) {
		pr_info("AVX2 instructions are not detected.\n");
		return -ENODEV;
	}
	if (!cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM,
			       &feature_name)) {
		pr_info("CPU feature '%s' is not supported.\n", feature_name);
		return -ENODEV;
	}

	return simd_register_skciphers_compat(serpent_algs,
					      ARRAY_SIZE(serpent_algs),
					      serpent_simd_algs);
}

static void __exit fini(void)
{
	simd_unregister_skciphers(serpent_algs, ARRAY_SIZE(serpent_algs),
				  serpent_simd_algs);
}

module_init(init);
module_exit(fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("Serpent Cipher Algorithm, AVX2 optimized");
MODULE_ALIAS_CRYPTO("serpent");
MODULE_ALIAS_CRYPTO("serpent-asm");