// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * x64 SIMD accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2015 Martin Willi
 */

#include <crypto/algapi.h>
#include <crypto/internal/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>
#include <asm/simd.h>

#define CHACHA_STATE_ALIGN 16
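
/*
 * Keystream generators implemented in the accompanying SSSE3, AVX2 and
 * AVX-512VL assembly files.  Each routine XORs up to the given number of
 * 64-byte keystream blocks into dst; len may cover a partial final block.
 */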
asmlinkage void chacha_block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_ssse3(u32 *state, u8 *dst, const u8 *src,
					unsigned int len, int nrounds);
asmlinkage void hchacha_block_ssse3(const u32 *state, u32 *out, int nrounds);

asmlinkage void chacha_2block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx2(u32 *state, u8 *dst, const u8 *src,
				       unsigned int len, int nrounds);

asmlinkage void chacha_2block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_4block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);
asmlinkage void chacha_8block_xor_avx512vl(u32 *state, u8 *dst, const u8 *src,
					   unsigned int len, int nrounds);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_simd);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx2);
static __ro_after_init DEFINE_STATIC_KEY_FALSE(chacha_use_avx512vl);
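
/*
 * Return the number of whole ChaCha blocks a SIMD routine handling up to
 * maxblocks blocks will have consumed for the len bytes remaining, so the
 * caller can advance the block counter in state[12] accordingly.
 */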
static unsigned int chacha_advance(unsigned int len, unsigned int maxblocks)
{
	len = min(len, maxblocks * CHACHA_BLOCK_SIZE);
	return round_up(len, CHACHA_BLOCK_SIZE) / CHACHA_BLOCK_SIZE;
}
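
/*
 * XOR the source into the destination with ChaCha keystream, preferring the
 * widest SIMD routine enabled at boot: full 8-block (or 4-block for SSSE3)
 * chunks are processed in a loop, then the tail goes to the narrowest
 * routine that still covers it.  state[12] (the block counter) is advanced
 * as blocks are consumed.  Must be called with the FPU usable, i.e. between
 * kernel_fpu_begin() and kernel_fpu_end().
 */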
static void chacha_dosimd(u32 *state, u8 *dst, const u8 *src,
			  unsigned int bytes, int nrounds)
{
	if (IS_ENABLED(CONFIG_AS_AVX512) &&
	    static_branch_likely(&chacha_use_avx512vl)) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes) {
			chacha_2block_xor_avx512vl(state, dst, src, bytes,
						   nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}

	if (IS_ENABLED(CONFIG_AS_AVX2) &&
	    static_branch_likely(&chacha_use_avx2)) {
		while (bytes >= CHACHA_BLOCK_SIZE * 8) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			bytes -= CHACHA_BLOCK_SIZE * 8;
			src += CHACHA_BLOCK_SIZE * 8;
			dst += CHACHA_BLOCK_SIZE * 8;
			state[12] += 8;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 4) {
			chacha_8block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 8);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE * 2) {
			chacha_4block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 4);
			return;
		}
		if (bytes > CHACHA_BLOCK_SIZE) {
			chacha_2block_xor_avx2(state, dst, src, bytes, nrounds);
			state[12] += chacha_advance(bytes, 2);
			return;
		}
	}

	while (bytes >= CHACHA_BLOCK_SIZE * 4) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		bytes -= CHACHA_BLOCK_SIZE * 4;
		src += CHACHA_BLOCK_SIZE * 4;
		dst += CHACHA_BLOCK_SIZE * 4;
		state[12] += 4;
	}
	if (bytes > CHACHA_BLOCK_SIZE) {
		chacha_4block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12] += chacha_advance(bytes, 4);
		return;
	}
	if (bytes) {
		chacha_block_xor_ssse3(state, dst, src, bytes, nrounds);
		state[12]++;
	}
}
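
/*
 * HChaCha core used to derive XChaCha subkeys.  Falls back to the generic
 * implementation when the static key is off or the FPU cannot be used in
 * the current context (e.g. from interrupt context).
 */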
void hchacha_block_arch(const u32 *state, u32 *stream, int nrounds)
{
	state = PTR_ALIGN(state, CHACHA_STATE_ALIGN);

	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable()) {
		hchacha_block_generic(state, stream, nrounds);
	} else {
		kernel_fpu_begin();
		hchacha_block_ssse3(state, stream, nrounds);
		kernel_fpu_end();
	}
}
EXPORT_SYMBOL(hchacha_block_arch);

void chacha_init_arch(u32 *state, const u32 *key, const u8 *iv)
{
	state = PTR_ALIGN(state, CHACHA_STATE_ALIGN);

	chacha_init_generic(state, key, iv);
}
EXPORT_SYMBOL(chacha_init_arch);
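
/*
 * Entry point used by the ChaCha library interface.  Inputs of at most one
 * block, and any call where the FPU is unusable, go through the generic
 * code, since the kernel_fpu_begin()/kernel_fpu_end() overhead is not worth
 * it for a single block.
 */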
void chacha_crypt_arch(u32 *state, u8 *dst, const u8 *src, unsigned int bytes,
		       int nrounds)
{
	state = PTR_ALIGN(state, CHACHA_STATE_ALIGN);

	if (!static_branch_likely(&chacha_use_simd) || !crypto_simd_usable() ||
	    bytes <= CHACHA_BLOCK_SIZE)
		return chacha_crypt_generic(state, dst, src, bytes, nrounds);

	kernel_fpu_begin();
	chacha_dosimd(state, dst, src, bytes, nrounds);
	kernel_fpu_end();
}
EXPORT_SYMBOL(chacha_crypt_arch);
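
/*
 * Common skcipher path for chacha20 and xchacha20/12: walk the request in
 * virtually mapped chunks, rounding every chunk but the last down to a
 * whole number of blocks so the block counter stays aligned, and
 * re-checking crypto_simd_usable() per chunk so the FPU is only claimed
 * when that is legal.
 */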
static int chacha_simd_stream_xor(struct skcipher_request *req,
				  const struct chacha_ctx *ctx, const u8 *iv)
{
	u32 *state, state_buf[16 + 2] __aligned(8);
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	BUILD_BUG_ON(CHACHA_STATE_ALIGN != 16);
	state = PTR_ALIGN(state_buf + 0, CHACHA_STATE_ALIGN);

	chacha_init_generic(state, ctx->key, iv);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		if (!static_branch_likely(&chacha_use_simd) ||
		    !crypto_simd_usable()) {
			chacha_crypt_generic(state, walk.dst.virt.addr,
					     walk.src.virt.addr, nbytes,
					     ctx->nrounds);
		} else {
			kernel_fpu_begin();
			chacha_dosimd(state, walk.dst.virt.addr,
				      walk.src.virt.addr, nbytes,
				      ctx->nrounds);
			kernel_fpu_end();
		}
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}

static int chacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);

	return chacha_simd_stream_xor(req, ctx, req->iv);
}
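
/*
 * XChaCha: run HChaCha over the key and the first 16 bytes of the extended
 * nonce to derive a one-time subkey, then run plain ChaCha with that
 * subkey.  The real IV is assembled from the remaining IV bytes: the
 * counter (bytes 24-31 of req->iv) followed by the last 8 nonce bytes
 * (bytes 16-23).
 */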
static int xchacha_simd(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
	u32 *state, state_buf[16 + 2] __aligned(8);
	struct chacha_ctx subctx;
	u8 real_iv[16];

	BUILD_BUG_ON(CHACHA_STATE_ALIGN != 16);
	state = PTR_ALIGN(state_buf + 0, CHACHA_STATE_ALIGN);
	chacha_init_generic(state, ctx->key, req->iv);

	if (req->cryptlen > CHACHA_BLOCK_SIZE && crypto_simd_usable()) {
		kernel_fpu_begin();
		hchacha_block_ssse3(state, subctx.key, ctx->nrounds);
		kernel_fpu_end();
	} else {
		hchacha_block_generic(state, subctx.key, ctx->nrounds);
	}
	subctx.nrounds = ctx->nrounds;

	memcpy(&real_iv[0], req->iv + 24, 8);
	memcpy(&real_iv[8], req->iv + 16, 8);
	return chacha_simd_stream_xor(req, &subctx, real_iv);
}

static struct skcipher_alg algs[] = {
	{
		.base.cra_name		= "chacha20",
		.base.cra_driver_name	= "chacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= CHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= chacha_simd,
		.decrypt		= chacha_simd,
	}, {
		.base.cra_name		= "xchacha20",
		.base.cra_driver_name	= "xchacha20-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha20_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	}, {
		.base.cra_name		= "xchacha12",
		.base.cra_driver_name	= "xchacha12-simd",
		.base.cra_priority	= 300,
		.base.cra_blocksize	= 1,
		.base.cra_ctxsize	= sizeof(struct chacha_ctx),
		.base.cra_module	= THIS_MODULE,

		.min_keysize		= CHACHA_KEY_SIZE,
		.max_keysize		= CHACHA_KEY_SIZE,
		.ivsize			= XCHACHA_IV_SIZE,
		.chunksize		= CHACHA_BLOCK_SIZE,
		.setkey			= chacha12_setkey,
		.encrypt		= xchacha_simd,
		.decrypt		= xchacha_simd,
	},
};
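
/*
 * Enable the static keys according to the boot CPU's features: SSSE3 is
 * the baseline, AVX2 additionally needs usable YMM state, and the
 * AVX-512VL routines also require AVX512BW for the kmovq instruction.
 * The skcipher algorithms are only registered when the skcipher API is
 * reachable from this module.
 */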
static int __init chacha_simd_mod_init(void)
{
	if (!boot_cpu_has(X86_FEATURE_SSSE3))
		return 0;

	static_branch_enable(&chacha_use_simd);

	if (IS_ENABLED(CONFIG_AS_AVX2) &&
	    boot_cpu_has(X86_FEATURE_AVX) &&
	    boot_cpu_has(X86_FEATURE_AVX2) &&
	    cpu_has_xfeatures(XFEATURE_MASK_SSE | XFEATURE_MASK_YMM, NULL)) {
		static_branch_enable(&chacha_use_avx2);

		if (IS_ENABLED(CONFIG_AS_AVX512) &&
		    boot_cpu_has(X86_FEATURE_AVX512VL) &&
		    boot_cpu_has(X86_FEATURE_AVX512BW)) /* kmovq */
			static_branch_enable(&chacha_use_avx512vl);
	}
	return IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) ?
		crypto_register_skciphers(algs, ARRAY_SIZE(algs)) : 0;
}

static void __exit chacha_simd_mod_fini(void)
{
	if (IS_REACHABLE(CONFIG_CRYPTO_SKCIPHER) && boot_cpu_has(X86_FEATURE_SSSE3))
		crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}

module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Martin Willi <martin@strongswan.org>");
MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (x64 SIMD accelerated)");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-simd");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-simd");