arch/arm64/crypto/chacha-neon-glue.c
/*
 * ARM NEON accelerated ChaCha and XChaCha stream ciphers,
 * including ChaCha20 (RFC7539)
 *
 * Copyright (C) 2016 - 2017 Linaro, Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * Based on:
 * ChaCha20 256-bit cipher algorithm, RFC7539, SIMD glue code
 *
 * Copyright (C) 2015 Martin Willi
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
#include <crypto/algapi.h>
#include <crypto/chacha.h>
#include <crypto/internal/simd.h>
#include <crypto/internal/skcipher.h>
#include <linux/kernel.h>
#include <linux/module.h>

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
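
/*
 * The routines below are implemented in NEON assembly elsewhere in this
 * directory (presumably chacha-neon-core.S): a single-block ChaCha
 * permute-and-XOR, a wide variant that can consume up to five blocks of
 * input per call, and the HChaCha block function used by XChaCha for
 * subkey derivation.
 */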
asmlinkage void chacha_block_xor_neon(u32 *state, u8 *dst, const u8 *src,
                                      int nrounds);
asmlinkage void chacha_4block_xor_neon(u32 *state, u8 *dst, const u8 *src,
                                       int nrounds, int bytes);
asmlinkage void hchacha_block_neon(const u32 *state, u32 *out, int nrounds);
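
/*
 * XOR the ChaCha keystream into 'bytes' bytes at src, writing to dst.
 * Data is processed in chunks of up to five blocks via the wide NEON
 * routine; a final chunk of at most one block goes through the
 * single-block routine using a stack bounce buffer so partial blocks
 * are handled safely.  state[12] holds the block counter and is
 * advanced as keystream blocks are produced.
 */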
static void chacha_doneon(u32 *state, u8 *dst, const u8 *src,
                          int bytes, int nrounds)
{
        while (bytes > 0) {
                int l = min(bytes, CHACHA_BLOCK_SIZE * 5);

                if (l <= CHACHA_BLOCK_SIZE) {
                        u8 buf[CHACHA_BLOCK_SIZE];

                        memcpy(buf, src, l);
                        chacha_block_xor_neon(state, buf, buf, nrounds);
                        memcpy(dst, buf, l);
                        state[12] += 1;
                        break;
                }
                chacha_4block_xor_neon(state, dst, src, nrounds, l);
                bytes -= CHACHA_BLOCK_SIZE * 5;
                src += CHACHA_BLOCK_SIZE * 5;
                dst += CHACHA_BLOCK_SIZE * 5;
                state[12] += 5;
        }
}
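
/*
 * Walk the skcipher request and XOR the keystream into each chunk.
 * Every chunk except the last is rounded down to the walk stride, and
 * the NEON unit is only claimed (kernel_neon_begin/kernel_neon_end)
 * around the actual keystream generation.
 */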
static int chacha_neon_stream_xor(struct skcipher_request *req,
                                  const struct chacha_ctx *ctx, const u8 *iv)
{
        struct skcipher_walk walk;
        u32 state[16];
        int err;

        err = skcipher_walk_virt(&walk, req, false);

        crypto_chacha_init(state, ctx, iv);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;

                if (nbytes < walk.total)
                        nbytes = rounddown(nbytes, walk.stride);

                kernel_neon_begin();
                chacha_doneon(state, walk.dst.virt.addr, walk.src.virt.addr,
                              nbytes, ctx->nrounds);
                kernel_neon_end();
                err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
        }

        return err;
}
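
/*
 * Requests of at most one block, or contexts where crypto_simd_usable()
 * reports that kernel-mode NEON may not be used, fall back to the
 * generic scalar implementation.
 */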
static int chacha_neon(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);

        if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
                return crypto_chacha_crypt(req);

        return chacha_neon_stream_xor(req, ctx, req->iv);
}
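
/*
 * XChaCha: derive a ChaCha subkey by running HChaCha over the key and
 * the first 16 bytes of the extended nonce, then encrypt with that
 * subkey and a regular 16-byte IV built from the remaining IV bytes.
 */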
static int xchacha_neon(struct skcipher_request *req)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        struct chacha_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct chacha_ctx subctx;
        u32 state[16];
        u8 real_iv[16];

        if (req->cryptlen <= CHACHA_BLOCK_SIZE || !crypto_simd_usable())
                return crypto_xchacha_crypt(req);

        crypto_chacha_init(state, ctx, req->iv);

        kernel_neon_begin();
        hchacha_block_neon(state, subctx.key, ctx->nrounds);
        kernel_neon_end();
        subctx.nrounds = ctx->nrounds;

        memcpy(&real_iv[0], req->iv + 24, 8);
        memcpy(&real_iv[8], req->iv + 16, 8);
        return chacha_neon_stream_xor(req, &subctx, real_iv);
}
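
/*
 * The walksize of 5 * CHACHA_BLOCK_SIZE matches the amount of data the
 * wide NEON routine can consume per call, so full-sized walk chunks map
 * directly onto whole invocations of chacha_4block_xor_neon().
 */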
static struct skcipher_alg algs[] = {
        {
                .base.cra_name          = "chacha20",
                .base.cra_driver_name   = "chacha20-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct chacha_ctx),
                .base.cra_module        = THIS_MODULE,

                .min_keysize            = CHACHA_KEY_SIZE,
                .max_keysize            = CHACHA_KEY_SIZE,
                .ivsize                 = CHACHA_IV_SIZE,
                .chunksize              = CHACHA_BLOCK_SIZE,
                .walksize               = 5 * CHACHA_BLOCK_SIZE,
                .setkey                 = crypto_chacha20_setkey,
                .encrypt                = chacha_neon,
                .decrypt                = chacha_neon,
        }, {
                .base.cra_name          = "xchacha20",
                .base.cra_driver_name   = "xchacha20-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct chacha_ctx),
                .base.cra_module        = THIS_MODULE,

                .min_keysize            = CHACHA_KEY_SIZE,
                .max_keysize            = CHACHA_KEY_SIZE,
                .ivsize                 = XCHACHA_IV_SIZE,
                .chunksize              = CHACHA_BLOCK_SIZE,
                .walksize               = 5 * CHACHA_BLOCK_SIZE,
                .setkey                 = crypto_chacha20_setkey,
                .encrypt                = xchacha_neon,
                .decrypt                = xchacha_neon,
        }, {
                .base.cra_name          = "xchacha12",
                .base.cra_driver_name   = "xchacha12-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = 1,
                .base.cra_ctxsize       = sizeof(struct chacha_ctx),
                .base.cra_module        = THIS_MODULE,

                .min_keysize            = CHACHA_KEY_SIZE,
                .max_keysize            = CHACHA_KEY_SIZE,
                .ivsize                 = XCHACHA_IV_SIZE,
                .chunksize              = CHACHA_BLOCK_SIZE,
                .walksize               = 5 * CHACHA_BLOCK_SIZE,
                .setkey                 = crypto_chacha12_setkey,
                .encrypt                = xchacha_neon,
                .decrypt                = xchacha_neon,
        }
};
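
/*
 * Only register the NEON implementations when the CPU advertises the
 * Advanced SIMD (ASIMD) capability; otherwise leave these algorithms to
 * the generic ChaCha code.
 */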
static int __init chacha_simd_mod_init(void)
{
        if (!cpu_have_named_feature(ASIMD))
                return -ENODEV;

        return crypto_register_skciphers(algs, ARRAY_SIZE(algs));
}

static void __exit chacha_simd_mod_fini(void)
{
        crypto_unregister_skciphers(algs, ARRAY_SIZE(algs));
}
module_init(chacha_simd_mod_init);
module_exit(chacha_simd_mod_fini);

MODULE_DESCRIPTION("ChaCha and XChaCha stream ciphers (NEON accelerated)");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("chacha20");
MODULE_ALIAS_CRYPTO("chacha20-neon");
MODULE_ALIAS_CRYPTO("xchacha20");
MODULE_ALIAS_CRYPTO("xchacha20-neon");
MODULE_ALIAS_CRYPTO("xchacha12");
MODULE_ALIAS_CRYPTO("xchacha12-neon");