// SPDX-License-Identifier: GPL-2.0
/*
 * NEON-accelerated implementation of Speck128-XTS and Speck64-XTS
 * (64-bit version; based on the 32-bit version)
 *
 * Copyright (c) 2018 Google, Inc
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <crypto/algapi.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/skcipher.h>
#include <crypto/speck.h>
#include <crypto/xts.h>
#include <linux/kernel.h>
#include <linux/module.h>

/* The assembly functions only handle multiples of 128 bytes */
#define SPECK_NEON_CHUNK_SIZE 128
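
/*
 * A 128-byte chunk is 8 Speck128 blocks (16 bytes each) or 16 Speck64 blocks
 * (8 bytes each).
 */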

/* Speck128 */

struct speck128_xts_tfm_ctx {
        struct speck128_tfm_ctx main_key;
        struct speck128_tfm_ctx tweak_key;
};
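
/*
 * The NEON assembly routines below process nbytes (a multiple of
 * SPECK_NEON_CHUNK_SIZE) from src to dst and advance the tweak in place, so
 * the C fallback code can continue from where the assembly left off.
 */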
asmlinkage void speck128_xts_encrypt_neon(const u64 *round_keys, int nrounds,
                                          void *dst, const void *src,
                                          unsigned int nbytes, void *tweak);

asmlinkage void speck128_xts_decrypt_neon(const u64 *round_keys, int nrounds,
                                          void *dst, const void *src,
                                          unsigned int nbytes, void *tweak);

typedef void (*speck128_crypt_one_t)(const struct speck128_tfm_ctx *,
                                     u8 *, const u8 *);
typedef void (*speck128_xts_crypt_many_t)(const u64 *, int, void *,
                                          const void *, unsigned int, void *);
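
/*
 * Since the encrypt and decrypt wrappers pass compile-time-constant function
 * pointers, __always_inline lets the compiler specialize each wrapper and
 * resolve the indirect calls statically.
 */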
static __always_inline int
__speck128_xts_crypt(struct skcipher_request *req,
                     speck128_crypt_one_t crypt_one,
                     speck128_xts_crypt_many_t crypt_many)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct speck128_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        le128 tweak;
        int err;

        err = skcipher_walk_virt(&walk, req, true);
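
        /* Per XTS, the initial tweak is the IV encrypted with the tweak key */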
        crypto_speck128_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;
                u8 *dst = walk.dst.virt.addr;
                const u8 *src = walk.src.virt.addr;
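
                /*
                 * Bulk path: hand all full 128-byte chunks to the NEON
                 * assembly.  may_use_simd() checks that NEON is usable in the
                 * current context, and kernel_neon_begin()/kernel_neon_end()
                 * must bracket any kernel-mode NEON use.
                 */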
                if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) {
                        unsigned int count;

                        count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE);
                        kernel_neon_begin();
                        (*crypt_many)(ctx->main_key.round_keys,
                                      ctx->main_key.nrounds,
                                      dst, src, count, &tweak);
                        kernel_neon_end();
                        dst += count;
                        src += count;
                        nbytes -= count;
                }

                /*
                 * Handle any remainder with generic code: XOR each block with
                 * the tweak, encrypt or decrypt it with the main key, XOR it
                 * with the tweak again, then multiply the tweak by x in
                 * GF(2^128) (gf128mul_x_ble) for the next block.
                 */
                while (nbytes >= sizeof(tweak)) {
                        le128_xor((le128 *)dst, (const le128 *)src, &tweak);
                        (*crypt_one)(&ctx->main_key, dst, dst);
                        le128_xor((le128 *)dst, (const le128 *)dst, &tweak);
                        gf128mul_x_ble(&tweak, &tweak);

                        dst += sizeof(tweak);
                        src += sizeof(tweak);
                        nbytes -= sizeof(tweak);
                }
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int speck128_xts_encrypt(struct skcipher_request *req)
{
        return __speck128_xts_crypt(req, crypto_speck128_encrypt,
                                    speck128_xts_encrypt_neon);
}

static int speck128_xts_decrypt(struct skcipher_request *req)
{
        return __speck128_xts_crypt(req, crypto_speck128_decrypt,
                                    speck128_xts_decrypt_neon);
}
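
/*
 * An XTS key is two concatenated keys of equal length: the first half keys
 * the data cipher and the second half keys the tweak cipher.
 * xts_verify_key() rejects keys unsuitable for XTS (e.g. of odd length).
 */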
static int speck128_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                               unsigned int keylen)
{
        struct speck128_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        keylen /= 2;

        err = crypto_speck128_setkey(&ctx->main_key, key, keylen);
        if (err)
                return err;

        return crypto_speck128_setkey(&ctx->tweak_key, key + keylen, keylen);
}

/* Speck64 */

struct speck64_xts_tfm_ctx {
        struct speck64_tfm_ctx main_key;
        struct speck64_tfm_ctx tweak_key;
};

asmlinkage void speck64_xts_encrypt_neon(const u32 *round_keys, int nrounds,
                                         void *dst, const void *src,
                                         unsigned int nbytes, void *tweak);

asmlinkage void speck64_xts_decrypt_neon(const u32 *round_keys, int nrounds,
                                         void *dst, const void *src,
                                         unsigned int nbytes, void *tweak);

typedef void (*speck64_crypt_one_t)(const struct speck64_tfm_ctx *,
                                    u8 *, const u8 *);
typedef void (*speck64_xts_crypt_many_t)(const u32 *, int, void *,
                                         const void *, unsigned int, void *);
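
/*
 * The Speck64 path mirrors the Speck128 one, but with 8-byte blocks: the
 * tweak is a single __le64, and since there is no GF(2^64) counterpart to
 * gf128mul_x_ble(), the tweak update is open-coded below.
 */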
static __always_inline int
__speck64_xts_crypt(struct skcipher_request *req, speck64_crypt_one_t crypt_one,
                    speck64_xts_crypt_many_t crypt_many)
{
        struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
        const struct speck64_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        struct skcipher_walk walk;
        __le64 tweak;
        int err;

        err = skcipher_walk_virt(&walk, req, true);

        crypto_speck64_encrypt(&ctx->tweak_key, (u8 *)&tweak, walk.iv);

        while (walk.nbytes > 0) {
                unsigned int nbytes = walk.nbytes;
                u8 *dst = walk.dst.virt.addr;
                const u8 *src = walk.src.virt.addr;

                if (nbytes >= SPECK_NEON_CHUNK_SIZE && may_use_simd()) {
                        unsigned int count;

                        count = round_down(nbytes, SPECK_NEON_CHUNK_SIZE);
                        kernel_neon_begin();
                        (*crypt_many)(ctx->main_key.round_keys,
                                      ctx->main_key.nrounds,
                                      dst, src, count, &tweak);
                        kernel_neon_end();
                        dst += count;
                        src += count;
                        nbytes -= count;
                }

                /*
                 * Handle any remainder with generic code.  The tweak update is
                 * multiplication by x in GF(2^64) with reduction polynomial
                 * x^64 + x^4 + x^3 + x + 1: a left shift by one, XORing in the
                 * reduction constant 0x1B when the top bit shifts out.
                 */
                while (nbytes >= sizeof(tweak)) {
                        *(__le64 *)dst = *(__le64 *)src ^ tweak;
                        (*crypt_one)(&ctx->main_key, dst, dst);
                        *(__le64 *)dst ^= tweak;
                        tweak = cpu_to_le64((le64_to_cpu(tweak) << 1) ^
                                            ((tweak & cpu_to_le64(1ULL << 63)) ?
                                             0x1B : 0));

                        dst += sizeof(tweak);
                        src += sizeof(tweak);
                        nbytes -= sizeof(tweak);
                }
                err = skcipher_walk_done(&walk, nbytes);
        }

        return err;
}

static int speck64_xts_encrypt(struct skcipher_request *req)
{
        return __speck64_xts_crypt(req, crypto_speck64_encrypt,
                                   speck64_xts_encrypt_neon);
}

static int speck64_xts_decrypt(struct skcipher_request *req)
{
        return __speck64_xts_crypt(req, crypto_speck64_decrypt,
                                   speck64_xts_decrypt_neon);
}

static int speck64_xts_setkey(struct crypto_skcipher *tfm, const u8 *key,
                              unsigned int keylen)
{
        struct speck64_xts_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
        int err;

        err = xts_verify_key(tfm, key, keylen);
        if (err)
                return err;

        keylen /= 2;

        err = crypto_speck64_setkey(&ctx->main_key, key, keylen);
        if (err)
                return err;

        return crypto_speck64_setkey(&ctx->tweak_key, key + keylen, keylen);
}
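
/*
 * cra_priority 300 makes these NEON implementations preferred over
 * instantiations built from the generic C Speck cipher, and cra_alignmask 7
 * requests 8-byte-aligned buffers to match the 64-bit loads and stores in the
 * C fallback paths above.
 */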
static struct skcipher_alg speck_algs[] = {
        {
                .base.cra_name          = "xts(speck128)",
                .base.cra_driver_name   = "xts-speck128-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = SPECK128_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct speck128_xts_tfm_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * SPECK128_128_KEY_SIZE,
                .max_keysize            = 2 * SPECK128_256_KEY_SIZE,
                .ivsize                 = SPECK128_BLOCK_SIZE,
                .walksize               = SPECK_NEON_CHUNK_SIZE,
                .setkey                 = speck128_xts_setkey,
                .encrypt                = speck128_xts_encrypt,
                .decrypt                = speck128_xts_decrypt,
        }, {
                .base.cra_name          = "xts(speck64)",
                .base.cra_driver_name   = "xts-speck64-neon",
                .base.cra_priority      = 300,
                .base.cra_blocksize     = SPECK64_BLOCK_SIZE,
                .base.cra_ctxsize       = sizeof(struct speck64_xts_tfm_ctx),
                .base.cra_alignmask     = 7,
                .base.cra_module        = THIS_MODULE,
                .min_keysize            = 2 * SPECK64_96_KEY_SIZE,
                .max_keysize            = 2 * SPECK64_128_KEY_SIZE,
                .ivsize                 = SPECK64_BLOCK_SIZE,
                .walksize               = SPECK_NEON_CHUNK_SIZE,
                .setkey                 = speck64_xts_setkey,
                .encrypt                = speck64_xts_encrypt,
                .decrypt                = speck64_xts_decrypt,
        }
};
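
/*
 * Registration is gated on Advanced SIMD (NEON) support, reported via
 * HWCAP_ASIMD; without it the NEON assembly cannot run, so the module
 * declines to load.
 */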
static int __init speck_neon_module_init(void)
{
        if (!(elf_hwcap & HWCAP_ASIMD))
                return -ENODEV;
        return crypto_register_skciphers(speck_algs, ARRAY_SIZE(speck_algs));
}

static void __exit speck_neon_module_exit(void)
{
        crypto_unregister_skciphers(speck_algs, ARRAY_SIZE(speck_algs));
}

module_init(speck_neon_module_init);
module_exit(speck_neon_module_exit);

MODULE_DESCRIPTION("Speck block cipher (NEON-accelerated)");
MODULE_LICENSE("GPL");
MODULE_AUTHOR("Eric Biggers <ebiggers@google.com>");
MODULE_ALIAS_CRYPTO("xts(speck128)");
MODULE_ALIAS_CRYPTO("xts-speck128-neon");
MODULE_ALIAS_CRYPTO("xts(speck64)");
MODULE_ALIAS_CRYPTO("xts-speck64-neon");
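
/*
 * Usage sketch (illustrative, not part of the original file): a kernel user
 * would reach this driver through the generic skcipher API, e.g.:
 *
 *	struct crypto_skcipher *tfm;
 *	int err;
 *
 *	tfm = crypto_alloc_skcipher("xts(speck128)", 0, 0);
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	err = crypto_skcipher_setkey(tfm, key, 2 * SPECK128_256_KEY_SIZE);
 *	...
 *	crypto_free_skcipher(tfm);
 *
 * The MODULE_ALIAS_CRYPTO() lines above let such a request autoload this
 * module.
 */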