// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/b128ops.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

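/*
 * Key schedule: h..h4 hold H^1..H^4 in the shifted, bit-reflected form
 * consumed by the PMULL/NEON assembly (see ghash_reflect() below), while
 * k keeps the raw hash key for the scalar gf128mul fallback path.
 */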
struct ghash_key {
	u64	h[2];
	u64	h2[2];
	u64	h3[2];
	u64	h4[2];

	be128	k;
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

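/*
 * Core GHASH block-processing routines, implemented in assembly: the _p64
 * variant uses the 64x64 vmull.p64 carry-less multiply from the ARMv8
 * Crypto Extensions, while the _p8 variant synthesizes the multiplication
 * from plain NEON vmull.p8 instructions for CPUs without PMULL.
 */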
asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       struct ghash_key const *k,
				       const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      struct ghash_key const *k,
				      const char *head);

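/* Set once at module init: _p64 when PMULL is available, _p8 otherwise. */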
static void (*pmull_ghash_update)(int blocks, u64 dg[], const char *src,
				  struct ghash_key const *k,
				  const char *head);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

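/*
 * Run 'blocks' full blocks through GHASH: on the NEON unit whenever the
 * SIMD registers may be used, otherwise through the generic gf128mul
 * fallback. 'head' may point at one extra block to consume before 'src'.
 */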
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head)
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		pmull_ghash_update(blocks, dg, src, key, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}

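/*
 * Buffer input until at least one full block is available; a previously
 * buffered partial block is handed to ghash_do_update() as 'head'.
 */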
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, ctx->digest, src, key,
				partial ? ctx->buf : NULL);
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

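/*
 * Zero-pad and absorb any trailing partial block, then emit the 16-byte
 * digest in big-endian order and wipe the descriptor state.
 */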
static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

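/*
 * Reorder and left-shift the hash key into the representation expected by
 * the assembly code; a carry out of the top bit is folded back in using
 * the bit-reflected form of the GHASH reduction polynomial.
 */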
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}

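/*
 * Precompute H^2..H^4 from the hash key so the assembly can aggregate up
 * to four blocks per reduction.
 */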
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);
	be128 h;

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h, &key->k);

	h = key->k;
	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h2, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h3, &h);

	gf128mul_lle(&h, &key->k);
	ghash_reflect(key->h4, &h);

	return 0;
}

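/*
 * The synchronous shash is registered one priority level below the async
 * wrapper so that generic "ghash" lookups prefer "ghash-ce".
 */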
static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),

	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key),
	.base.cra_module	= THIS_MODULE,
};

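/*
 * Async "ghash-ce" wrapper: requests are handled inline via the sync shash
 * when the NEON unit may be used, and deferred to cryptd otherwise (or when
 * cryptd already has requests queued, to preserve request ordering).
 */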
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);

	return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}

static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};

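/*
 * NEON is a hard requirement; the vmull.p64 code path is additionally
 * gated on the PMULL hwcap.
 */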
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL)
		pmull_ghash_update = pmull_ghash_update_p64;
	else
		pmull_ghash_update = pmull_ghash_update_p8;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
	return err;
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);