// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with ARMv8 vmull.p64 instructions.
 *
 * Copyright (C) 2015 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */

#include <asm/hwcap.h>
#include <asm/neon.h>
#include <asm/simd.h>
#include <asm/unaligned.h>
#include <crypto/b128ops.h>
#include <crypto/cryptd.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/gf128mul.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/jump_label.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH hash function using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");
MODULE_ALIAS_CRYPTO("ghash");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16
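
/*
 * The key is kept in two forms: 'k' as supplied by the caller, for the
 * generic software fallback, and 'h[]' as one or more pre-shifted
 * ("reflected") copies in the layout the NEON code expects. Only h[0]
 * is populated when the plain NEON (p8) code is in use; the PMULL (p64)
 * path stores four entries, h[0]..h[3] (see ghash_setkey() below).
 */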
struct ghash_key {
	be128	k;
	u64	h[][2];
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

asmlinkage void pmull_ghash_update_p64(int blocks, u64 dg[], const char *src,
				       u64 const h[][2], const char *head);

asmlinkage void pmull_ghash_update_p8(int blocks, u64 dg[], const char *src,
				      u64 const h[][2], const char *head);

static __ro_after_init DEFINE_STATIC_KEY_FALSE(use_p64);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
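
/*
 * Run 'blocks' full GHASH blocks through the NEON code when the SIMD
 * unit may be used in the current context, preferring the vmull.p64
 * (PMULL) routine when the static key was enabled at load time.
 * Otherwise fall back to the generic GF(2^128) multiply, which keeps
 * the running digest correct where kernel_neon_begin() is not allowed.
 */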
static void ghash_do_update(int blocks, u64 dg[], const char *src,
			    struct ghash_key *key, const char *head)
{
	if (likely(crypto_simd_usable())) {
		kernel_neon_begin();
		if (static_branch_likely(&use_p64))
			pmull_ghash_update_p64(blocks, dg, src, key->h, head);
		else
			pmull_ghash_update_p8(blocks, dg, src, key->h, head);
		kernel_neon_end();
	} else {
		be128 dst = { cpu_to_be64(dg[1]), cpu_to_be64(dg[0]) };

		do {
			const u8 *in = src;

			if (head) {
				in = head;
				blocks++;
				head = NULL;
			} else {
				src += GHASH_BLOCK_SIZE;
			}

			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
			gf128mul_lle(&dst, &key->k);
		} while (--blocks);

		dg[0] = be64_to_cpu(dst.b);
		dg[1] = be64_to_cpu(dst.a);
	}
}
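
/*
 * Buffer input until a full 16-byte block is available; any previously
 * buffered partial block is handed to ghash_do_update() as 'head' so it
 * is consumed ahead of the bulk data.
 */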
static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		ghash_do_update(blocks, ctx->digest, src, key,
				partial ? ctx->buf : NULL);
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
		ghash_do_update(1, ctx->digest, ctx->buf, key, NULL);
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}
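
/*
 * Pre-shift the key left by one bit into the representation used by the
 * PMULL/NEON code; when the shift carries out of the top bit, folding
 * in the 0xc2... constant accounts for GHASH's reduction polynomial in
 * this bit order. (A brief sketch of the intent, not a derivation.)
 */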
static void ghash_reflect(u64 h[], const be128 *k)
{
	u64 carry = be64_to_cpu(k->a) >> 63;

	h[0] = (be64_to_cpu(k->b) << 1) | carry;
	h[1] = (be64_to_cpu(k->a) << 1) | (be64_to_cpu(k->b) >> 63);

	if (carry)
		h[1] ^= 0xc200000000000000UL;
}
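
/*
 * Install the hash key: keep the raw key for the software fallback and
 * a reflected copy for the NEON code. When PMULL is available, also
 * precompute H^2, H^3 and H^4 so the p64 routine can process several
 * blocks per reduction step.
 */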
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* needed for the fallback */
	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
	ghash_reflect(key->h[0], &key->k);

	if (static_branch_likely(&use_p64)) {
		be128 h = key->k;

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[1], &h);

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[2], &h);

		gf128mul_lle(&h, &key->k);
		ghash_reflect(key->h[3], &h);
	}
	return 0;
}

static struct shash_alg ghash_alg = {
	.digestsize		= GHASH_DIGEST_SIZE,
	.init			= ghash_init,
	.update			= ghash_update,
	.final			= ghash_final,
	.setkey			= ghash_setkey,
	.descsize		= sizeof(struct ghash_desc_ctx),

	.base.cra_name		= "ghash",
	.base.cra_driver_name	= "ghash-ce-sync",
	.base.cra_priority	= 300 - 1,
	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
	.base.cra_ctxsize	= sizeof(struct ghash_key) + sizeof(u64[2]),
	.base.cra_module	= THIS_MODULE,
};
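
/*
 * The "ghash-ce" ahash below wraps the synchronous algorithm via cryptd:
 * when the NEON unit cannot be used in the current context (or work is
 * already queued on the cryptd worker), the request is bounced to
 * cryptd; otherwise it is handled synchronously on the spot.
 */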
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	desc->tfm = cryptd_ahash_child(ctx->cryptd_tfm);

	return crypto_shash_import(desc, in);
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);

	return crypto_shash_export(desc, out);
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}
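
/*
 * Instantiate the cryptd helper around the synchronous "ghash-ce-sync"
 * shash, and size the request context so a cryptd request can nest
 * inside ours.
 */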
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("ghash-ce-sync", 0, 0);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init			= ghash_async_init,
	.update			= ghash_async_update,
	.final			= ghash_async_final,
	.setkey			= ghash_async_setkey,
	.digest			= ghash_async_digest,
	.import			= ghash_async_import,
	.export			= ghash_async_export,
	.halg.digestsize	= GHASH_DIGEST_SIZE,
	.halg.statesize		= sizeof(struct ghash_desc_ctx),
	.halg.base		= {
		.cra_name	= "ghash",
		.cra_driver_name = "ghash-ce",
		.cra_priority	= 300,
		.cra_flags	= CRYPTO_ALG_ASYNC,
		.cra_blocksize	= GHASH_BLOCK_SIZE,
		.cra_ctxsize	= sizeof(struct ghash_async_ctx),
		.cra_module	= THIS_MODULE,
		.cra_init	= ghash_async_init_tfm,
		.cra_exit	= ghash_async_exit_tfm,
	},
};
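
/*
 * NEON is required outright; vmull.p64 (advertised via HWCAP2_PMULL) is
 * optional. When it is present, enable the p64 static key and grow the
 * key context by the three extra precomputed powers of H.
 */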
static int __init ghash_ce_mod_init(void)
{
	int err;

	if (!(elf_hwcap & HWCAP_NEON))
		return -ENODEV;

	if (elf_hwcap2 & HWCAP2_PMULL) {
		ghash_alg.base.cra_ctxsize += 3 * sizeof(u64[2]);
		static_branch_enable(&use_p64);
	}

	err = crypto_register_shash(&ghash_alg);
	if (err)
		return err;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
	return err;
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);
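
/*
 * Example use from other kernel code (a minimal sketch against the
 * generic shash API; the ghash_demo() name and one-shot digest flow are
 * illustrative, not part of this driver):
 *
 *	#include <crypto/hash.h>
 *
 *	static int ghash_demo(const u8 key[16], const u8 *data,
 *			      unsigned int len, u8 out[16])
 *	{
 *		struct crypto_shash *tfm;
 *		int err;
 *
 *		tfm = crypto_alloc_shash("ghash", 0, 0);
 *		if (IS_ERR(tfm))
 *			return PTR_ERR(tfm);
 *
 *		err = crypto_shash_setkey(tfm, key, 16);
 *		if (!err) {
 *			SHASH_DESC_ON_STACK(desc, tfm);
 *
 *			desc->tfm = tfm;
 *			err = crypto_shash_digest(desc, data, len, out);
 *		}
 *		crypto_free_shash(tfm);
 *		return err;
 *	}
 *
 * The crypto core picks the highest-priority "ghash" implementation, so
 * with this module loaded the shash above resolves to ghash-ce-sync.
 */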