arch/x86/crypto/ghash-clmulni-intel_glue.c

// SPDX-License-Identifier: GPL-2.0-only
/*
 * Accelerated GHASH implementation with Intel PCLMULQDQ-NI
 * instructions. This file contains glue code.
 *
 * Copyright (c) 2009 Intel Corp.
 *   Author: Huang Ying <ying.huang@intel.com>
 */

#include <linux/err.h>
#include <linux/module.h>
#include <linux/init.h>
#include <linux/kernel.h>
#include <linux/crypto.h>
#include <crypto/algapi.h>
#include <crypto/cryptd.h>
#include <crypto/gf128mul.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <asm/cpu_device_id.h>
#include <asm/simd.h>

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

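/*
 * Core routines implemented in assembly (the accompanying
 * ghash-clmulni-intel_asm.S).  clmul_ghash_mul() multiplies the 16-byte
 * state at @dst by the key @shash in place; clmul_ghash_update() absorbs
 * full 16-byte blocks from @src into the state at @dst, leaving any tail
 * shorter than a block for the C glue below to buffer.
 */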
void clmul_ghash_mul(char *dst, const u128 *shash);

void clmul_ghash_update(char *dst, const char *src, unsigned int srclen,
			const u128 *shash);

struct ghash_async_ctx {
	struct cryptd_ahash *cryptd_tfm;
};

struct ghash_ctx {
	u128 shash;
};

struct ghash_desc_ctx {
	u8 buffer[GHASH_BLOCK_SIZE];
	u32 bytes;
};

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memset(dctx, 0, sizeof(*dctx));

	return 0;
}

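/*
 * ghash_setkey() does not store the raw hash key H.  As the comment inside
 * the function says, it multiplies the key by 'x' in GF(2^128): both 64-bit
 * halves are shifted left by one bit with the carry propagated, and when the
 * top bit of the key was set the result is reduced by folding in
 * 0xc2 << 56, a constant derived from the GHASH reduction polynomial
 * x^128 + x^7 + x^2 + x + 1.  The two halves are stored swapped in
 * ctx->shash, which is presumably the layout the PCLMULQDQ assembly expects.
 */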
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *key, unsigned int keylen)
{
	struct ghash_ctx *ctx = crypto_shash_ctx(tfm);
	be128 *x = (be128 *)key;
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE)
		return -EINVAL;

	/* perform multiplication by 'x' in GF(2^128) */
	a = be64_to_cpu(x->a);
	b = be64_to_cpu(x->b);

	ctx->shash.a = (b << 1) | (a >> 63);
	ctx->shash.b = (a << 1) | (b >> 63);

	if (a >> 63)
		ctx->shash.b ^= ((u64)0xc2) << 56;

	return 0;
}

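/*
 * dctx->buffer holds the running 16-byte GHASH state.  Input bytes are XORed
 * into it; each time a full block has been absorbed the state is multiplied
 * by the key (clmul_ghash_mul() for the buffered block, clmul_ghash_update()
 * for the remaining full blocks).  dctx->bytes counts how many bytes are
 * still missing from the current block; a trailing partial block is XORed in
 * here and completed by a later update or zero-padded by ghash_flush().
 */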
static int ghash_update(struct shash_desc *desc,
			const u8 *src, unsigned int srclen)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *dst = dctx->buffer;

	kernel_fpu_begin();
	if (dctx->bytes) {
		int n = min(srclen, dctx->bytes);
		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		dctx->bytes -= n;
		srclen -= n;

		while (n--)
			*pos++ ^= *src++;

		if (!dctx->bytes)
			clmul_ghash_mul(dst, &ctx->shash);
	}

	clmul_ghash_update(dst, src, srclen, &ctx->shash);
	kernel_fpu_end();

	if (srclen & 0xf) {
		src += srclen - (srclen & 0xf);
		srclen &= 0xf;
		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
		while (srclen--)
			*dst++ ^= *src++;
	}

	return 0;
}

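/*
 * ghash_flush() finishes off a partially filled block: the "*tmp++ ^= 0"
 * loop is the zero-padding GHASH requires for a final partial block (a
 * no-op XOR that simply consumes the remaining byte count), after which the
 * state is multiplied by the key one last time.
 */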
static void ghash_flush(struct ghash_ctx *ctx, struct ghash_desc_ctx *dctx)
{
	u8 *dst = dctx->buffer;

	if (dctx->bytes) {
		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);

		while (dctx->bytes--)
			*tmp++ ^= 0;

		kernel_fpu_begin();
		clmul_ghash_mul(dst, &ctx->shash);
		kernel_fpu_end();
	}

	dctx->bytes = 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);
	struct ghash_ctx *ctx = crypto_shash_ctx(desc->tfm);
	u8 *buf = dctx->buffer;

	ghash_flush(ctx, dctx);
	memcpy(dst, buf, GHASH_BLOCK_SIZE);

	return 0;
}

static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "__ghash",
		.cra_driver_name	= "__ghash-pclmulqdqni",
		.cra_priority		= 0,
		.cra_flags		= CRYPTO_ALG_INTERNAL,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_ctx),
		.cra_module		= THIS_MODULE,
	},
};

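/*
 * The shash above ("__ghash", CRYPTO_ALG_INTERNAL) does the real work but
 * may only run where the FPU/SIMD registers are usable.  The ahash below
 * ("ghash-clmulni") is what users actually see: each operation either calls
 * the internal shash directly when crypto_simd_usable() says that is safe,
 * or defers the request to cryptd, which runs the same shash later in
 * process context.
 */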
static int ghash_async_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

	desc->tfm = child;
	return crypto_shash_init(desc);
}

static int ghash_async_update(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_update(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return shash_ahash_update(req, desc);
	}
}

static int ghash_async_final(struct ahash_request *req)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_final(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		return crypto_shash_final(desc, req->result);
	}
}

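/*
 * export/import copy the raw struct ghash_desc_ctx (halg.statesize below),
 * so a partially hashed request can be serialized and resumed.  Import
 * re-runs ghash_async_init() first so that desc->tfm is set up before the
 * saved state is copied in.
 */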
static int ghash_async_import(struct ahash_request *req, const void *in)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	ghash_async_init(req);
	memcpy(dctx, in, sizeof(*dctx));
	return 0;
}

static int ghash_async_export(struct ahash_request *req, void *out)
{
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
	struct ghash_desc_ctx *dctx = shash_desc_ctx(desc);

	memcpy(out, dctx, sizeof(*dctx));
	return 0;
}

static int ghash_async_digest(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct ahash_request *cryptd_req = ahash_request_ctx(req);
	struct cryptd_ahash *cryptd_tfm = ctx->cryptd_tfm;

	if (!crypto_simd_usable() ||
	    (in_atomic() && cryptd_ahash_queued(cryptd_tfm))) {
		memcpy(cryptd_req, req, sizeof(*req));
		ahash_request_set_tfm(cryptd_req, &cryptd_tfm->base);
		return crypto_ahash_digest(cryptd_req);
	} else {
		struct shash_desc *desc = cryptd_shash_desc(cryptd_req);
		struct crypto_shash *child = cryptd_ahash_child(cryptd_tfm);

		desc->tfm = child;
		return shash_ahash_digest(req, desc);
	}
}

static int ghash_async_setkey(struct crypto_ahash *tfm, const u8 *key,
			      unsigned int keylen)
{
	struct ghash_async_ctx *ctx = crypto_ahash_ctx(tfm);
	struct crypto_ahash *child = &ctx->cryptd_tfm->base;

	crypto_ahash_clear_flags(child, CRYPTO_TFM_REQ_MASK);
	crypto_ahash_set_flags(child, crypto_ahash_get_flags(tfm)
			       & CRYPTO_TFM_REQ_MASK);
	return crypto_ahash_setkey(child, key, keylen);
}

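/*
 * cra_init/cra_exit: allocate (and later free) the cryptd wrapper around the
 * internal "__ghash-pclmulqdqni" shash, and size this ahash's request
 * context so that a nested ahash_request for cryptd fits inside it.
 */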
static int ghash_async_init_tfm(struct crypto_tfm *tfm)
{
	struct cryptd_ahash *cryptd_tfm;
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_tfm = cryptd_alloc_ahash("__ghash-pclmulqdqni",
					CRYPTO_ALG_INTERNAL,
					CRYPTO_ALG_INTERNAL);
	if (IS_ERR(cryptd_tfm))
		return PTR_ERR(cryptd_tfm);
	ctx->cryptd_tfm = cryptd_tfm;
	crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
				 sizeof(struct ahash_request) +
				 crypto_ahash_reqsize(&cryptd_tfm->base));

	return 0;
}

static void ghash_async_exit_tfm(struct crypto_tfm *tfm)
{
	struct ghash_async_ctx *ctx = crypto_tfm_ctx(tfm);

	cryptd_free_ahash(ctx->cryptd_tfm);
}

static struct ahash_alg ghash_async_alg = {
	.init		= ghash_async_init,
	.update		= ghash_async_update,
	.final		= ghash_async_final,
	.setkey		= ghash_async_setkey,
	.digest		= ghash_async_digest,
	.export		= ghash_async_export,
	.import		= ghash_async_import,
	.halg = {
		.digestsize	= GHASH_DIGEST_SIZE,
		.statesize	= sizeof(struct ghash_desc_ctx),
		.base = {
			.cra_name		= "ghash",
			.cra_driver_name	= "ghash-clmulni",
			.cra_priority		= 400,
			.cra_ctxsize		= sizeof(struct ghash_async_ctx),
			.cra_flags		= CRYPTO_ALG_ASYNC,
			.cra_blocksize		= GHASH_BLOCK_SIZE,
			.cra_module		= THIS_MODULE,
			.cra_init		= ghash_async_init_tfm,
			.cra_exit		= ghash_async_exit_tfm,
		},
	},
};

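/*
 * Register only on CPUs that advertise PCLMULQDQ; the device table also
 * lets udev auto-load this module based on the CPU feature flag.
 */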
static const struct x86_cpu_id pcmul_cpu_id[] = {
	X86_MATCH_FEATURE(X86_FEATURE_PCLMULQDQ, NULL), /* Pickle-Mickle-Duck */
	{}
};
MODULE_DEVICE_TABLE(x86cpu, pcmul_cpu_id);

static int __init ghash_pclmulqdqni_mod_init(void)
{
	int err;

	if (!x86_match_cpu(pcmul_cpu_id))
		return -ENODEV;

	err = crypto_register_shash(&ghash_alg);
	if (err)
		goto err_out;
	err = crypto_register_ahash(&ghash_async_alg);
	if (err)
		goto err_shash;

	return 0;

err_shash:
	crypto_unregister_shash(&ghash_alg);
err_out:
	return err;
}

static void __exit ghash_pclmulqdqni_mod_exit(void)
{
	crypto_unregister_ahash(&ghash_async_alg);
	crypto_unregister_shash(&ghash_alg);
}

module_init(ghash_pclmulqdqni_mod_init);
module_exit(ghash_pclmulqdqni_mod_exit);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("GHASH hash function, accelerated by PCLMULQDQ-NI");
MODULE_ALIAS_CRYPTO("ghash");
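
/*
 * Usage sketch (illustrative only, not part of the original file): with this
 * module loaded, a request for "ghash" resolves to "ghash-clmulni" via its
 * higher cra_priority, and MODULE_ALIAS_CRYPTO("ghash") allows the module to
 * be auto-loaded on demand.  A hypothetical in-kernel caller could reach it
 * through the ahash API roughly as follows ('sg' is a scatterlist describing
 * 'len' bytes of data; error handling and async completion waiting omitted):
 *
 *	struct crypto_ahash *tfm = crypto_alloc_ahash("ghash", 0, 0);
 *	struct ahash_request *req = ahash_request_alloc(tfm, GFP_KERNEL);
 *	u8 digest[GHASH_DIGEST_SIZE];
 *
 *	crypto_ahash_setkey(tfm, key, GHASH_BLOCK_SIZE);
 *	ahash_request_set_callback(req, 0, NULL, NULL);
 *	ahash_request_set_crypt(req, sg, digest, len);
 *	crypto_ahash_digest(req);	(may return -EINPROGRESS when deferred
 *					 to cryptd and complete via callback)
 *	ahash_request_free(req);
 *	crypto_free_ahash(tfm);
 *
 * In practice GHASH is almost always consumed indirectly through "gcm(aes)".
 */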