arch/arm64/crypto/ghash-ce-glue.c

/*
 * Accelerated GHASH implementation with ARMv8 PMULL instructions.
 *
 * Copyright (C) 2014 Linaro Ltd. <ard.biesheuvel@linaro.org>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published
 * by the Free Software Foundation.
 */

#include <asm/neon.h>
#include <asm/unaligned.h>
#include <crypto/internal/hash.h>
#include <linux/cpufeature.h>
#include <linux/crypto.h>
#include <linux/module.h>

MODULE_DESCRIPTION("GHASH secure hash using ARMv8 Crypto Extensions");
MODULE_AUTHOR("Ard Biesheuvel <ard.biesheuvel@linaro.org>");
MODULE_LICENSE("GPL v2");

#define GHASH_BLOCK_SIZE	16
#define GHASH_DIGEST_SIZE	16

struct ghash_key {
	u64 a;
	u64 b;
};

struct ghash_desc_ctx {
	u64 digest[GHASH_DIGEST_SIZE/sizeof(u64)];
	u8 buf[GHASH_BLOCK_SIZE];
	u32 count;
};

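/*
 * Core NEON/PMULL routine, implemented in assembly (ghash-ce-core.S).
 * It folds 'blocks' full 16-byte blocks from 'src' into the running
 * digest dg[] using the hash key 'k'; if 'head' is non-NULL, that
 * buffered block is consumed before the blocks taken from 'src'.
 */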
asmlinkage void pmull_ghash_update(int blocks, u64 dg[], const char *src,
				   struct ghash_key const *k, const char *head);

static int ghash_init(struct shash_desc *desc)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

static int ghash_update(struct shash_desc *desc, const u8 *src,
			unsigned int len)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	ctx->count += len;

	if ((partial + len) >= GHASH_BLOCK_SIZE) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);
		int blocks;

		/* fill up the buffered partial block first */
		if (partial) {
			int p = GHASH_BLOCK_SIZE - partial;

			memcpy(ctx->buf + partial, src, p);
			src += p;
			len -= p;
		}

		blocks = len / GHASH_BLOCK_SIZE;
		len %= GHASH_BLOCK_SIZE;

		/* only 8 NEON registers are used by the core routine */
		kernel_neon_begin_partial(8);
		pmull_ghash_update(blocks, ctx->digest, src, key,
				   partial ? ctx->buf : NULL);
		kernel_neon_end();
		src += blocks * GHASH_BLOCK_SIZE;
		partial = 0;
	}
	if (len)
		memcpy(ctx->buf + partial, src, len);
	return 0;
}

static int ghash_final(struct shash_desc *desc, u8 *dst)
{
	struct ghash_desc_ctx *ctx = shash_desc_ctx(desc);
	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;

	if (partial) {
		struct ghash_key *key = crypto_shash_ctx(desc->tfm);

		/* zero-pad and process the final partial block */
		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);

		kernel_neon_begin_partial(8);
		pmull_ghash_update(1, ctx->digest, ctx->buf, key, NULL);
		kernel_neon_end();
	}
	put_unaligned_be64(ctx->digest[1], dst);
	put_unaligned_be64(ctx->digest[0], dst + 8);

	*ctx = (struct ghash_desc_ctx){};
	return 0;
}

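/*
 * Note on the key schedule below: setkey does not store the GHASH key H
 * directly but H multiplied by 'x' in GF(2^128), which is the form the
 * PMULL core routine expects.  The constant 0xc200000000000000 is the
 * GHASH reduction polynomial (x^128 + x^7 + x^2 + x + 1) expressed in the
 * bit order used here, applied whenever the doubling shifts a set bit out
 * of the 128-bit value.
 */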
static int ghash_setkey(struct crypto_shash *tfm,
			const u8 *inkey, unsigned int keylen)
{
	struct ghash_key *key = crypto_shash_ctx(tfm);
	u64 a, b;

	if (keylen != GHASH_BLOCK_SIZE) {
		crypto_shash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
		return -EINVAL;
	}

	/* perform multiplication by 'x' in GF(2^128) */
	b = get_unaligned_be64(inkey);
	a = get_unaligned_be64(inkey + 8);

	key->a = (a << 1) | (b >> 63);
	key->b = (b << 1) | (a >> 63);

	if (b >> 63)
		key->b ^= 0xc200000000000000UL;

	return 0;
}

static struct shash_alg ghash_alg = {
	.digestsize	= GHASH_DIGEST_SIZE,
	.init		= ghash_init,
	.update		= ghash_update,
	.final		= ghash_final,
	.setkey		= ghash_setkey,
	.descsize	= sizeof(struct ghash_desc_ctx),
	.base		= {
		.cra_name		= "ghash",
		.cra_driver_name	= "ghash-ce",
		.cra_priority		= 200,
		.cra_flags		= CRYPTO_ALG_TYPE_SHASH,
		.cra_blocksize		= GHASH_BLOCK_SIZE,
		.cra_ctxsize		= sizeof(struct ghash_key),
		.cra_module		= THIS_MODULE,
	},
};

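/*
 * Module plumbing: registration is gated by module_cpu_feature_match()
 * below, so the "ghash-ce" shash is only registered (and the module only
 * auto-loaded) on CPUs that advertise the PMULL feature.  Its cra_priority
 * of 200 is intended to win over the generic C ghash implementation when
 * both are available.
 */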
static int __init ghash_ce_mod_init(void)
{
	return crypto_register_shash(&ghash_alg);
}

static void __exit ghash_ce_mod_exit(void)
{
	crypto_unregister_shash(&ghash_alg);
}

module_cpu_feature_match(PMULL, ghash_ce_mod_init);
module_exit(ghash_ce_mod_exit);
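
/*
 * Illustrative sketch only (not part of the original driver): one way a
 * kernel user could drive the "ghash" shash registered above through the
 * generic crypto API.  The function name ghash_ce_demo and its parameters
 * are hypothetical, error handling is abbreviated, and the sketch assumes
 * <linux/slab.h> and <linux/err.h> are available for kzalloc()/IS_ERR().
 */
static int __maybe_unused ghash_ce_demo(const u8 key[GHASH_BLOCK_SIZE],
					const u8 *data, unsigned int len,
					u8 out[GHASH_DIGEST_SIZE])
{
	struct crypto_shash *tfm;
	struct shash_desc *desc;
	int err;

	/* ask the crypto API for a ghash transform; may resolve to ghash-ce */
	tfm = crypto_alloc_shash("ghash", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	err = crypto_shash_setkey(tfm, key, GHASH_BLOCK_SIZE);
	if (err)
		goto out_free_tfm;

	/* a shash_desc carries the per-request state (struct ghash_desc_ctx) */
	desc = kzalloc(sizeof(*desc) + crypto_shash_descsize(tfm), GFP_KERNEL);
	if (!desc) {
		err = -ENOMEM;
		goto out_free_tfm;
	}
	desc->tfm = tfm;

	/* init + update + final in one call */
	err = crypto_shash_digest(desc, data, len, out);

	kfree(desc);
out_free_tfm:
	crypto_free_shash(tfm);
	return err;
}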