// SPDX-License-Identifier: GPL-2.0
/*
 * GHASH routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015, 2019 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 *
 * Extended by Daniel Axtens <dja@axtens.net> to replace the fallback
 * mechanism. The new approach is based on arm64 code, which is:
 * Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */
14 #include <linux/types.h>
15 #include <linux/err.h>
16 #include <linux/crypto.h>
17 #include <linux/delay.h>
18 #include <linux/hardirq.h>
19 #include <asm/switch_to.h>
20 #include <crypto/aes.h>
21 #include <crypto/ghash.h>
22 #include <crypto/scatterwalk.h>
23 #include <crypto/internal/hash.h>
24 #include <crypto/b128ops.h>
26 #define IN_INTERRUPT in_interrupt()
28 void gcm_init_p8(u128 htable
[16], const u64 Xi
[2]);
29 void gcm_gmult_p8(u64 Xi
[2], const u128 htable
[16]);
30 void gcm_ghash_p8(u64 Xi
[2], const u128 htable
[16],
31 const u8
*in
, size_t len
);
34 /* key used by vector asm */
36 /* key used by software fallback */
40 struct p8_ghash_desc_ctx
{
42 u8 buffer
[GHASH_DIGEST_SIZE
];
46 static int p8_ghash_init(struct shash_desc
*desc
)
48 struct p8_ghash_desc_ctx
*dctx
= shash_desc_ctx(desc
);
51 memset(dctx
->shash
, 0, GHASH_DIGEST_SIZE
);
55 static int p8_ghash_setkey(struct crypto_shash
*tfm
, const u8
*key
,
58 struct p8_ghash_ctx
*ctx
= crypto_tfm_ctx(crypto_shash_tfm(tfm
));
60 if (keylen
!= GHASH_BLOCK_SIZE
)
66 gcm_init_p8(ctx
->htable
, (const u64
*) key
);
71 memcpy(&ctx
->key
, key
, GHASH_BLOCK_SIZE
);
76 static inline void __ghash_block(struct p8_ghash_ctx
*ctx
,
77 struct p8_ghash_desc_ctx
*dctx
)
83 gcm_ghash_p8(dctx
->shash
, ctx
->htable
,
84 dctx
->buffer
, GHASH_DIGEST_SIZE
);
89 crypto_xor((u8
*)dctx
->shash
, dctx
->buffer
, GHASH_BLOCK_SIZE
);
90 gf128mul_lle((be128
*)dctx
->shash
, &ctx
->key
);
94 static inline void __ghash_blocks(struct p8_ghash_ctx
*ctx
,
95 struct p8_ghash_desc_ctx
*dctx
,
96 const u8
*src
, unsigned int srclen
)
102 gcm_ghash_p8(dctx
->shash
, ctx
->htable
,
104 disable_kernel_vsx();
108 while (srclen
>= GHASH_BLOCK_SIZE
) {
109 crypto_xor((u8
*)dctx
->shash
, src
, GHASH_BLOCK_SIZE
);
110 gf128mul_lle((be128
*)dctx
->shash
, &ctx
->key
);
111 srclen
-= GHASH_BLOCK_SIZE
;
112 src
+= GHASH_BLOCK_SIZE
;
117 static int p8_ghash_update(struct shash_desc
*desc
,
118 const u8
*src
, unsigned int srclen
)
121 struct p8_ghash_ctx
*ctx
= crypto_tfm_ctx(crypto_shash_tfm(desc
->tfm
));
122 struct p8_ghash_desc_ctx
*dctx
= shash_desc_ctx(desc
);
125 if (dctx
->bytes
+ srclen
< GHASH_DIGEST_SIZE
) {
126 memcpy(dctx
->buffer
+ dctx
->bytes
, src
,
128 dctx
->bytes
+= srclen
;
131 memcpy(dctx
->buffer
+ dctx
->bytes
, src
,
132 GHASH_DIGEST_SIZE
- dctx
->bytes
);
134 __ghash_block(ctx
, dctx
);
136 src
+= GHASH_DIGEST_SIZE
- dctx
->bytes
;
137 srclen
-= GHASH_DIGEST_SIZE
- dctx
->bytes
;
140 len
= srclen
& ~(GHASH_DIGEST_SIZE
- 1);
142 __ghash_blocks(ctx
, dctx
, src
, len
);
147 memcpy(dctx
->buffer
, src
, srclen
);
148 dctx
->bytes
= srclen
;
153 static int p8_ghash_final(struct shash_desc
*desc
, u8
*out
)
156 struct p8_ghash_ctx
*ctx
= crypto_tfm_ctx(crypto_shash_tfm(desc
->tfm
));
157 struct p8_ghash_desc_ctx
*dctx
= shash_desc_ctx(desc
);
160 for (i
= dctx
->bytes
; i
< GHASH_DIGEST_SIZE
; i
++)
162 __ghash_block(ctx
, dctx
);
165 memcpy(out
, dctx
->shash
, GHASH_DIGEST_SIZE
);
169 struct shash_alg p8_ghash_alg
= {
170 .digestsize
= GHASH_DIGEST_SIZE
,
171 .init
= p8_ghash_init
,
172 .update
= p8_ghash_update
,
173 .final
= p8_ghash_final
,
174 .setkey
= p8_ghash_setkey
,
175 .descsize
= sizeof(struct p8_ghash_desc_ctx
)
176 + sizeof(struct ghash_desc_ctx
),
179 .cra_driver_name
= "p8_ghash",
180 .cra_priority
= 1000,
181 .cra_blocksize
= GHASH_BLOCK_SIZE
,
182 .cra_ctxsize
= sizeof(struct p8_ghash_ctx
),
183 .cra_module
= THIS_MODULE
,