// SPDX-License-Identifier: GPL-2.0
/*
 * GHASH routines supporting VMX instructions on the Power 8
 *
 * Copyright (C) 2015, 2019 International Business Machines Inc.
 *
 * Author: Marcelo Henrique Cerri <mhcerri@br.ibm.com>
 *
 * Extended by Daniel Axtens <dja@axtens.net> to replace the fallback
 * mechanism. The new approach is based on arm64 code, which is:
 * Copyright (C) 2014 - 2018 Linaro Ltd. <ard.biesheuvel@linaro.org>
 */
#include <linux/types.h>
#include <linux/err.h>
#include <linux/crypto.h>
#include <linux/delay.h>
#include <linux/uaccess.h>
#include <asm/simd.h>
#include <asm/switch_to.h>
#include <crypto/aes.h>
#include <crypto/ghash.h>
#include <crypto/scatterwalk.h>
#include <crypto/internal/hash.h>
#include <crypto/internal/simd.h>
#include <crypto/b128ops.h>
/*
 * Assembly routines (implemented elsewhere, in VMX/VSX assembly).
 * They may only be called while the vector unit is usable
 * (i.e. between enable_kernel_vsx()/disable_kernel_vsx()).
 */
/* Precompute the multiplication table 'htable' from the hash key Xi. */
void gcm_init_p8(u128 htable[16], const u64 Xi[2]);
/* Multiply the accumulator Xi by the hash key (single GHASH step). */
void gcm_gmult_p8(u64 Xi[2], const u128 htable[16]);
/* Fold 'len' bytes of 'in' into the accumulator Xi, one 16-byte block at a time. */
void gcm_ghash_p8(u64 Xi[2], const u128 htable[16],
		  const u8 *in, size_t len);
33 /* key used by vector asm */
35 /* key used by software fallback */
39 struct p8_ghash_desc_ctx
{
41 u8 buffer
[GHASH_DIGEST_SIZE
];
45 static int p8_ghash_init(struct shash_desc
*desc
)
47 struct p8_ghash_desc_ctx
*dctx
= shash_desc_ctx(desc
);
50 memset(dctx
->shash
, 0, GHASH_DIGEST_SIZE
);
54 static int p8_ghash_setkey(struct crypto_shash
*tfm
, const u8
*key
,
57 struct p8_ghash_ctx
*ctx
= crypto_tfm_ctx(crypto_shash_tfm(tfm
));
59 if (keylen
!= GHASH_BLOCK_SIZE
)
65 gcm_init_p8(ctx
->htable
, (const u64
*) key
);
70 memcpy(&ctx
->key
, key
, GHASH_BLOCK_SIZE
);
75 static inline void __ghash_block(struct p8_ghash_ctx
*ctx
,
76 struct p8_ghash_desc_ctx
*dctx
)
78 if (crypto_simd_usable()) {
82 gcm_ghash_p8(dctx
->shash
, ctx
->htable
,
83 dctx
->buffer
, GHASH_DIGEST_SIZE
);
88 crypto_xor((u8
*)dctx
->shash
, dctx
->buffer
, GHASH_BLOCK_SIZE
);
89 gf128mul_lle((be128
*)dctx
->shash
, &ctx
->key
);
93 static inline void __ghash_blocks(struct p8_ghash_ctx
*ctx
,
94 struct p8_ghash_desc_ctx
*dctx
,
95 const u8
*src
, unsigned int srclen
)
97 if (crypto_simd_usable()) {
101 gcm_ghash_p8(dctx
->shash
, ctx
->htable
,
103 disable_kernel_vsx();
107 while (srclen
>= GHASH_BLOCK_SIZE
) {
108 crypto_xor((u8
*)dctx
->shash
, src
, GHASH_BLOCK_SIZE
);
109 gf128mul_lle((be128
*)dctx
->shash
, &ctx
->key
);
110 srclen
-= GHASH_BLOCK_SIZE
;
111 src
+= GHASH_BLOCK_SIZE
;
116 static int p8_ghash_update(struct shash_desc
*desc
,
117 const u8
*src
, unsigned int srclen
)
120 struct p8_ghash_ctx
*ctx
= crypto_tfm_ctx(crypto_shash_tfm(desc
->tfm
));
121 struct p8_ghash_desc_ctx
*dctx
= shash_desc_ctx(desc
);
124 if (dctx
->bytes
+ srclen
< GHASH_DIGEST_SIZE
) {
125 memcpy(dctx
->buffer
+ dctx
->bytes
, src
,
127 dctx
->bytes
+= srclen
;
130 memcpy(dctx
->buffer
+ dctx
->bytes
, src
,
131 GHASH_DIGEST_SIZE
- dctx
->bytes
);
133 __ghash_block(ctx
, dctx
);
135 src
+= GHASH_DIGEST_SIZE
- dctx
->bytes
;
136 srclen
-= GHASH_DIGEST_SIZE
- dctx
->bytes
;
139 len
= srclen
& ~(GHASH_DIGEST_SIZE
- 1);
141 __ghash_blocks(ctx
, dctx
, src
, len
);
146 memcpy(dctx
->buffer
, src
, srclen
);
147 dctx
->bytes
= srclen
;
152 static int p8_ghash_final(struct shash_desc
*desc
, u8
*out
)
155 struct p8_ghash_ctx
*ctx
= crypto_tfm_ctx(crypto_shash_tfm(desc
->tfm
));
156 struct p8_ghash_desc_ctx
*dctx
= shash_desc_ctx(desc
);
159 for (i
= dctx
->bytes
; i
< GHASH_DIGEST_SIZE
; i
++)
161 __ghash_block(ctx
, dctx
);
164 memcpy(out
, dctx
->shash
, GHASH_DIGEST_SIZE
);
168 struct shash_alg p8_ghash_alg
= {
169 .digestsize
= GHASH_DIGEST_SIZE
,
170 .init
= p8_ghash_init
,
171 .update
= p8_ghash_update
,
172 .final
= p8_ghash_final
,
173 .setkey
= p8_ghash_setkey
,
174 .descsize
= sizeof(struct p8_ghash_desc_ctx
)
175 + sizeof(struct ghash_desc_ctx
),
178 .cra_driver_name
= "p8_ghash",
179 .cra_priority
= 1000,
180 .cra_blocksize
= GHASH_BLOCK_SIZE
,
181 .cra_ctxsize
= sizeof(struct p8_ghash_ctx
),
182 .cra_module
= THIS_MODULE
,