// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * SHA-3, as specified in
 * http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.202.pdf
 *
 * SHA-3 code by Jeff Garzik <jeff@garzik.org>
 *               Ard Biesheuvel <ard.biesheuvel@linaro.org>
 */
#include <crypto/internal/hash.h>
#include <linux/init.h>
#include <linux/module.h>
#include <linux/types.h>
#include <crypto/sha3.h>
#include <asm/unaligned.h>
/*
 * On some 32-bit architectures (e.g. h8300), GCC ends up using
 * over 1 KB of stack if we inline the round calculation into the loop
 * in keccakf(). On the other hand, on 64-bit architectures with plenty
 * of [64-bit wide] general purpose registers, not inlining it severely
 * hurts performance. So let's use 64-bitness as a heuristic to decide
 * whether to inline or not.
 */
#ifdef CONFIG_64BIT
#define SHA3_INLINE	inline
#else
#define SHA3_INLINE	noinline
#endif
#define KECCAK_ROUNDS 24

static const u64 keccakf_rndc[24] = {
	0x0000000000000001ULL, 0x0000000000008082ULL, 0x800000000000808aULL,
	0x8000000080008000ULL, 0x000000000000808bULL, 0x0000000080000001ULL,
	0x8000000080008081ULL, 0x8000000000008009ULL, 0x000000000000008aULL,
	0x0000000000000088ULL, 0x0000000080008009ULL, 0x000000008000000aULL,
	0x000000008000808bULL, 0x800000000000008bULL, 0x8000000000008089ULL,
	0x8000000000008003ULL, 0x8000000000008002ULL, 0x8000000000000080ULL,
	0x000000000000800aULL, 0x800000008000000aULL, 0x8000000080008081ULL,
	0x8000000000008080ULL, 0x0000000080000001ULL, 0x8000000080008008ULL
};
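/*
 * The table above holds the 24 round constants of Keccak-f[1600] from
 * FIPS 202; keccakf() XORs one of them into lane st[0] per round (the
 * "iota" step).
 */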
/* update the state with given number of rounds */

static SHA3_INLINE void keccakf_round(u64 st[25])
{
	u64 t[5], tt, bc[5];
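	/*
	 * One round of Keccak-f[1600] as defined in FIPS 202: theta
	 * (column parity mixing), rho (per-lane rotations), pi (lane
	 * permutation) and chi (non-linear row mixing); rho and pi are
	 * fused into a single pass below, and the iota round constant
	 * is applied by the caller, keccakf().
	 */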
	/* Theta */
	bc[0] = st[0] ^ st[5] ^ st[10] ^ st[15] ^ st[20];
	bc[1] = st[1] ^ st[6] ^ st[11] ^ st[16] ^ st[21];
	bc[2] = st[2] ^ st[7] ^ st[12] ^ st[17] ^ st[22];
	bc[3] = st[3] ^ st[8] ^ st[13] ^ st[18] ^ st[23];
	bc[4] = st[4] ^ st[9] ^ st[14] ^ st[19] ^ st[24];
	t[0] = bc[4] ^ rol64(bc[1], 1);
	t[1] = bc[0] ^ rol64(bc[2], 1);
	t[2] = bc[1] ^ rol64(bc[3], 1);
	t[3] = bc[2] ^ rol64(bc[4], 1);
	t[4] = bc[3] ^ rol64(bc[0], 1);

	st[0] ^= t[0];

	/* Rho Pi */
	tt = st[1];
	st[ 1] = rol64(st[ 6] ^ t[1], 44);
	st[ 6] = rol64(st[ 9] ^ t[4], 20);
	st[ 9] = rol64(st[22] ^ t[2], 61);
	st[22] = rol64(st[14] ^ t[4], 39);
	st[14] = rol64(st[20] ^ t[0], 18);
	st[20] = rol64(st[ 2] ^ t[2], 62);
	st[ 2] = rol64(st[12] ^ t[2], 43);
	st[12] = rol64(st[13] ^ t[3], 25);
	st[13] = rol64(st[19] ^ t[4],  8);
	st[19] = rol64(st[23] ^ t[3], 56);
	st[23] = rol64(st[15] ^ t[0], 41);
	st[15] = rol64(st[ 4] ^ t[4], 27);
	st[ 4] = rol64(st[24] ^ t[4], 14);
	st[24] = rol64(st[21] ^ t[1],  2);
	st[21] = rol64(st[ 8] ^ t[3], 55);
	st[ 8] = rol64(st[16] ^ t[1], 45);
	st[16] = rol64(st[ 5] ^ t[0], 36);
	st[ 5] = rol64(st[ 3] ^ t[3], 28);
	st[ 3] = rol64(st[18] ^ t[3], 21);
	st[18] = rol64(st[17] ^ t[2], 15);
	st[17] = rol64(st[11] ^ t[1], 10);
	st[11] = rol64(st[ 7] ^ t[2],  6);
	st[ 7] = rol64(st[10] ^ t[0],  3);
	st[10] = rol64(    tt ^ t[1],  1);
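	/*
	 * Note on the block above: each assignment applies the remaining
	 * theta XOR to a source lane, rotates it by that lane's rho
	 * offset, and stores the result at the position prescribed by
	 * pi; st[0] needs no rotation and was updated just before it.
	 */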
	/* Chi */
	bc[ 0] = ~st[ 1] & st[ 2];
	bc[ 1] = ~st[ 2] & st[ 3];
	bc[ 2] = ~st[ 3] & st[ 4];
	bc[ 3] = ~st[ 4] & st[ 0];
	bc[ 4] = ~st[ 0] & st[ 1];
	st[ 0] ^= bc[ 0];
	st[ 1] ^= bc[ 1];
	st[ 2] ^= bc[ 2];
	st[ 3] ^= bc[ 3];
	st[ 4] ^= bc[ 4];

	bc[ 0] = ~st[ 6] & st[ 7];
	bc[ 1] = ~st[ 7] & st[ 8];
	bc[ 2] = ~st[ 8] & st[ 9];
	bc[ 3] = ~st[ 9] & st[ 5];
	bc[ 4] = ~st[ 5] & st[ 6];
	st[ 5] ^= bc[ 0];
	st[ 6] ^= bc[ 1];
	st[ 7] ^= bc[ 2];
	st[ 8] ^= bc[ 3];
	st[ 9] ^= bc[ 4];

	bc[ 0] = ~st[11] & st[12];
	bc[ 1] = ~st[12] & st[13];
	bc[ 2] = ~st[13] & st[14];
	bc[ 3] = ~st[14] & st[10];
	bc[ 4] = ~st[10] & st[11];
	st[10] ^= bc[ 0];
	st[11] ^= bc[ 1];
	st[12] ^= bc[ 2];
	st[13] ^= bc[ 3];
	st[14] ^= bc[ 4];

	bc[ 0] = ~st[16] & st[17];
	bc[ 1] = ~st[17] & st[18];
	bc[ 2] = ~st[18] & st[19];
	bc[ 3] = ~st[19] & st[15];
	bc[ 4] = ~st[15] & st[16];
	st[15] ^= bc[ 0];
	st[16] ^= bc[ 1];
	st[17] ^= bc[ 2];
	st[18] ^= bc[ 3];
	st[19] ^= bc[ 4];

	bc[ 0] = ~st[21] & st[22];
	bc[ 1] = ~st[22] & st[23];
	bc[ 2] = ~st[23] & st[24];
	bc[ 3] = ~st[24] & st[20];
	bc[ 4] = ~st[20] & st[21];
	st[20] ^= bc[ 0];
	st[21] ^= bc[ 1];
	st[22] ^= bc[ 2];
	st[23] ^= bc[ 3];
	st[24] ^= bc[ 4];
}
static void keccakf(u64 st[25])
{
	int round;

	for (round = 0; round < KECCAK_ROUNDS; round++) {
		keccakf_round(st);
		/* Iota */
		st[0] ^= keccakf_rndc[round];
	}
}
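/*
 * keccakf() above is the full 24-round permutation of the 1600-bit
 * (25 x 64-bit lane) state; it runs once for every absorbed rate-sized
 * block and once more when the final padded block is processed.
 */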
int crypto_sha3_init(struct shash_desc *desc)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);

	sctx->rsiz = 200 - 2 * digest_size;
	sctx->rsizw = sctx->rsiz / 8;
	sctx->partial = 0;

	memset(sctx->st, 0, sizeof(sctx->st));
	return 0;
}
EXPORT_SYMBOL(crypto_sha3_init);
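/*
 * Worked example of the rate computation in crypto_sha3_init() (per the
 * FIPS 202 parameters): SHA3-256 has a 32-byte digest, so the rate is
 * rsiz = 200 - 2 * 32 = 136 bytes, i.e. rsizw = 17 64-bit words per
 * block; SHA3-512 gets rsiz = 200 - 2 * 64 = 72 bytes (9 words).
 */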
int crypto_sha3_update(struct shash_desc *desc, const u8 *data,
		       unsigned int len)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int done;
	const u8 *src;

	done = 0;
	src = data;

	if ((sctx->partial + len) > (sctx->rsiz - 1)) {
		if (sctx->partial) {
			done = -sctx->partial;
			memcpy(sctx->buf + sctx->partial, data,
			       done + sctx->rsiz);
			src = sctx->buf;
		}

		do {
			unsigned int i;

			for (i = 0; i < sctx->rsizw; i++)
				sctx->st[i] ^= get_unaligned_le64(src + 8 * i);
			keccakf(sctx->st);

			done += sctx->rsiz;
			src = data + done;
		} while (done + (sctx->rsiz - 1) < len);

		sctx->partial = 0;
	}
	memcpy(sctx->buf + sctx->partial, src, len - done);
	sctx->partial += (len - done);

	return 0;
}
EXPORT_SYMBOL(crypto_sha3_update);
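/*
 * Note on crypto_sha3_update() above: data is absorbed one full
 * rate-sized block at a time (rsiz bytes are XORed into the state and
 * keccakf() is run); any tail shorter than a block stays in sctx->buf
 * and is picked up by the next update or by the final padding step.
 */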
int crypto_sha3_final(struct shash_desc *desc, u8 *out)
{
	struct sha3_state *sctx = shash_desc_ctx(desc);
	unsigned int i, inlen = sctx->partial;
	unsigned int digest_size = crypto_shash_digestsize(desc->tfm);
	__le64 *digest = (__le64 *)out;

	/* SHA-3 domain separation suffix plus pad10*1 padding */
	sctx->buf[inlen++] = 0x06;
	memset(sctx->buf + inlen, 0, sctx->rsiz - inlen);
	sctx->buf[sctx->rsiz - 1] |= 0x80;

	for (i = 0; i < sctx->rsizw; i++)
		sctx->st[i] ^= get_unaligned_le64(sctx->buf + 8 * i);

	keccakf(sctx->st);

	for (i = 0; i < digest_size / 8; i++)
		put_unaligned_le64(sctx->st[i], digest++);

	/* SHA3-224 (28-byte digest) needs a trailing 4-byte half lane */
	if (digest_size & 4)
		put_unaligned_le32(sctx->st[i], (__le32 *)digest);

	memset(sctx, 0, sizeof(*sctx));
	return 0;
}
EXPORT_SYMBOL(crypto_sha3_final);
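/*
 * Illustrative sketch (not part of this file) of how a kernel caller
 * would use the hashes registered below through the shash API, assuming
 * "data"/"len" point at the message to hash; error handling is trimmed:
 *
 *	struct crypto_shash *tfm = crypto_alloc_shash("sha3-256", 0, 0);
 *	u8 digest[SHA3_256_DIGEST_SIZE];
 *
 *	if (IS_ERR(tfm))
 *		return PTR_ERR(tfm);
 *	{
 *		SHASH_DESC_ON_STACK(desc, tfm);
 *
 *		desc->tfm = tfm;
 *		crypto_shash_digest(desc, data, len, digest);
 *	}
 *	crypto_free_shash(tfm);
 */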
static struct shash_alg algs[] = { {
	.digestsize		= SHA3_224_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-224",
	.base.cra_driver_name	= "sha3-224-generic",
	.base.cra_blocksize	= SHA3_224_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_256_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-256",
	.base.cra_driver_name	= "sha3-256-generic",
	.base.cra_blocksize	= SHA3_256_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_384_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-384",
	.base.cra_driver_name	= "sha3-384-generic",
	.base.cra_blocksize	= SHA3_384_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
}, {
	.digestsize		= SHA3_512_DIGEST_SIZE,
	.init			= crypto_sha3_init,
	.update			= crypto_sha3_update,
	.final			= crypto_sha3_final,
	.descsize		= sizeof(struct sha3_state),
	.base.cra_name		= "sha3-512",
	.base.cra_driver_name	= "sha3-512-generic",
	.base.cra_blocksize	= SHA3_512_BLOCK_SIZE,
	.base.cra_module	= THIS_MODULE,
} };
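/*
 * All four variants above share the same init/update/final code; only
 * the digest size (and hence the sponge rate) and the advertised block
 * size differ.  .descsize tells the shash core how much per-request
 * context (a struct sha3_state) to allocate behind each shash_desc.
 */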
static int __init sha3_generic_mod_init(void)
{
	return crypto_register_shashes(algs, ARRAY_SIZE(algs));
}
static void __exit sha3_generic_mod_fini(void)
{
	crypto_unregister_shashes(algs, ARRAY_SIZE(algs));
}
subsys_initcall(sha3_generic_mod_init);
module_exit(sha3_generic_mod_fini);

MODULE_LICENSE("GPL");
MODULE_DESCRIPTION("SHA-3 Secure Hash Algorithm");

MODULE_ALIAS_CRYPTO("sha3-224");
MODULE_ALIAS_CRYPTO("sha3-224-generic");
MODULE_ALIAS_CRYPTO("sha3-256");
MODULE_ALIAS_CRYPTO("sha3-256-generic");
MODULE_ALIAS_CRYPTO("sha3-384");
MODULE_ALIAS_CRYPTO("sha3-384-generic");
MODULE_ALIAS_CRYPTO("sha3-512");
MODULE_ALIAS_CRYPTO("sha3-512-generic");