// SPDX-License-Identifier: LGPL-2.1+
/*
 * MurmurHash3 was written by Austin Appleby, and is placed in the public
 * domain. The author hereby disclaims copyright to this source code.
 *
 * Adapted by John Wiele (jwiele@redhat.com).
 */

#include "murmurhash3.h"

#include <linux/unaligned.h>
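
/* <linux/unaligned.h> supplies get_unaligned_le64()/put_unaligned_le64(),
 * which read and write 64-bit little-endian words at unaligned addresses.
 */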

static inline u64 rotl64(u64 x, s8 r)
{
	return (x << r) | (x >> (64 - r));
}

#define ROTL64(x, y) rotl64(x, y)

/* Finalization mix - force all bits of a hash block to avalanche */

static __always_inline u64 fmix64(u64 k)
{
	k ^= k >> 33;
	k *= 0xff51afd7ed558ccdLLU;
	k ^= k >> 33;
	k *= 0xc4ceb9fe1a85ec53LLU;
	k ^= k >> 33;

	return k;
}
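
/*
 * Compute the x64 128-bit MurmurHash3 of the first len bytes of key and
 * write the two 64-bit halves of the digest to out in little-endian order.
 */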
void murmurhash3_128(const void *key, const int len, const u32 seed, void *out)
{
	const u8 *data = key;
	const int nblocks = len / 16;

	u64 h1 = seed;
	u64 h2 = seed;

	const u64 c1 = 0x87c37b91114253d5LLU;
	const u64 c2 = 0x4cf5ad432745937fLLU;

	u64 *hash_out = out;

	int i;
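
	/* body: mix each full 16-byte block into the running hash state */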

	for (i = 0; i < nblocks; i++) {
		u64 k1 = get_unaligned_le64(&data[i * 16]);
		u64 k2 = get_unaligned_le64(&data[i * 16 + 8]);

		k1 *= c1;
		k1 = ROTL64(k1, 31);
		k1 *= c2;
		h1 ^= k1;

		h1 = ROTL64(h1, 27);
		h1 += h2;
		h1 = h1 * 5 + 0x52dce729;

		k2 *= c2;
		k2 = ROTL64(k2, 33);
		k2 *= c1;
		h2 ^= k2;

		h2 = ROTL64(h2, 31);
		h2 += h1;
		h2 = h2 * 5 + 0x38495ab5;
	}
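
	/* tail: fold the trailing len % 16 bytes into k1 and k2 */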

	{
		const u8 *tail = (const u8 *)(data + nblocks * 16);

		u64 k1 = 0;
		u64 k2 = 0;

		switch (len & 15) {
		case 15:
			k2 ^= ((u64)tail[14]) << 48;
			fallthrough;
		case 14:
			k2 ^= ((u64)tail[13]) << 40;
			fallthrough;
		case 13:
			k2 ^= ((u64)tail[12]) << 32;
			fallthrough;
		case 12:
			k2 ^= ((u64)tail[11]) << 24;
			fallthrough;
		case 11:
			k2 ^= ((u64)tail[10]) << 16;
			fallthrough;
		case 10:
			k2 ^= ((u64)tail[9]) << 8;
			fallthrough;
		case 9:
			k2 ^= ((u64)tail[8]) << 0;
			k2 *= c2;
			k2 = ROTL64(k2, 33);
			k2 *= c1;
			h2 ^= k2;
			fallthrough;

		case 8:
			k1 ^= ((u64)tail[7]) << 56;
			fallthrough;
		case 7:
			k1 ^= ((u64)tail[6]) << 48;
			fallthrough;
		case 6:
			k1 ^= ((u64)tail[5]) << 40;
			fallthrough;
		case 5:
			k1 ^= ((u64)tail[4]) << 32;
			fallthrough;
		case 4:
			k1 ^= ((u64)tail[3]) << 24;
			fallthrough;
		case 3:
			k1 ^= ((u64)tail[2]) << 16;
			fallthrough;
		case 2:
			k1 ^= ((u64)tail[1]) << 8;
			fallthrough;
		case 1:
			k1 ^= ((u64)tail[0]) << 0;
			k1 *= c1;
			k1 = ROTL64(k1, 31);
			k1 *= c2;
			h1 ^= k1;
			break;
		default:
			break;
		}
	}
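
	/* finalization: incorporate the length and force full avalanche */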

	h1 ^= len;
	h2 ^= len;

	h1 += h2;
	h2 += h1;

	h1 = fmix64(h1);
	h2 = fmix64(h2);

	h1 += h2;
	h2 += h1;

	put_unaligned_le64(h1, &hash_out[0]);
	put_unaligned_le64(h2, &hash_out[1]);
}