#include "MurmurHash3.hpp"

#include <stdint.h> /* fixed-width integer types used throughout */
#include <string.h> /* memcpy for alignment-safe block reads */
/*-----------------------------------------------------------------------------
 * Platform-specific functions and macros.
 */
#if defined(_MSC_VER)

/* Microsoft Visual Studio */
#define FORCE_INLINE __forceinline
#include <stdlib.h>
#define ROTL32(x, y) _rotl(x, y)
#define ROTL64(x, y) _rotl64(x, y)
/* MSVC accepts plain 64-bit literals without a suffix. */
#define BIG_CONSTANT(x) (x)

#else // not defined(_MSC_VER)

/* Other compilers (gcc, clang, ...) */
#define FORCE_INLINE inline __attribute__((always_inline))

/* 'static' is required for correctness in C99/C11: a bare 'inline'
 * definition has no external definition and can fail to link at -O0.
 * r must be in 1..31 (r == 0 would shift by the full width: UB). */
static inline uint32_t rotl32(uint32_t x, int8_t r) {
  return (x << r) | (x >> (32 - r));
}

/* r must be in 1..63. */
static inline uint64_t rotl64(uint64_t x, int8_t r) {
  return (x << r) | (x >> (64 - r));
}

#define ROTL32(x, y) rotl32(x, y)
#define ROTL64(x, y) rotl64(x, y)

#define BIG_CONSTANT(x) (x##LLU)

#endif // !defined(_MSC_VER)

/*-----------------------------------------------------------------------------
 * Block read - if your platform needs to do endian-swapping or can only
 * handle aligned reads, do the conversion here.
 */
#define GETBLOCK(lhs, p, i) (lhs) = (p)[(i)]

/* Finalization mix - force all bits of a 32-bit hash block to avalanche.
 * do/while(0) makes the multi-statement macro statement-safe. */
#define FMIX32(h)       \
  do {                  \
    uint32_t t_k = (h); \
    t_k ^= t_k >> 16;   \
    t_k *= 0x85ebca6b;  \
    t_k ^= t_k >> 13;   \
    t_k *= 0xc2b2ae35;  \
    t_k ^= t_k >> 16;   \
    (h) = t_k;          \
  } while (0)

/* Finalization mix - force all bits of a 64-bit hash block to avalanche. */
#define FMIX64(k)                            \
  do {                                       \
    uint64_t t_k = (k);                      \
    t_k ^= t_k >> 33;                        \
    t_k *= BIG_CONSTANT(0xff51afd7ed558ccd); \
    t_k ^= t_k >> 33;                        \
    t_k *= BIG_CONSTANT(0xc4ceb9fe1a85ec53); \
    t_k ^= t_k >> 33;                        \
    (k) = t_k;                               \
  } while (0)
/*
 * MurmurHash3, 32-bit result, tuned for x86.
 *
 * key:  bytes to hash (may be unaligned; not dereferenced when len == 0).
 * len:  input length in bytes.
 * seed: initial state; distinct seeds give independent hash functions.
 * out:  receives one uint32_t (written with memcpy, so any alignment works).
 *
 * NOTE(review): like the reference implementation, blocks are read in native
 * byte order, so the result differs between little- and big-endian hosts.
 */
void MurmurHash3_x86_32(
    const void *key,
    int len,
    uint32_t seed,
    void *out) {
  const uint8_t *data = (const uint8_t *)key;
  const int nblocks = len / 4;

  uint32_t h1 = seed;

  const uint32_t c1 = 0xcc9e2d51;
  const uint32_t c2 = 0x1b873593;

  /* Body: mix one 4-byte block at a time. memcpy avoids the unaligned-read /
   * strict-aliasing UB of casting 'data' to const uint32_t *. */
  for (int i = 0; i < nblocks; i++) {
    uint32_t k1;
    memcpy(&k1, data + (size_t)i * 4, sizeof(k1));

    k1 *= c1;
    k1 = (k1 << 15) | (k1 >> 17); /* ROTL32(k1, 15) */
    k1 *= c2;

    h1 ^= k1;
    h1 = (h1 << 13) | (h1 >> 19); /* ROTL32(h1, 13) */
    h1 = h1 * 5 + 0xe6546b64;
  }

  /* Tail: the final 0-3 bytes; cases fall through on purpose. */
  const uint8_t *tail = data + (size_t)nblocks * 4;
  uint32_t k1 = 0;
  switch (len & 3) {
    case 3:
      k1 ^= (uint32_t)tail[2] << 16; /* fallthrough */
    case 2:
      k1 ^= (uint32_t)tail[1] << 8; /* fallthrough */
    case 1:
      k1 ^= (uint32_t)tail[0];
      k1 *= c1;
      k1 = (k1 << 15) | (k1 >> 17);
      k1 *= c2;
      h1 ^= k1;
  }

  /* Finalization: fold in the length, then force avalanche (fmix32). */
  h1 ^= (uint32_t)len;
  h1 ^= h1 >> 16;
  h1 *= 0x85ebca6b;
  h1 ^= h1 >> 13;
  h1 *= 0xc2b2ae35;
  h1 ^= h1 >> 16;

  memcpy(out, &h1, sizeof(h1));
}
/* Rotate a 32-bit value left by r bits (r must be in 1..31). */
static uint32_t mh3_rotl32(uint32_t x, int r) {
  return (x << r) | (x >> (32 - r));
}

/* Finalization mix: forces all bits of a 32-bit state word to avalanche. */
static uint32_t mh3_fmix32(uint32_t h) {
  h ^= h >> 16;
  h *= 0x85ebca6b;
  h ^= h >> 13;
  h *= 0xc2b2ae35;
  h ^= h >> 16;
  return h;
}

/*
 * MurmurHash3, 128-bit result, tuned for 32-bit platforms.
 *
 * key:  bytes to hash (may be unaligned; not dereferenced when len == 0).
 * len:  input length in bytes.
 * seed: initial state for all four lanes.
 * out:  receives four uint32_t (16 bytes), written with memcpy.
 *
 * NOTE(review): result is endian-dependent, as in the reference code.
 */
void MurmurHash3_x86_128(
    const void *key,
    const int len,
    uint32_t seed,
    void *out) {
  const uint8_t *data = (const uint8_t *)key;
  const int nblocks = len / 16;

  uint32_t h1 = seed;
  uint32_t h2 = seed;
  uint32_t h3 = seed;
  uint32_t h4 = seed;

  const uint32_t c1 = 0x239b961b;
  const uint32_t c2 = 0xab0e9789;
  const uint32_t c3 = 0x38b34ae5;
  const uint32_t c4 = 0xa1e38b93;

  /* Body: 16-byte blocks, four 32-bit lanes each. memcpy sidesteps the
   * unaligned-read / strict-aliasing UB of a uint32_t* cast. */
  for (int i = 0; i < nblocks; i++) {
    const uint8_t *p = data + (size_t)i * 16;
    uint32_t k1, k2, k3, k4;
    memcpy(&k1, p + 0, sizeof(k1));
    memcpy(&k2, p + 4, sizeof(k2));
    memcpy(&k3, p + 8, sizeof(k3));
    memcpy(&k4, p + 12, sizeof(k4));

    k1 *= c1; k1 = mh3_rotl32(k1, 15); k1 *= c2; h1 ^= k1;
    h1 = mh3_rotl32(h1, 19); h1 += h2; h1 = h1 * 5 + 0x561ccd1b;

    k2 *= c2; k2 = mh3_rotl32(k2, 16); k2 *= c3; h2 ^= k2;
    h2 = mh3_rotl32(h2, 17); h2 += h3; h2 = h2 * 5 + 0x0bcaa747;

    k3 *= c3; k3 = mh3_rotl32(k3, 17); k3 *= c4; h3 ^= k3;
    h3 = mh3_rotl32(h3, 15); h3 += h4; h3 = h3 * 5 + 0x96cd1c35;

    k4 *= c4; k4 = mh3_rotl32(k4, 18); k4 *= c1; h4 ^= k4;
    h4 = mh3_rotl32(h4, 13); h4 += h1; h4 = h4 * 5 + 0x32ac3b17;
  }

  /* Tail: remaining 0-15 bytes; cases fall through on purpose. The uint32_t
   * casts keep the shifts unsigned — '(int)tail[i] << 24' would be UB for
   * bytes >= 0x80. */
  const uint8_t *tail = data + (size_t)nblocks * 16;
  uint32_t k1 = 0, k2 = 0, k3 = 0, k4 = 0;
  switch (len & 15) {
    case 15: k4 ^= (uint32_t)tail[14] << 16; /* fallthrough */
    case 14: k4 ^= (uint32_t)tail[13] << 8; /* fallthrough */
    case 13: k4 ^= (uint32_t)tail[12] << 0;
      k4 *= c4; k4 = mh3_rotl32(k4, 18); k4 *= c1; h4 ^= k4;
      /* fallthrough */
    case 12: k3 ^= (uint32_t)tail[11] << 24; /* fallthrough */
    case 11: k3 ^= (uint32_t)tail[10] << 16; /* fallthrough */
    case 10: k3 ^= (uint32_t)tail[9] << 8; /* fallthrough */
    case 9: k3 ^= (uint32_t)tail[8] << 0;
      k3 *= c3; k3 = mh3_rotl32(k3, 17); k3 *= c4; h3 ^= k3;
      /* fallthrough */
    case 8: k2 ^= (uint32_t)tail[7] << 24; /* fallthrough */
    case 7: k2 ^= (uint32_t)tail[6] << 16; /* fallthrough */
    case 6: k2 ^= (uint32_t)tail[5] << 8; /* fallthrough */
    case 5: k2 ^= (uint32_t)tail[4] << 0;
      k2 *= c2; k2 = mh3_rotl32(k2, 16); k2 *= c3; h2 ^= k2;
      /* fallthrough */
    case 4: k1 ^= (uint32_t)tail[3] << 24; /* fallthrough */
    case 3: k1 ^= (uint32_t)tail[2] << 16; /* fallthrough */
    case 2: k1 ^= (uint32_t)tail[1] << 8; /* fallthrough */
    case 1: k1 ^= (uint32_t)tail[0] << 0;
      k1 *= c1; k1 = mh3_rotl32(k1, 15); k1 *= c2; h1 ^= k1;
  }

  /* Finalization: fold in the length, cross-mix lanes, avalanche, re-mix. */
  h1 ^= (uint32_t)len;
  h2 ^= (uint32_t)len;
  h3 ^= (uint32_t)len;
  h4 ^= (uint32_t)len;

  h1 += h2; h1 += h3; h1 += h4;
  h2 += h1; h3 += h1; h4 += h1;

  h1 = mh3_fmix32(h1);
  h2 = mh3_fmix32(h2);
  h3 = mh3_fmix32(h3);
  h4 = mh3_fmix32(h4);

  h1 += h2; h1 += h3; h1 += h4;
  h2 += h1; h3 += h1; h4 += h1;

  uint32_t result[4] = {h1, h2, h3, h4};
  memcpy(out, result, sizeof(result));
}
/* Rotate a 64-bit value left by r bits (r must be in 1..63). */
static uint64_t mh3_rotl64(uint64_t x, int r) {
  return (x << r) | (x >> (64 - r));
}

/* Finalization mix: forces all bits of a 64-bit state word to avalanche. */
static uint64_t mh3_fmix64(uint64_t k) {
  k ^= k >> 33;
  k *= 0xff51afd7ed558ccdULL;
  k ^= k >> 33;
  k *= 0xc4ceb9fe1a85ec53ULL;
  k ^= k >> 33;
  return k;
}

/*
 * MurmurHash3, 128-bit result, tuned for 64-bit platforms.
 *
 * key:  bytes to hash (may be unaligned; not dereferenced when len == 0).
 * len:  input length in bytes.
 * seed: initial state for both lanes.
 * out:  receives two uint64_t (16 bytes), written with memcpy.
 *
 * C-style casts replace the original's C++-only 'uint64_t(x)' casts so the
 * file compiles as either C or C++. Result is endian-dependent, as in the
 * reference code.
 */
void MurmurHash3_x64_128(
    const void *key,
    const int len,
    const uint32_t seed,
    void *out) {
  const uint8_t *data = (const uint8_t *)key;
  const int nblocks = len / 16;

  uint64_t h1 = seed;
  uint64_t h2 = seed;

  const uint64_t c1 = 0x87c37b91114253d5ULL;
  const uint64_t c2 = 0x4cf5ad432745937fULL;

  /* Body: 16-byte blocks, two 64-bit lanes each. memcpy sidesteps the
   * unaligned-read / strict-aliasing UB of a uint64_t* cast. */
  for (int i = 0; i < nblocks; i++) {
    uint64_t k1, k2;
    memcpy(&k1, data + (size_t)i * 16, sizeof(k1));
    memcpy(&k2, data + (size_t)i * 16 + 8, sizeof(k2));

    k1 *= c1; k1 = mh3_rotl64(k1, 31); k1 *= c2; h1 ^= k1;
    h1 = mh3_rotl64(h1, 27); h1 += h2; h1 = h1 * 5 + 0x52dce729;

    k2 *= c2; k2 = mh3_rotl64(k2, 33); k2 *= c1; h2 ^= k2;
    h2 = mh3_rotl64(h2, 31); h2 += h1; h2 = h2 * 5 + 0x38495ab5;
  }

  /* Tail: remaining 0-15 bytes; cases fall through on purpose. */
  const uint8_t *tail = data + (size_t)nblocks * 16;
  uint64_t k1 = 0;
  uint64_t k2 = 0;
  switch (len & 15) {
    case 15: k2 ^= (uint64_t)tail[14] << 48; /* fallthrough */
    case 14: k2 ^= (uint64_t)tail[13] << 40; /* fallthrough */
    case 13: k2 ^= (uint64_t)tail[12] << 32; /* fallthrough */
    case 12: k2 ^= (uint64_t)tail[11] << 24; /* fallthrough */
    case 11: k2 ^= (uint64_t)tail[10] << 16; /* fallthrough */
    case 10: k2 ^= (uint64_t)tail[9] << 8; /* fallthrough */
    case 9: k2 ^= (uint64_t)tail[8] << 0;
      k2 *= c2; k2 = mh3_rotl64(k2, 33); k2 *= c1; h2 ^= k2;
      /* fallthrough */
    case 8: k1 ^= (uint64_t)tail[7] << 56; /* fallthrough */
    case 7: k1 ^= (uint64_t)tail[6] << 48; /* fallthrough */
    case 6: k1 ^= (uint64_t)tail[5] << 40; /* fallthrough */
    case 5: k1 ^= (uint64_t)tail[4] << 32; /* fallthrough */
    case 4: k1 ^= (uint64_t)tail[3] << 24; /* fallthrough */
    case 3: k1 ^= (uint64_t)tail[2] << 16; /* fallthrough */
    case 2: k1 ^= (uint64_t)tail[1] << 8; /* fallthrough */
    case 1: k1 ^= (uint64_t)tail[0] << 0;
      k1 *= c1; k1 = mh3_rotl64(k1, 31); k1 *= c2; h1 ^= k1;
  }

  /* Finalization: fold in the length, cross-mix, avalanche, re-mix. */
  h1 ^= (uint64_t)len;
  h2 ^= (uint64_t)len;

  h1 += h2;
  h2 += h1;

  h1 = mh3_fmix64(h1);
  h2 = mh3_fmix64(h2);

  h1 += h2;
  h2 += h1;

  uint64_t result[2] = {h1, h2};
  memcpy(out, result, sizeof(result));
}