@@ -29,37 +29,30 @@ typedef unsigned char uint8_t;
 typedef unsigned int uint32_t;
 typedef unsigned __int64 uint64_t;
 
-#define FORCE_INLINE  __forceinline
+#define MURMUR3_FORCE_INLINE  __forceinline
 
 #include <stdlib.h>
 
-#define ROTL32(x,y)  _rotl(x,y)
-#define ROTL64(x,y)  _rotl64(x,y)
+#define MURMUR3_ROTL64(x,y)  _rotl64(x,y)
 
-#define BIG_CONSTANT(x) (x)
+#define MURMUR3_BIG_CONSTANT(x) (x)
 
 // Other compilers
 
 #else // defined(_MSC_VER)
 
 #include <stdint.h>
 
-#define FORCE_INLINE inline __attribute__((always_inline))
-
-inline uint32_t rotl32 ( uint32_t x, int8_t r )
-{
-  return (x << r) | (x >> (32 - r));
-}
+#define MURMUR3_FORCE_INLINE inline __attribute__((always_inline))
 
 inline uint64_t rotl64 ( uint64_t x, int8_t r )
 {
   return (x << r) | (x >> (64 - r));
 }
 
-#define ROTL32(x,y)  rotl32(x,y)
-#define ROTL64(x,y)  rotl64(x,y)
+#define MURMUR3_ROTL64(x,y)  rotl64(x,y)
 
-#define BIG_CONSTANT(x) (x##LLU)
+#define MURMUR3_BIG_CONSTANT(x) (x##LLU)
 
 #endif // !defined(_MSC_VER)
 
@@ -78,7 +71,7 @@ typedef struct {
 // Block read - if your platform needs to do endian-swapping or can only
 // handle aligned reads, do the conversion here
 
-FORCE_INLINE uint64_t getblock64 ( const uint64_t * p, size_t i )
+MURMUR3_FORCE_INLINE uint64_t getblock64 ( const uint64_t * p, size_t i )
 {
   uint64_t res;
   memcpy(&res, p + i, sizeof(res));
@@ -88,20 +81,21 @@ FORCE_INLINE uint64_t getblock64 ( const uint64_t * p, size_t i )
 //-----------------------------------------------------------------------------
 // Finalization mix - force all bits of a hash block to avalanche
 
-FORCE_INLINE uint64_t fmix64 ( uint64_t k )
+MURMUR3_FORCE_INLINE uint64_t fmix64 ( uint64_t k )
 {
   k ^= k >> 33;
-  k *= BIG_CONSTANT(0xff51afd7ed558ccd);
+  k *= MURMUR3_BIG_CONSTANT(0xff51afd7ed558ccd);
   k ^= k >> 33;
-  k *= BIG_CONSTANT(0xc4ceb9fe1a85ec53);
+  k *= MURMUR3_BIG_CONSTANT(0xc4ceb9fe1a85ec53);
   k ^= k >> 33;
 
   return k;
 }
 
-FORCE_INLINE void MurmurHash3_x64_128(const void* key, size_t lenBytes, uint64_t seed, HashState& out) {
-  static const uint64_t c1 = BIG_CONSTANT(0x87c37b91114253d5);
-  static const uint64_t c2 = BIG_CONSTANT(0x4cf5ad432745937f);
+MURMUR3_FORCE_INLINE void MurmurHash3_x64_128(const void* key, size_t lenBytes,
+    uint64_t seed, HashState& out) {
+  static const uint64_t c1 = MURMUR3_BIG_CONSTANT(0x87c37b91114253d5);
+  static const uint64_t c2 = MURMUR3_BIG_CONSTANT(0x4cf5ad432745937f);
 
   const uint8_t* data = (const uint8_t*)key;
 
@@ -118,13 +112,13 @@ FORCE_INLINE void MurmurHash3_x64_128(const void* key, size_t lenBytes, uint64_t
     uint64_t k1 = getblock64(blocks, i * 2 + 0);
     uint64_t k2 = getblock64(blocks, i * 2 + 1);
 
-    k1 *= c1; k1 = ROTL64(k1,31); k1 *= c2; out.h1 ^= k1;
-    out.h1 = ROTL64(out.h1,27);
+    k1 *= c1; k1 = MURMUR3_ROTL64(k1,31); k1 *= c2; out.h1 ^= k1;
+    out.h1 = MURMUR3_ROTL64(out.h1,27);
     out.h1 += out.h2;
     out.h1 = out.h1 * 5 + 0x52dce729;
 
-    k2 *= c2; k2 = ROTL64(k2,33); k2 *= c1; out.h2 ^= k2;
-    out.h2 = ROTL64(out.h2,31);
+    k2 *= c2; k2 = MURMUR3_ROTL64(k2,33); k2 *= c1; out.h2 ^= k2;
+    out.h2 = MURMUR3_ROTL64(out.h2,31);
     out.h2 += out.h1;
     out.h2 = out.h2 * 5 + 0x38495ab5;
   }
@@ -144,7 +138,7 @@ FORCE_INLINE void MurmurHash3_x64_128(const void* key, size_t lenBytes, uint64_t
     case 11: k2 ^= ((uint64_t)tail[10]) << 16; // falls through
     case 10: k2 ^= ((uint64_t)tail[ 9]) << 8;  // falls through
     case  9: k2 ^= ((uint64_t)tail[ 8]) << 0;
-      k2 *= c2; k2 = ROTL64(k2,33); k2 *= c1; out.h2 ^= k2;
+      k2 *= c2; k2 = MURMUR3_ROTL64(k2,33); k2 *= c1; out.h2 ^= k2;
       // falls through
     case  8: k1 ^= ((uint64_t)tail[ 7]) << 56; // falls through
     case  7: k1 ^= ((uint64_t)tail[ 6]) << 48; // falls through
@@ -154,7 +148,7 @@ FORCE_INLINE void MurmurHash3_x64_128(const void* key, size_t lenBytes, uint64_t
     case  3: k1 ^= ((uint64_t)tail[ 2]) << 16; // falls through
     case  2: k1 ^= ((uint64_t)tail[ 1]) << 8;  // falls through
     case  1: k1 ^= ((uint64_t)tail[ 0]) << 0;
-      k1 *= c1; k1 = ROTL64(k1,31); k1 *= c2; out.h1 ^= k1;
+      k1 *= c1; k1 = MURMUR3_ROTL64(k1,31); k1 *= c2; out.h1 ^= k1;
   };
 
   //----------
@@ -175,10 +169,14 @@ FORCE_INLINE void MurmurHash3_x64_128(const void* key, size_t lenBytes, uint64_t
 
 //-----------------------------------------------------------------------------
 
-FORCE_INLINE uint16_t compute_seed_hash(uint64_t seed) {
+MURMUR3_FORCE_INLINE uint16_t compute_seed_hash(uint64_t seed) {
   HashState hashes;
   MurmurHash3_x64_128(&seed, sizeof(seed), 0, hashes);
   return static_cast<uint16_t>(hashes.h1 & 0xffff);
 }
 
+#undef MURMUR3_FORCE_INLINE
+#undef MURMUR3_ROTL64
+#undef MURMUR3_BIG_CONSTANT
+
 #endif // _MURMURHASH3_H_
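
compute_seed_hash above already shows the calling pattern this header expects: declare a HashState, pass a key pointer, its length in bytes, and a seed, then read the two 64-bit halves h1 and h2. Below is a minimal usage sketch along the same lines; the header file name MurmurHash3.h and the exact HashState layout (two public uint64_t members h1 and h2) are assumptions inferred from the include guard and the out.h1/out.h2 accesses in the diff, not confirmed by it.

#include <cstdio>
#include <cstring>
#include "MurmurHash3.h"   // assumed header name, inferred from the _MURMURHASH3_H_ guard

int main() {
  const char* key = "hello world";
  HashState result;        // assumed: struct with uint64_t h1, h2, populated by the hash function
  MurmurHash3_x64_128(key, std::strlen(key), 0 /* seed */, result);
  std::printf("h1 = %016llx\nh2 = %016llx\n",
              (unsigned long long)result.h1,
              (unsigned long long)result.h2);
  return 0;
}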