#include <ripple/beast/hash/impl/xxhash.h>
#if defined(__ARM_FEATURE_UNALIGNED) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
#  define XXH_USE_UNALIGNED_ACCESS 1
#endif

#define XXH_FORCE_NATIVE_FORMAT 0
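// Note: XXH_USE_UNALIGNED_ACCESS enables direct (possibly unaligned) memory reads on CPUs
// that tolerate them. Leaving XXH_FORCE_NATIVE_FORMAT at 0 keeps the canonical little-endian
// interpretation of the input, so hash values match across little- and big-endian platforms.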
#ifdef _MSC_VER    // Visual Studio
#  pragma warning(disable : 4127)   // disable: C4127: conditional expression is constant
#endif
#ifdef _MSC_VER    // Visual Studio
#  define FORCE_INLINE static __forceinline
#else
#  ifdef __GNUC__
#    define FORCE_INLINE static inline __attribute__((always_inline))
#  else
#    define FORCE_INLINE static inline
#  endif
#endif
// Memory routines; replace these thin wrappers to plug in custom allocation/copy functions.
#include <stdlib.h>   // malloc, free
static void* XXH_malloc(size_t s) { return malloc(s); }
static void  XXH_free  (void* p)  { free(p); }
#include <string.h>   // memcpy
static void* XXH_memcpy(void* dest, const void* src, size_t size)
{
    return memcpy(dest, src, size);
}
#if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L   // C99
#  include <stdint.h>
   typedef uint8_t  BYTE;
   typedef uint16_t U16;
   typedef uint32_t U32;
   typedef  int32_t S32;
   typedef uint64_t U64;
#else
   typedef unsigned char      BYTE;
   typedef unsigned short     U16;
   typedef unsigned int       U32;
   typedef   signed int       S32;
   typedef unsigned long long U64;
#endif
#if defined(__GNUC__) && !defined(XXH_USE_UNALIGNED_ACCESS)
#  define _PACKED __attribute__ ((packed))
#else
#  define _PACKED
#endif

#if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
#  pragma pack(push, 1)
#endif
typedef struct _U32_S { U32 v; } _PACKED U32_S;
typedef struct _U64_S { U64 v; } _PACKED U64_S;
#if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
#  pragma pack(pop)
#endif

#define A32(x) (((U32_S *)(x))->v)
#define A64(x) (((U64_S *)(x))->v)
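// A32/A64 reinterpret a byte pointer as a (packed) 32- or 64-bit value, so input words can be
// read directly even from unaligned addresses when the compiler and architecture allow it.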
#define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
#if defined(_MSC_VER)
#  define XXH_rotl32(x,r) _rotl(x,r)
#  define XXH_rotl64(x,r) _rotl64(x,r)
#else
#  define XXH_rotl32(x,r) ((x << r) | (x >> (32 - r)))
#  define XXH_rotl64(x,r) ((x << r) | (x >> (64 - r)))
#endif
#if defined(_MSC_VER)     // Visual Studio
#  define XXH_swap32 _byteswap_ulong
#  define XXH_swap64 _byteswap_uint64
#elif GCC_VERSION >= 403
#  define XXH_swap32 __builtin_bswap32
#  define XXH_swap64 __builtin_bswap64
#else
static inline U32 XXH_swap32 (U32 x)
{
    return  ((x << 24) & 0xff000000 ) |
            ((x <<  8) & 0x00ff0000 ) |
            ((x >>  8) & 0x0000ff00 ) |
            ((x >> 24) & 0x000000ff );
}
static inline U64 XXH_swap64 (U64 x)
{
    return  ((x << 56) & 0xff00000000000000ULL) |
            ((x << 40) & 0x00ff000000000000ULL) |
            ((x << 24) & 0x0000ff0000000000ULL) |
            ((x <<  8) & 0x000000ff00000000ULL) |
            ((x >>  8) & 0x00000000ff000000ULL) |
            ((x >> 24) & 0x0000000000ff0000ULL) |
            ((x >> 40) & 0x000000000000ff00ULL) |
            ((x >> 56) & 0x00000000000000ffULL);
}
#endif
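// On big-endian targets the byte swap above is applied when reading input words, so every
// platform hashes the same little-endian interpretation of the input (see the
// XXH_readLE32_align / XXH_readLE64_align helpers used below).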
#define PRIME32_1   2654435761U
#define PRIME32_2   2246822519U
#define PRIME32_3   3266489917U
#define PRIME32_4    668265263U
#define PRIME32_5    374761393U

#define PRIME64_1 11400714785074694791ULL
#define PRIME64_2 14029467366897019727ULL
#define PRIME64_3  1609587929392839161ULL
#define PRIME64_4  9650029242287828579ULL
#define PRIME64_5  2870177450012600261ULL
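// These primes are the xxHash mixing multipliers. Every accumulator round in the functions
// below follows the same shape:
//     32-bit: acc = XXH_rotl32(acc + input * PRIME32_2, 13) * PRIME32_1;
//     64-bit: acc = XXH_rotl64(acc + input * PRIME64_2, 31) * PRIME64_1;
// The remaining primes are used in the tail handling and the finalization mixing.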
#ifndef XXH_CPU_LITTLE_ENDIAN   // It is possible to define XXH_CPU_LITTLE_ENDIAN externally, for example using a compiler switch
    static const int one = 1;
#   define XXH_CPU_LITTLE_ENDIAN   (*(char*)(&one))   // runtime detection: the first byte of the int 1 is 1 only on little-endian CPUs
#endif

#define XXH_STATIC_ASSERT(c)   { enum { XXH_static_assert = 1/(!!(c)) }; }   // use only *after* variable declarations
    const BYTE* p = (const BYTE*)input;
    const BYTE* bEnd = p + len;
#define XXH_get32bits(p) XXH_readLE32_align(p, endian, align)
#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    // Treat a NULL input as an empty buffer by pointing at a small dummy address.
    if (p==NULL) { len=0; bEnd=p=(const BYTE*)(size_t)16; }
#endif
    // Bulk path (len >= 16): four independent lane accumulators, 16 bytes per stripe.
    const BYTE* const limit = bEnd - 16;
    U32 v1 = seed + PRIME32_1 + PRIME32_2;
    U32 v2 = seed + PRIME32_2;
    U32 v3 = seed + 0;
    U32 v4 = seed - PRIME32_1;
    // One stripe: each lane absorbs one 32-bit word, is rotated, and is scaled by PRIME32_1.
    v1 += XXH_get32bits(p) * PRIME32_2; v1 = XXH_rotl32(v1, 13); v1 *= PRIME32_1; p+=4;
    v2 += XXH_get32bits(p) * PRIME32_2; v2 = XXH_rotl32(v2, 13); v2 *= PRIME32_1; p+=4;
    v3 += XXH_get32bits(p) * PRIME32_2; v3 = XXH_rotl32(v3, 13); v3 *= PRIME32_1; p+=4;
    v4 += XXH_get32bits(p) * PRIME32_2; v4 = XXH_rotl32(v4, 13); v4 *= PRIME32_1; p+=4;
    // len >= 16 branch: converge the four lane accumulators.
    h32 = XXH_rotl32(v1, 1) + XXH_rotl32(v2, 7) + XXH_rotl32(v3, 12) + XXH_rotl32(v4, 18);
    // len < 16 branch: start directly from the seed.
    h32 = seed + PRIME32_5;
    // Tail: fold in any remaining 4-byte words, then single bytes.
    h32 += XXH_get32bits(p) * PRIME32_3;
    h32  = XXH_rotl32(h32, 17) * PRIME32_4;
    h32 += (*p) * PRIME32_5;
    h32  = XXH_rotl32(h32, 11) * PRIME32_1;
unsigned int XXH32 (const void* input, size_t len, unsigned seed)
{
#  if !defined(XXH_USE_UNALIGNED_ACCESS)
    // Aligned input can take the faster direct-read path.
    if ((((size_t)input) & 3) == 0)
    const BYTE* p = (const BYTE*)input;
    const BYTE* bEnd = p + len;
#define XXH_get64bits(p) XXH_readLE64_align(p, endian, align)
#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    // Treat a NULL input as an empty buffer by pointing at a small dummy address.
    if (p==NULL) { len=0; bEnd=p=(const BYTE*)(size_t)32; }
#endif
    // Bulk path (len >= 32): four independent lane accumulators, 32 bytes per stripe.
    const BYTE* const limit = bEnd - 32;
    U64 v1 = seed + PRIME64_1 + PRIME64_2;
    U64 v2 = seed + PRIME64_2;
    U64 v3 = seed + 0;
    U64 v4 = seed - PRIME64_1;
    // One stripe: each lane absorbs one 64-bit word, is rotated, and is scaled by PRIME64_1.
    v1 += XXH_get64bits(p) * PRIME64_2; v1 = XXH_rotl64(v1, 31); v1 *= PRIME64_1; p+=8;
    v2 += XXH_get64bits(p) * PRIME64_2; v2 = XXH_rotl64(v2, 31); v2 *= PRIME64_1; p+=8;
    v3 += XXH_get64bits(p) * PRIME64_2; v3 = XXH_rotl64(v3, 31); v3 *= PRIME64_1; p+=8;
    v4 += XXH_get64bits(p) * PRIME64_2; v4 = XXH_rotl64(v4, 31); v4 *= PRIME64_1; p+=8;
    // len >= 32 branch: merge the lanes, then fold each fully mixed lane back into h64.
    h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
    v1 *= PRIME64_2; v1 = XXH_rotl64(v1, 31); v1 *= PRIME64_1; h64 ^= v1;
    h64 = h64 * PRIME64_1 + PRIME64_4;
    v2 *= PRIME64_2; v2 = XXH_rotl64(v2, 31); v2 *= PRIME64_1; h64 ^= v2;
    h64 = h64 * PRIME64_1 + PRIME64_4;
    v3 *= PRIME64_2; v3 = XXH_rotl64(v3, 31); v3 *= PRIME64_1; h64 ^= v3;
    h64 = h64 * PRIME64_1 + PRIME64_4;
    v4 *= PRIME64_2; v4 = XXH_rotl64(v4, 31); v4 *= PRIME64_1; h64 ^= v4;
    h64 = h64 * PRIME64_1 + PRIME64_4;
    // len < 32 branch: start directly from the seed.
    h64 = seed + PRIME64_5;
    // Tail: fold in remaining 8-byte words, then a 4-byte word, then single bytes.
    U64 k1 = XXH_get64bits(p);
    k1 *= PRIME64_2; k1 = XXH_rotl64(k1,31); k1 *= PRIME64_1; h64 ^= k1;
    h64 = XXH_rotl64(h64,27) * PRIME64_1 + PRIME64_4;
    h64 ^= (U64)(XXH_get32bits(p)) * PRIME64_1;
    h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
    h64 ^= (*p) * PRIME64_5;
    h64 = XXH_rotl64(h64, 11) * PRIME64_1;
unsigned long long XXH64 (const void* input, size_t len, unsigned long long seed)
{
#  if !defined(XXH_USE_UNALIGNED_ACCESS)
    // Aligned input can take the faster direct-read path.
    if ((((size_t)input) & 7) == 0)
    // Seed the four 32-bit lane accumulators of the streaming state.
    state->v1 = seed + PRIME32_1 + PRIME32_2;
    state->v2 = seed + PRIME32_2;
    state->v3 = seed + 0;
    state->v4 = seed - PRIME32_1;
    // Seed the four 64-bit lane accumulators of the streaming state.
    state->v1 = seed + PRIME64_1 + PRIME64_2;
    state->v2 = seed + PRIME64_2;
    state->v3 = seed + 0;
    state->v4 = seed - PRIME64_1;
    const BYTE* p = (const BYTE*)input;
    const BYTE* const bEnd = p + len;

#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    // Append the new input to the state's internal 16-byte buffer.
    XXH_memcpy((BYTE*)(state->mem32) + state->memsize, input, len);
    // Mix the now-complete buffered stripe from mem32 into the four lanes.
    const U32* p32 = state->mem32;
    state->v1 = XXH_rotl32(state->v1, 13);
    state->v1 *= PRIME32_1;
    state->v2 = XXH_rotl32(state->v2, 13);
    state->v2 *= PRIME32_1;
    state->v3 = XXH_rotl32(state->v3, 13);
    state->v3 *= PRIME32_1;
    state->v4 = XXH_rotl32(state->v4, 13);
    state->v4 *= PRIME32_1;
    // Bulk processing of the remaining input, 16 bytes per stripe.
    const BYTE* const limit = bEnd - 16;
    v1 = XXH_rotl32(v1, 13);
    v2 = XXH_rotl32(v2, 13);
    v3 = XXH_rotl32(v3, 13);
    v4 = XXH_rotl32(v4, 13);
    // Buffer any remaining tail bytes for the next update.
    XXH_memcpy(state->mem32, p, bEnd-p);
    state->memsize = (int)(bEnd-p);
    const BYTE* p = (const BYTE*)state->mem32;
    // Long-input branch: converge the four lane accumulators.
    h32 = XXH_rotl32(state->v1, 1) + XXH_rotl32(state->v2, 7) + XXH_rotl32(state->v3, 12) + XXH_rotl32(state->v4, 18);
    // Short-input branch: start directly from the seed.
    h32 = state->seed + PRIME32_5;
    // Tail: fold in any buffered 4-byte words, then single bytes.
    h32 = XXH_rotl32(h32, 17) * PRIME32_4;
    h32 += (*p) * PRIME32_5;
    h32 = XXH_rotl32(h32, 11) * PRIME32_1;
    const BYTE* p = (const BYTE*)input;
    const BYTE* const bEnd = p + len;

#ifdef XXH_ACCEPT_NULL_INPUT_POINTER
    // Append the new input to the state's internal 32-byte buffer.
    XXH_memcpy(((BYTE*)state->mem64) + state->memsize, input, len);
    // Mix the now-complete buffered stripe from mem64 into the four lanes.
    const U64* p64 = state->mem64;
    state->v1 = XXH_rotl64(state->v1, 31);
    state->v1 *= PRIME64_1;
    state->v2 = XXH_rotl64(state->v2, 31);
    state->v2 *= PRIME64_1;
    state->v3 = XXH_rotl64(state->v3, 31);
    state->v3 *= PRIME64_1;
    state->v4 = XXH_rotl64(state->v4, 31);
    state->v4 *= PRIME64_1;
    // Bulk processing of the remaining input, 32 bytes per stripe.
    const BYTE* const limit = bEnd - 32;
    v1 = XXH_rotl64(v1, 31);
    v2 = XXH_rotl64(v2, 31);
    v3 = XXH_rotl64(v3, 31);
    v4 = XXH_rotl64(v4, 31);
    // Buffer any remaining tail bytes for the next update.
    XXH_memcpy(state->mem64, p, bEnd-p);
    state->memsize = (int)(bEnd-p);
    const BYTE* p = (const BYTE*)state->mem64;
    // Long-input branch: merge the lanes, then fold each fully mixed lane back into h64.
    h64 = XXH_rotl64(v1, 1) + XXH_rotl64(v2, 7) + XXH_rotl64(v3, 12) + XXH_rotl64(v4, 18);
    v1 *= PRIME64_2; v1 = XXH_rotl64(v1, 31); v1 *= PRIME64_1; h64 ^= v1;
    h64 = h64*PRIME64_1 + PRIME64_4;
    v2 *= PRIME64_2; v2 = XXH_rotl64(v2, 31); v2 *= PRIME64_1; h64 ^= v2;
    h64 = h64*PRIME64_1 + PRIME64_4;
    v3 *= PRIME64_2; v3 = XXH_rotl64(v3, 31); v3 *= PRIME64_1; h64 ^= v3;
    h64 = h64*PRIME64_1 + PRIME64_4;
    v4 *= PRIME64_2; v4 = XXH_rotl64(v4, 31); v4 *= PRIME64_1; h64 ^= v4;
    h64 = h64*PRIME64_1 + PRIME64_4;
    // Short-input branch: start directly from the seed.
    h64 = state->seed + PRIME64_5;
    // Tail: fold in buffered 8-byte words, then a 4-byte word, then single bytes.
    k1 = XXH_rotl64(k1,31);
    h64 = XXH_rotl64(h64,27) * PRIME64_1 + PRIME64_4;
    h64 = XXH_rotl64(h64, 23) * PRIME64_2 + PRIME64_3;
    h64 ^= (*p) * PRIME64_5;
    h64 = XXH_rotl64(h64, 11) * PRIME64_1;