/* Architecture detection: these targets tolerate unaligned memory loads,
 * so the hash may read U32/U64 words directly from the input stream.
 * NOTE(review): the matching #endif is not visible in this excerpt. */
45 #if defined(__ARM_FEATURE_UNALIGNED) || defined(__i386) || defined(_M_IX86) || defined(__x86_64__) || defined(_M_X64)
46 # define XXH_USE_UNALIGNED_ACCESS 1
/* 0 = always produce the canonical little-endian xxHash result;
 * a nonzero value would hash in native byte order instead. */
64 #define XXH_FORCE_NATIVE_FORMAT 0
/* FORCE_INLINE: strongest inlining hint each compiler offers.
 * NOTE(review): the #ifdef _MSC_VER / #else lines selecting between these
 * branches are not visible in this excerpt. */
71 # pragma warning(disable : 4127)  /* MSVC C4127: "conditional expression is constant" */
72 # define FORCE_INLINE static __forceinline
74 # if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
/* C99 or later: `inline` keyword plus GCC's always_inline attribute. */
76 # define FORCE_INLINE static inline __attribute__((always_inline))
78 # define FORCE_INLINE static inline
/* Pre-C99 fallback: no inline keyword available, plain static. */
81 # define FORCE_INLINE static
/* Allocation shim: funnels every heap request through one spot so the
 * allocator can be swapped out in a single place. */
static void *XXH_malloc(size_t s)
{
    return malloc(s);
}
/* Deallocation shim paired with XXH_malloc. */
static void XXH_free(void *p)
{
    free(p);
}
/* Copy shim: funnels every block copy through one spot, mirroring
 * XXH_malloc/XXH_free. Returns dest, exactly like memcpy. */
static void *XXH_memcpy(void *dest, const void *src, size_t size)
{
    return memcpy(dest, src, size);
}
/* Fixed-width-ish integer aliases. The C99 branch (presumably <stdint.h>
 * typedefs) and the U32/BYTE typedefs are elided from this excerpt. */
106 #if defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
115 typedef unsigned short U16;
/* Pre-C99 fallback: relies on `unsigned long long` being 64-bit. */
118 typedef unsigned long long U64;
/* Unaligned-read machinery for targets where direct word loads are unsafe:
 * wrap the word in a packed struct so the compiler emits a legal
 * (byte-wise if necessary) load.
 * NOTE(review): `_PACKED` is a reserved identifier (leading underscore +
 * uppercase); the U32_S/U64_S struct definitions are elided here. */
121 #if defined(__GNUC__) && !defined(XXH_USE_UNALIGNED_ACCESS)
122 # define _PACKED __attribute__ ((packed))
127 #if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
/* Non-GCC compilers: force 1-byte packing via pragma instead of attribute. */
131 # pragma pack(push, 1)
144 #if !defined(XXH_USE_UNALIGNED_ACCESS) && !defined(__GNUC__)
/* A32/A64: read a 32-/64-bit word at x through the packed wrapper struct. */
148 #define A32(x) (((U32_S *)(x))->v)
149 #define A64(x) (((U64_S *)(x))->v)
/* Combined GCC version number, e.g. 4.3 -> 403; used to gate builtins. */
#define GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)

/* Rotate-left helpers. MSVC provides rotate intrinsics; everywhere else we
 * use the portable shift/or form. Arguments are fully parenthesized so an
 * expression argument (e.g. XXH_rotl32(a ^ b, r)) expands correctly —
 * the previous definition evaluated `x << r` without parentheses, which
 * mis-binds against lower-precedence operators in the argument. */
#if defined(_MSC_VER)
#  define XXH_rotl32(x,r) _rotl(x,r)
#  define XXH_rotl64(x,r) _rotl64(x,r)
#else
#  define XXH_rotl32(x,r) (((x) << (r)) | ((x) >> (32 - (r))))
#  define XXH_rotl64(x,r) (((x) << (r)) | ((x) >> (64 - (r))))
#endif
/* Byte-swap selection: prefer compiler intrinsics, falling back to the
 * hand-written XXH_swap32/XXH_swap64 functions that follow.
 * NOTE(review): the #else/#endif closing this chain is elided here. */
166 #if defined(_MSC_VER)
167 # define XXH_swap32 _byteswap_ulong
168 # define XXH_swap64 _byteswap_uint64
/* GCC >= 4.3 ships __builtin_bswap32/64. */
169 #elif GCC_VERSION >= 403
170 # define XXH_swap32 __builtin_bswap32
171 # define XXH_swap64 __builtin_bswap64
173 static U32 XXH_swap32 (
U32 x)
175 return ((x << 24) & 0xff000000 ) |
176 ((x << 8) & 0x00ff0000 ) |
177 ((x >> 8) & 0x0000ff00 ) |
178 ((x >> 24) & 0x000000ff );
180 static U64 XXH_swap64 (
U64 x)
182 return ((x << 56) & 0xff00000000000000ULL) |
183 ((x << 40) & 0x00ff000000000000ULL) |
184 ((x << 24) & 0x0000ff0000000000ULL) |
185 ((x << 8) & 0x000000ff00000000ULL) |
186 ((x >> 8) & 0x00000000ff000000ULL) |
187 ((x >> 24) & 0x0000000000ff0000ULL) |
188 ((x >> 40) & 0x000000000000ff00ULL) |
189 ((x >> 56) & 0x00000000000000ffULL);
/* Multiplicative mixing constants for the 32-bit variant. */
197 #define PRIME32_1 2654435761U
198 #define PRIME32_2 2246822519U
199 #define PRIME32_3 3266489917U
200 #define PRIME32_4 668265263U
201 #define PRIME32_5 374761393U
/* Multiplicative mixing constants for the 64-bit variant. */
203 #define PRIME64_1 11400714785074694791ULL
204 #define PRIME64_2 14029467366897019727ULL
205 #define PRIME64_3 1609587929392839161ULL
206 #define PRIME64_4 9650029242287828579ULL
207 #define PRIME64_5 2870177450012600261ULL
/* Runtime endianness probe: the first byte of the int 1 is 1 on a
 * little-endian CPU and 0 on a big-endian one. Overridable by defining
 * XXH_CPU_LITTLE_ENDIAN ahead of time. */
214 #ifndef XXH_CPU_LITTLE_ENDIAN
215 static const int one = 1;
216 # define XXH_CPU_LITTLE_ENDIAN (*(char*)(&one))
/* Compile-time assertion: when c is false, 1/0 forces a constant-expression
 * division-by-zero diagnostic inside the enum. */
223 #define XXH_STATIC_ASSERT(c) { enum { XXH_static_assert = 1/(!!(c)) }; }
/* Fragment of the 32-bit core hashing routine; its signature and most of
 * its body are elided from this excerpt. */
264 const BYTE* bEnd = p + len;
/* Read one little-endian U32 honouring the caller-supplied endianness/alignment. */
266 #define XXH_get32bits(p) XXH_readLE32_align(p, endian, align)
268 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER
/* NOTE(review): appears to substitute a small non-NULL sentinel pointer when
 * a NULL input is tolerated — confirm against the unabridged source. */
272 bEnd=p=(
const BYTE*)(
size_t)16;
/* Last position from which a full 16-byte stripe can still be read. */
278 const BYTE*
const limit = bEnd - 16;
/* Public one-shot 32-bit hash entry point (body largely elided here). */
338 unsigned int XXH32 (
const void* input,
size_t len,
unsigned seed)
/* On targets without cheap unaligned access, dispatch on whether the input
 * pointer is 4-byte aligned ((uintptr & 3) == 0). */
349 # if !defined(XXH_USE_UNALIGNED_ACCESS)
350 if ((((
size_t)input) & 3) == 0)
/* Fragment of the 64-bit core hashing routine; its signature and most of
 * its body are elided from this excerpt. */
369 const BYTE* bEnd = p + len;
/* Read one little-endian U64 honouring the caller-supplied endianness/alignment. */
371 #define XXH_get64bits(p) XXH_readLE64_align(p, endian, align)
373 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER
/* NOTE(review): appears to substitute a small non-NULL sentinel pointer when
 * a NULL input is tolerated — confirm against the unabridged source. */
377 bEnd=p=(
const BYTE*)(
size_t)32;
/* Last position from which a full 32-byte stripe can still be read. */
383 const BYTE*
const limit = bEnd - 32;
/* Public one-shot 64-bit hash entry point (body largely elided here). */
478 unsigned long long XXH64 (
const void* input,
size_t len,
unsigned long long seed)
/* On targets without cheap unaligned access, dispatch on whether the input
 * pointer is 8-byte aligned ((uintptr & 7) == 0). */
489 # if !defined(XXH_USE_UNALIGNED_ACCESS)
490 if ((((
size_t)input) & 7)==0)
/* Accumulator-lane initialisation fragments from the two streaming reset
 * routines (surrounding lines elided). The "+ 0" presumably keeps v3's
 * initialiser visually parallel with the other lanes (v1/v2/v4 combine the
 * seed with PRIME constants) — confirm against the unabridged source. */
567 state->
v3 = seed + 0;
580 state->
v3 = seed + 0;
/* Fragment of the 32-bit streaming update routine (header and main loop
 * elided from this excerpt). */
592 const BYTE*
const bEnd = p + len;
594 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER
/* Last position from which a full 16-byte stripe can still be read. */
635 const BYTE*
const limit = bEnd - 16;
/* Buffer the trailing partial stripe (< 16 bytes) into the state so the
 * next update/digest call can finish it. */
670 XXH_memcpy(state->
mem32, p, bEnd-p);
/* Record how many tail bytes are now buffered. */
671 state->
memsize = (int)(bEnd-p);
/* Fragment of the 64-bit streaming update routine (header and main loop
 * elided from this excerpt). */
746 const BYTE*
const bEnd = p + len;
748 #ifdef XXH_ACCEPT_NULL_INPUT_POINTER
/* Last position from which a full 32-byte stripe can still be read. */
789 const BYTE*
const limit = bEnd - 32;
/* Buffer the trailing partial stripe (< 32 bytes) into the state so the
 * next update/digest call can finish it. */
824 XXH_memcpy(state->
mem64, p, bEnd-p);
/* Record how many tail bytes are now buffered. */
825 state->
memsize = (int)(bEnd-p);