// Prefer the compile-time byte-order macros (__BYTE_ORDER et al.) when
// glibc provides them; otherwise endianness is resolved at runtime below.
#if defined (__GLIBC__)
    #include <endian.h>
#endif

//- Rotate the 32-bit value x left by nBits (valid for 1 <= nBits <= 31;
//  nBits == 0 or 32 would shift by the full width, which is undefined)
#define bitRotateLeft(x, nBits) (((x) << (nBits)) | ((x) >> (32 - (nBits))))
//- Mix three 32-bit values reversibly (Bob Jenkins lookup3 mix()).
//  Any bit of (a,b,c) may affect any bit of the outputs; used on each
//  12-byte block of the key before the final mix.
#define bitMixer(a, b, c)                                                     \
    {                                                                         \
        a -= c; a ^= bitRotateLeft(c, 4); c += b;                             \
        b -= a; b ^= bitRotateLeft(a, 6); a += c;                             \
        c -= b; c ^= bitRotateLeft(b, 8); b += a;                             \
        a -= c; a ^= bitRotateLeft(c,16); c += b;                             \
        b -= a; b ^= bitRotateLeft(a,19); a += c;                             \
        c -= b; c ^= bitRotateLeft(b, 4); b += a;                             \
    }
//- Final mixing of three 32-bit values (Bob Jenkins lookup3 final()).
//  Applied once after the last (possibly partial) block; the hash result
//  is taken from c afterwards.
#define bitMixerFinal(a, b, c)                                                \
    {                                                                         \
        c ^= b; c -= bitRotateLeft(b, 14);                                    \
        a ^= c; a -= bitRotateLeft(c, 11);                                    \
        b ^= a; b -= bitRotateLeft(a, 25);                                    \
        c ^= b; c -= bitRotateLeft(b, 16);                                    \
        a ^= c; a -= bitRotateLeft(c, 4);                                     \
        b ^= a; b -= bitRotateLeft(a, 14);                                    \
        c ^= b; c -= bitRotateLeft(b, 24);                                    \
    }
191 #if !defined (__BYTE_ORDER) || (__BYTE_ORDER == __LITTLE_ENDIAN)
200 union {
const void *ptr;
size_t i; } u;
203 a =
b =
c = 0xdeadbeef +
static_cast<uint32_t
>(length) + initval;
206 if ((u.i & 0x3) == 0)
209 const uint32_t *
k =
reinterpret_cast<const uint32_t*
>(key);
223 const uint8_t *k8 =
reinterpret_cast<const uint8_t*
>(
k);
226 case 12:
c +=
k[2];
b +=
k[1]; a +=
k[0];
break;
227 case 11:
c +=
static_cast<uint32_t
>(k8[10]) << 16; [[fallthrough]];
228 case 10:
c +=
static_cast<uint32_t
>(k8[9]) << 8; [[fallthrough]];
229 case 9 :
c += k8[8]; [[fallthrough]];
230 case 8 :
b +=
k[1]; a +=
k[0];
break;
231 case 7 :
b +=
static_cast<uint32_t
>(k8[6]) << 16; [[fallthrough]];
232 case 6 :
b +=
static_cast<uint32_t
>(k8[5]) << 8; [[fallthrough]];
233 case 5 :
b += k8[4]; [[fallthrough]];
234 case 4 : a +=
k[0];
break;
235 case 3 : a +=
static_cast<uint32_t
>(k8[2]) << 16; [[fallthrough]];
236 case 2 : a +=
static_cast<uint32_t
>(k8[1]) << 8; [[fallthrough]];
237 case 1 : a += k8[0];
break;
241 else if ((u.i & 0x1) == 0)
244 const uint16_t *
k =
reinterpret_cast<const uint16_t*
>(key);
249 a +=
k[0] + (
static_cast<uint32_t
>(
k[1]) << 16);
250 b +=
k[2] + (
static_cast<uint32_t
>(
k[3]) << 16);
251 c +=
k[4] + (
static_cast<uint32_t
>(
k[5]) << 16);
258 const uint8_t *k8 =
reinterpret_cast<const uint8_t*
>(
k);
262 c +=
k[4] + (
static_cast<uint32_t
>(
k[5]) << 16);
263 b +=
k[2] + (
static_cast<uint32_t
>(
k[3]) << 16);
264 a +=
k[0] + (
static_cast<uint32_t
>(
k[1]) << 16);
267 c +=
static_cast<uint32_t
>(k8[10]) << 16;
271 b +=
k[2] + (
static_cast<uint32_t
>(
k[3]) << 16);
272 a +=
k[0] + (
static_cast<uint32_t
>(
k[1]) << 16);
278 b +=
k[2] + (
static_cast<uint32_t
>(
k[3]) << 16);
279 a +=
k[0] + (
static_cast<uint32_t
>(
k[1]) << 16);
282 b +=
static_cast<uint32_t
>(k8[6]) << 16;
286 a +=
k[0] + (
static_cast<uint32_t
>(
k[1]) << 16);
292 a +=
k[0] + (
static_cast<uint32_t
>(
k[1]) << 16);
295 a +=
static_cast<uint32_t
>(k8[2]) << 16;
308 const uint8_t *
k =
reinterpret_cast<const uint8_t*
>(key);
314 a +=
static_cast<uint32_t
>(
k[1]) << 8;
315 a +=
static_cast<uint32_t
>(
k[2]) << 16;
316 a +=
static_cast<uint32_t
>(
k[3]) << 24;
318 b +=
static_cast<uint32_t
>(
k[5]) << 8;
319 b +=
static_cast<uint32_t
>(
k[6]) << 16;
320 b +=
static_cast<uint32_t
>(
k[7]) << 24;
322 c +=
static_cast<uint32_t
>(
k[9]) << 8;
323 c +=
static_cast<uint32_t
>(
k[10]) << 16;
324 c +=
static_cast<uint32_t
>(
k[11]) << 24;
334 case 12:
c +=
static_cast<uint32_t
>(
k[11]) << 24; [[fallthrough]];
335 case 11:
c +=
static_cast<uint32_t
>(
k[10]) << 16; [[fallthrough]];
336 case 10:
c +=
static_cast<uint32_t
>(
k[9]) << 8; [[fallthrough]];
337 case 9 :
c +=
k[8]; [[fallthrough]];
339 case 8 :
b +=
static_cast<uint32_t
>(
k[7]) << 24; [[fallthrough]];
340 case 7 :
b +=
static_cast<uint32_t
>(
k[6]) << 16; [[fallthrough]];
341 case 6 :
b +=
static_cast<uint32_t
>(
k[5]) << 8; [[fallthrough]];
342 case 5 :
b +=
k[4]; [[fallthrough]];
344 case 4 : a +=
static_cast<uint32_t
>(
k[3]) << 24; [[fallthrough]];
345 case 3 : a +=
static_cast<uint32_t
>(
k[2]) << 16; [[fallthrough]];
346 case 2 : a +=
static_cast<uint32_t
>(
k[1]) << 8; [[fallthrough]];
369 #if !defined (__BYTE_ORDER) || (__BYTE_ORDER == __BIG_ENDIAN)
378 union {
const void *ptr;
size_t i; } u;
381 a =
b =
c = 0xdeadbeef +
static_cast<uint32_t
>(length) + initval;
384 if ((u.i & 0x3) == 0)
387 const uint32_t *
k =
reinterpret_cast<const uint32_t*
>(key);
401 const uint8_t *k8 =
reinterpret_cast<const uint8_t*
>(
k);
405 case 12:
c +=
k[2];
b +=
k[1]; a +=
k[0];
break;
406 case 11:
c +=
static_cast<uint32_t
>(k8[10]) << 8; [[fallthrough]];
407 case 10:
c +=
static_cast<uint32_t
>(k8[9]) << 16; [[fallthrough]];
408 case 9 :
c +=
static_cast<uint32_t
>(k8[8]) << 24; [[fallthrough]];
409 case 8 :
b +=
k[1]; a +=
k[0];
break;
410 case 7 :
b +=
static_cast<uint32_t
>(k8[6]) << 8; [[fallthrough]];
411 case 6 :
b +=
static_cast<uint32_t
>(k8[5]) << 16; [[fallthrough]];
412 case 5 :
b +=
static_cast<uint32_t
>(k8[4]) << 24; [[fallthrough]];
413 case 4 : a +=
k[0];
break;
414 case 3 : a +=
static_cast<uint32_t
>(k8[2]) << 8; [[fallthrough]];
415 case 2 : a +=
static_cast<uint32_t
>(k8[1]) << 16; [[fallthrough]];
416 case 1 : a +=
static_cast<uint32_t
>(k8[0]) << 24;
break;
423 const uint8_t *
k =
reinterpret_cast<const uint8_t*
>(key);
428 a +=
static_cast<uint32_t
>(
k[0]) << 24;
429 a +=
static_cast<uint32_t
>(
k[1]) << 16;
430 a +=
static_cast<uint32_t
>(
k[2]) << 8;
431 a +=
static_cast<uint32_t
>(
k[3]);
432 b +=
static_cast<uint32_t
>(
k[4]) << 24;
433 b +=
static_cast<uint32_t
>(
k[5]) << 16;
434 b +=
static_cast<uint32_t
>(
k[6]) << 8;
435 b +=
static_cast<uint32_t
>(
k[7]);
436 c +=
static_cast<uint32_t
>(
k[8]) << 24;
437 c +=
static_cast<uint32_t
>(
k[9]) << 16;
438 c +=
static_cast<uint32_t
>(
k[10]) << 8;
439 c +=
static_cast<uint32_t
>(
k[11]);
449 case 12:
c +=
k[11]; [[fallthrough]];
450 case 11:
c +=
static_cast<uint32_t
>(
k[10]) << 8; [[fallthrough]];
451 case 10:
c +=
static_cast<uint32_t
>(
k[9]) << 16; [[fallthrough]];
452 case 9 :
c +=
static_cast<uint32_t
>(
k[8]) << 24; [[fallthrough]];
453 case 8 :
b +=
k[7]; [[fallthrough]];
454 case 7 :
b +=
static_cast<uint32_t
>(
k[6]) << 8; [[fallthrough]];
455 case 6 :
b +=
static_cast<uint32_t
>(
k[5]) << 16; [[fallthrough]];
456 case 5 :
b +=
static_cast<uint32_t
>(
k[4]) << 24; [[fallthrough]];
457 case 4 : a +=
k[3]; [[fallthrough]];
458 case 3 : a +=
static_cast<uint32_t
>(
k[2]) << 8; [[fallthrough]];
459 case 2 : a +=
static_cast<uint32_t
>(
k[1]) << 16; [[fallthrough]];
460 case 1 : a +=
static_cast<uint32_t
>(
k[0]) << 24; [[fallthrough]];
483 #if (__BYTE_ORDER == __BIG_ENDIAN)
490 const short endianTest = 0x0100;
493 if (*(
reinterpret_cast<const char*
>(&endianTest)))
526 a =
b =
c = 0xdeadbeef + (
static_cast<uint32_t
>(length) << 2) + seed;
542 case 3 :
c +=
k[2]; [[fallthrough]];
543 case 2 :
b +=
k[1]; [[fallthrough]];
572 a =
b =
c = 0xdeadbeef + (
static_cast<uint32_t
>(length) << 2) + hash1;
589 case 3 :
c +=
k[2]; [[fallthrough]];
590 case 2 :
b +=
k[1]; [[fallthrough]];
Optimised hashing functions.
#define bitMixerFinal(a, b, c)
static unsigned jenkins_hashlittle(const void *key, size_t length, unsigned initval)
#define bitMixer(a, b, c)
static unsigned jenkins_hashbig(const void *key, size_t length, unsigned initval)
Misc. hashing functions, mostly from Bob Jenkins.
const dimensionedScalar c
Speed of light in a vacuum.
unsigned Hasher(const void *data, size_t len, unsigned seed=0)
Bob Jenkins's 96-bit mixer hashing function (lookup3)
unsigned HasherDual(const uint32_t *data, size_t length, unsigned &hash1, unsigned &hash2)
An optimised version of Hasher, returning dual hash values.
unsigned HasherInt(const uint32_t *data, size_t length, unsigned seed=0)
An optimised version of Hasher.