/*
 * Copyright 2017 Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <functional>
#include <iterator>
#include <limits>
#include <string>
#include <tuple>
#include <type_traits>
#include <utility>

#include <folly/ApplyTuple.h>
#include <folly/Bits.h>
#include <folly/SpookyHashV1.h>
#include <folly/SpookyHashV2.h>
/*
 * Various hashing functions.
 */
36 namespace folly { namespace hash {
38 // This is a general-purpose way to create a single hash from multiple
39 // hashable objects. hash_combine_generic takes a class Hasher implementing
40 // hash<T>; hash_combine uses a default hasher StdHasher that uses std::hash.
41 // hash_combine_generic hashes each argument and combines those hashes in
42 // an order-dependent way to yield a new hash.
// This is the Hash128to64 function from Google's cityhash (available
// under the MIT License).  We use it to reduce multiple 64 bit hashes
// into a single hash.
inline uint64_t hash_128_to_64(const uint64_t upper, const uint64_t lower) {
  // Murmur-inspired hashing: multiply-xorshift rounds so that all input
  // bits influence the result.
  const uint64_t kMul = 0x9ddfea08eb382d69ULL;
  uint64_t a = (lower ^ upper) * kMul;
  a ^= (a >> 47);
  uint64_t b = (upper ^ a) * kMul;
  b ^= (b >> 47);
  b *= kMul;
  return b;
}
// Never used, but gcc demands it.  Base case of the variadic recursion
// below: an empty argument list hashes to 0.
template <class Hasher>
inline size_t hash_combine_generic() {
  return 0;
}
// Hashes every element of [begin, end) with `hasher` and folds the results
// into `hash`, order-dependently, via hash_128_to_64.
template <class Iter,
          class Hash = std::hash<typename std::iterator_traits<Iter>::value_type>>
uint64_t hash_range(Iter begin,
                    Iter end,
                    uint64_t hash = 0,
                    Hash hasher = Hash()) {
  for (; begin != end; ++begin) {
    hash = hash_128_to_64(hash, hasher(*begin));
  }
  return hash;
}

// Forward declaration; hash_combine_generic below needs it on 32-bit builds.
inline uint32_t twang_32from64(uint64_t key);
80 template <class Hasher, typename T, typename... Ts>
81 size_t hash_combine_generic(const T& t, const Ts&... ts) {
82 size_t seed = Hasher::hash(t);
83 if (sizeof...(ts) == 0) {
86 size_t remainder = hash_combine_generic<Hasher>(ts...);
87 /* static */ if (sizeof(size_t) == sizeof(uint32_t)) {
88 return twang_32from64((uint64_t(seed) << 32) | remainder);
90 return static_cast<size_t>(hash_128_to_64(seed, remainder));
// Simply uses std::hash to hash.  Note that std::hash is not guaranteed
// to be a very good hash function; provided std::hash doesn't collide on
// the individual inputs, you are fine, but that won't be true for, say,
// strings or pairs.
class StdHasher {
 public:
  template <typename T>
  static size_t hash(const T& t) {
    return std::hash<T>()(t);
  }
};
106 template <typename T, typename... Ts>
107 size_t hash_combine(const T& t, const Ts&... ts) {
108 return hash_combine_generic<StdHasher>(t, ts...);
111 //////////////////////////////////////////////////////////////////////
/*
 * Thomas Wang 64 bit mix hash function
 */
inline uint64_t twang_mix64(uint64_t key) {
  key = (~key) + (key << 21); // key *= (1 << 21) - 1; key -= 1;
  key = key ^ (key >> 24);
  key = key + (key << 3) + (key << 8); // key *= 1 + (1 << 3) + (1 << 8)
  key = key ^ (key >> 14);
  key = key + (key << 2) + (key << 4); // key *= 1 + (1 << 2) + (1 << 4)
  key = key ^ (key >> 28);
  key = key + (key << 31); // key *= 1 + (1 << 31)
  return key;
}
/*
 * Inverse of twang_mix64
 *
 * Note that twang_unmix64 is significantly slower than twang_mix64.
 */
inline uint64_t twang_unmix64(uint64_t key) {
  // See the comments in jenkins_rev_unmix32 for an explanation as to how this
  // was generated: each multiplier is the modular multiplicative inverse
  // (mod 2^64) of the corresponding factor in twang_mix64, applied in
  // reverse order, and each xorshift is undone by xoring in all shifts.
  key *= 4611686016279904257U;
  key ^= (key >> 28) ^ (key >> 56);
  key *= 14933078535860113213U;
  key ^= (key >> 14) ^ (key >> 28) ^ (key >> 42) ^ (key >> 56);
  key *= 15244667743933553977U;
  key ^= (key >> 24) ^ (key >> 48);
  key = (key + 1) * 9223367638806167551U;
  return key;
}
/*
 * Thomas Wang downscaling hash function: mixes a 64 bit key down to 32 bits.
 */
inline uint32_t twang_32from64(uint64_t key) {
  key = (~key) + (key << 18);
  key = key ^ (key >> 31);
  key = key * 21;
  key = key ^ (key >> 11);
  key = key + (key << 6);
  key = key ^ (key >> 22);
  return (uint32_t) key;
}
/*
 * Robert Jenkins' reversible 32 bit mix hash function
 */
inline uint32_t jenkins_rev_mix32(uint32_t key) {
  key += (key << 12); // key *= (1 + (1 << 12))
  key ^= (key >> 22);
  key += (key << 4);  // key *= (1 + (1 << 4))
  key ^= (key >> 9);
  key += (key << 10); // key *= (1 + (1 << 10))
  key ^= (key >> 2);
  // key *= (1 + (1 << 7)) * (1 + (1 << 12))
  key += (key << 7);
  key += (key << 12);
  return key;
}
/*
 * Inverse of jenkins_rev_mix32
 *
 * Note that jenkins_rev_unmix32 is significantly slower than
 * jenkins_rev_mix32.
 */
inline uint32_t jenkins_rev_unmix32(uint32_t key) {
  // These are the modular multiplicative inverses (in Z_2^32) of the
  // multiplication factors in jenkins_rev_mix32, in reverse order.  They were
  // computed using the Extended Euclidean algorithm, see
  // http://en.wikipedia.org/wiki/Modular_multiplicative_inverse
  key *= 16773121U;   // inverse of (1 + (1 << 12))
  key *= 266354561U;  // inverse of (1 + (1 << 7))
  // The inverse of a ^= (a >> n) is
  // b = a
  // for (int i = n; i < 32; i += n) {
  //   b ^= (a >> i);
  // }
  key ^=
      (key >> 2) ^ (key >> 4) ^ (key >> 6) ^ (key >> 8) ^
      (key >> 10) ^ (key >> 12) ^ (key >> 14) ^ (key >> 16) ^
      (key >> 18) ^ (key >> 20) ^ (key >> 22) ^ (key >> 24) ^
      (key >> 26) ^ (key >> 28) ^ (key >> 30);
  key *= 3222273025U; // inverse of (1 + (1 << 10))
  key ^= (key >> 9) ^ (key >> 18) ^ (key >> 27);
  key *= 4042322161U; // inverse of (1 + (1 << 4))
  key ^= (key >> 22);
  key *= 16773121U;   // inverse of (1 + (1 << 12))
  return key;
}
/*
 * Fowler / Noll / Vo (FNV) Hash
 *     http://www.isthe.com/chongo/tech/comp/fnv/
 */

// FNV-1 offset bases for the 32- and 64-bit variants.
const uint32_t FNV_32_HASH_START = 2166136261UL;
const uint64_t FNV_64_HASH_START = 14695981039346656037ULL;

// FNV-1 of a NUL-terminated string; the shift-add sequence multiplies
// `hash` by the 32-bit FNV prime 16777619 before xoring in each byte.
inline uint32_t fnv32(const char* buf, uint32_t hash = FNV_32_HASH_START) {
  // forcing signed char, since other platforms can use unsigned
  const signed char* s = reinterpret_cast<const signed char*>(buf);

  for (; *s; ++s) {
    hash += (hash << 1) + (hash << 4) + (hash << 7) +
            (hash << 8) + (hash << 24);
    hash ^= *s;
  }
  return hash;
}
230 inline uint32_t fnv32_buf(const void* buf,
232 uint32_t hash = FNV_32_HASH_START) {
233 // forcing signed char, since other platforms can use unsigned
234 const signed char* char_buf = reinterpret_cast<const signed char*>(buf);
236 for (size_t i = 0; i < n; ++i) {
237 hash += (hash << 1) + (hash << 4) + (hash << 7) +
238 (hash << 8) + (hash << 24);
245 inline uint32_t fnv32(const std::string& str,
246 uint32_t hash = FNV_32_HASH_START) {
247 return fnv32_buf(str.data(), str.size(), hash);
250 inline uint64_t fnv64(const char* buf, uint64_t hash = FNV_64_HASH_START) {
251 // forcing signed char, since other platforms can use unsigned
252 const signed char* s = reinterpret_cast<const signed char*>(buf);
255 hash += (hash << 1) + (hash << 4) + (hash << 5) + (hash << 7) +
256 (hash << 8) + (hash << 40);
262 inline uint64_t fnv64_buf(const void* buf,
264 uint64_t hash = FNV_64_HASH_START) {
265 // forcing signed char, since other platforms can use unsigned
266 const signed char* char_buf = reinterpret_cast<const signed char*>(buf);
268 for (size_t i = 0; i < n; ++i) {
269 hash += (hash << 1) + (hash << 4) + (hash << 5) + (hash << 7) +
270 (hash << 8) + (hash << 40);
276 inline uint64_t fnv64(const std::string& str,
277 uint64_t hash = FNV_64_HASH_START) {
278 return fnv64_buf(str.data(), str.size(), hash);
282 * Paul Hsieh: http://www.azillionmonkeys.com/qed/hash.html
285 #define get16bits(d) folly::loadUnaligned<uint16_t>(d)
287 inline uint32_t hsieh_hash32_buf(const void* buf, size_t len) {
288 // forcing signed char, since other platforms can use unsigned
289 const unsigned char* s = reinterpret_cast<const unsigned char*>(buf);
290 uint32_t hash = static_cast<uint32_t>(len);
294 if (len <= 0 || buf == 0) {
302 for (;len > 0; len--) {
303 hash += get16bits (s);
304 tmp = (get16bits (s+2) << 11) ^ hash;
305 hash = (hash << 16) ^ tmp;
306 s += 2*sizeof (uint16_t);
310 /* Handle end cases */
313 hash += get16bits(s);
315 hash ^= s[sizeof (uint16_t)] << 18;
319 hash += get16bits(s);
329 /* Force "avalanching" of final 127 bits */
342 inline uint32_t hsieh_hash32(const char* s) {
343 return hsieh_hash32_buf(s, std::strlen(s));
346 inline uint32_t hsieh_hash32_str(const std::string& str) {
347 return hsieh_hash32_buf(str.data(), str.size());
350 //////////////////////////////////////////////////////////////////////
354 template<class Key, class Enable = void>
359 size_t operator()(const T& v) const {
360 return hasher<T>()(v);
363 template <class T, class... Ts>
364 size_t operator()(const T& t, const Ts&... ts) const {
365 return hash::hash_128_to_64((*this)(t), (*this)(ts...));
370 struct hasher<bool> {
371 size_t operator()(bool key) const {
372 // Make sure that all the output bits depend on the input.
373 return key ? std::numeric_limits<size_t>::max() : 0;
377 template<> struct hasher<int32_t> {
378 size_t operator()(int32_t key) const {
379 return hash::jenkins_rev_mix32(uint32_t(key));
383 template<> struct hasher<uint32_t> {
384 size_t operator()(uint32_t key) const {
385 return hash::jenkins_rev_mix32(key);
389 template<> struct hasher<int16_t> {
390 size_t operator()(int16_t key) const {
391 return hasher<int32_t>()(key); // as impl accident, sign-extends
395 template<> struct hasher<uint16_t> {
396 size_t operator()(uint16_t key) const {
397 return hasher<uint32_t>()(key);
401 template<> struct hasher<int8_t> {
402 size_t operator()(int8_t key) const {
403 return hasher<int32_t>()(key); // as impl accident, sign-extends
407 template<> struct hasher<uint8_t> {
408 size_t operator()(uint8_t key) const {
409 return hasher<uint32_t>()(key);
413 template<> struct hasher<char> {
414 using explicit_type =
415 std::conditional<std::is_signed<char>::value, int8_t, uint8_t>::type;
416 size_t operator()(char key) const {
417 return hasher<explicit_type>()(key); // as impl accident, sign-extends
421 template<> struct hasher<int64_t> {
422 size_t operator()(int64_t key) const {
423 return static_cast<size_t>(hash::twang_mix64(uint64_t(key)));
427 template<> struct hasher<uint64_t> {
428 size_t operator()(uint64_t key) const {
429 return static_cast<size_t>(hash::twang_mix64(key));
433 template<> struct hasher<std::string> {
434 size_t operator()(const std::string& key) const {
435 return static_cast<size_t>(
436 hash::SpookyHashV2::Hash64(key.data(), key.size(), 0));
441 struct hasher<T, typename std::enable_if<std::is_enum<T>::value, void>::type> {
442 size_t operator()(T key) const {
443 return Hash()(static_cast<typename std::underlying_type<T>::type>(key));
447 template <class T1, class T2>
448 struct hasher<std::pair<T1, T2>> {
449 size_t operator()(const std::pair<T1, T2>& key) const {
450 return Hash()(key.first, key.second);
454 template <typename... Ts>
455 struct hasher<std::tuple<Ts...>> {
456 size_t operator() (const std::tuple<Ts...>& key) const {
457 return applyTuple(Hash(), key);
462 template <size_t index, typename... Ts>
464 size_t operator()(std::tuple<Ts...> const& key) const {
465 return hash::hash_combine(
466 TupleHasher<index - 1, Ts...>()(key),
467 std::get<index>(key));
472 template <typename... Ts>
473 struct TupleHasher<0, Ts...> {
474 size_t operator()(std::tuple<Ts...> const& key) const {
475 // we could do std::hash here directly, but hash_combine hides all the
476 // ugly templating implicitly
477 return hash::hash_combine(std::get<0>(key));
483 // Custom hash functions.
485 // Hash function for pairs. Requires default hash functions for both
486 // items in the pair.
487 template <typename T1, typename T2>
488 struct hash<std::pair<T1, T2> > {
490 size_t operator()(const std::pair<T1, T2>& x) const {
491 return folly::hash::hash_combine(x.first, x.second);
495 // Hash function for tuples. Requires default hash functions for all types.
496 template <typename... Ts>
497 struct hash<std::tuple<Ts...>> {
498 size_t operator()(std::tuple<Ts...> const& key) const {
500 std::tuple_size<std::tuple<Ts...>>::value - 1, // start index