X-Git-Url: http://plrg.eecs.uci.edu/git/?a=blobdiff_plain;f=folly%2FAtomicUnorderedMap.h;h=45877e1ce475a59b5392cc92b4e40aeaf6194f26;hb=37ce60726a249bb67c885236a45d50cdb781694e;hp=941905efabebfd343e03caa8fa14939ecd4e65a8;hpb=d0889c8c36f0aa4a2773c0d9e95a2ff3eace6b89;p=folly.git

diff --git a/folly/AtomicUnorderedMap.h b/folly/AtomicUnorderedMap.h
index 941905ef..45877e1c 100644
--- a/folly/AtomicUnorderedMap.h
+++ b/folly/AtomicUnorderedMap.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2015 Facebook, Inc.
+ * Copyright 2017 Facebook, Inc.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -13,8 +13,8 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-#ifndef FOLLY_ATOMICUNORDEREDMAP_H
-#define FOLLY_ATOMICUNORDEREDMAP_H
+
+#pragma once
 
 #include 
 #include 
@@ -22,13 +22,15 @@
 #include 
 #include 
 #include 
-#include 
-#include 
-#include 
+
 #include 
 #include 
+#include 
 #include 
 #include 
+#include 
+#include 
+
 #include 
 #include 
@@ -135,8 +137,8 @@ template ::value && boost::has_trivial_destructor::value),
          template  class Atom = std::atomic,
-         typename Allocator = folly::detail::MMapAlloc,
-         typename IndexType = uint32_t>
+         typename IndexType = uint32_t,
+         typename Allocator = folly::detail::MMapAlloc>
 struct AtomicUnorderedInsertMap {
@@ -178,7 +180,7 @@ struct AtomicUnorderedInsertMap {
     }
     // post-increment
-    ConstIterator operator++ (int dummy) {
+    ConstIterator operator++(int /* dummy */) {
       auto prev = *this;
       ++*this;
       return prev;
     }
@@ -210,7 +212,7 @@ struct AtomicUnorderedInsertMap {
                                     const Allocator& alloc = Allocator())
     : allocator_(alloc)
   {
-    size_t capacity = maxSize / std::max(1.0f, maxLoadFactor) + 128;
+    size_t capacity = size_t(maxSize / std::min(1.0f, maxLoadFactor) + 128);
     size_t avail = size_t{1} << (8 * sizeof(IndexType) - 2);
     if (capacity > avail && maxSize < avail) {
       // we'll do our best
@@ -336,8 +338,7 @@ struct AtomicUnorderedInsertMap {
   }

  private:
-
-  enum {
+  enum : IndexType {
     kMaxAllocationTries = 1000, // after this we throw
   };

@@ -435,7 +436,7 @@ struct AtomicUnorderedInsertMap {
   /// Allocates a slot and returns its index. Tries to put it near
   /// slots_[start].
   IndexType allocateNear(IndexType start) {
-    for (auto tries = 0; tries < kMaxAllocationTries; ++tries) {
+    for (IndexType tries = 0; tries < kMaxAllocationTries; ++tries) {
       auto slot = allocationAttempt(start, tries);
       auto prev = slots_[slot].headAndState_.load(std::memory_order_acquire);
       if ((prev & 3) == EMPTY &&
@@ -452,13 +453,13 @@ struct AtomicUnorderedInsertMap {
   /// can specialize it differently during deterministic testing
   IndexType allocationAttempt(IndexType start, IndexType tries) const {
     if (LIKELY(tries < 8 && start + tries < numSlots_)) {
-      return start + tries;
+      return IndexType(start + tries);
     } else {
       IndexType rv;
       if (sizeof(IndexType) <= 4) {
-        rv = folly::Random::rand32(numSlots_);
+        rv = IndexType(folly::Random::rand32(numSlots_));
       } else {
-        rv = folly::Random::rand64(numSlots_);
+        rv = IndexType(folly::Random::rand64(numSlots_));
       }
       assert(rv < numSlots_);
       return rv;
@@ -493,9 +494,8 @@ using AtomicUnorderedInsertMap64 =
         KeyEqual,
         SkipKeyValueDeletion,
         Atom,
-        Allocator,
-        uint64_t>;
-
+        uint64_t,
+        Allocator>;

 /// MutableAtom is a tiny wrapper than gives you the option of atomically
 /// updating values inserted into an AtomicUnorderedInsertMap