#pragma once
-#include <type_traits>
#include <assert.h>
#include <errno.h>
#include <stdint.h>
+
+#include <type_traits>
+
#include <boost/noncopyable.hpp>
#include <folly/AtomicStruct.h>
#include <folly/Portability.h>
-#include <folly/detail/CacheLocality.h>
+#include <folly/concurrency/CacheLocality.h>
#include <folly/portability/SysMman.h>
#include <folly/portability/Unistd.h>
namespace detail {
// Forward declaration only; the recycler's definition lives elsewhere in the
// file (a later detail block is visible at the bottom of this chunk).
template <typename Pool>
struct IndexedMemPoolRecycler;
-}
+} // namespace detail
template <
typename T,
/// Gives up ownership previously granted by alloc()
// Pushes idx back onto this thread's local free list. Precondition
// (checked by the assert): idx must currently be allocated.
void recycleIndex(uint32_t idx) {
assert(isAllocated(idx));
// NOTE(review): this diff removes the onRecycle hook from here; a matching
// `+` line re-adds it after the localNext store in the (elided) push path
// below — confirm the hook still runs exactly once per recycle and before
// the index becomes visible to other threads.
- Traits::onRecycle(&slot(idx).elem);
localPush(localHead(), idx);
}
Slot& s = slot(idx);
TaggedPtr h = head.load(std::memory_order_acquire);
while (true) {
- s.localNext.store(h.idx, std::memory_order_relaxed);
+ s.localNext.store(h.idx, std::memory_order_release);
+ Traits::onRecycle(&slot(idx).elem);
if (h.size() == LocalListLimit) {
// push will overflow local list, steal it instead
}
// Returns this thread's striped local free-list head. Striping across
// NumLocalLists lists (via AccessSpreader) spreads contention between
// threads recycling concurrently.
AtomicStruct<TaggedPtr,Atom>& localHead() {
// NOTE(review): the unqualified AccessSpreader in the `+` line relies on
// the include move to folly/concurrency/CacheLocality.h (top of this
// patch) and on being inside namespace folly — verify it resolves.
- auto stripe = detail::AccessSpreader<Atom>::current(NumLocalLists);
+ auto stripe = AccessSpreader<Atom>::current(NumLocalLists);
return local_[stripe].head;
}
// Flags the slot as allocated by storing a sentinel into its localNext
// link; release order publishes the mark to other threads.
// NOTE(review): uint32_t(-1) presumably means "not on any free list" —
// confirm against isAllocated()'s definition (not visible in this chunk).
void markAllocated(Slot& slot) {
slot.localNext.store(uint32_t(-1), std::memory_order_release);
}
+
+ public:
+ static constexpr std::size_t kSlotSize = sizeof(Slot);
};
namespace detail {
}
};
-}
+} // namespace detail
} // namespace folly