2 * Copyright 2012 Facebook, Inc.
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
8 * http://www.apache.org/licenses/LICENSE-2.0
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
17 #ifndef FOLLY_ARENA_H_
18 #define FOLLY_ARENA_H_
23 #include <boost/intrusive/slist.hpp>
25 #include "folly/Likely.h"
26 #include "folly/Malloc.h"
31 * Simple arena: allocate memory which gets freed when the arena gets
34 * The arena itself allocates memory using a custom allocator which provides
35 * the following interface (same as required by StlAllocator in StlAllocator.h)
37 * void* allocate(size_t size);
38 * Allocate a block of size bytes, properly aligned to the maximum
39 * alignment required on your system; throw std::bad_alloc if the
40 * allocation can't be satisfied.
42 * void deallocate(void* ptr);
43 * Deallocate a previously allocated block.
45 * You may also specialize ArenaAllocatorTraits for your allocator type to
48 * size_t goodSize(const Allocator& alloc, size_t size) const;
49 * Return a size (>= the provided size) that is considered "good" for your
50 * allocator (for example, if your allocator allocates memory in 4MB
51 * chunks, size should be rounded up to 4MB). The provided value is
52 * guaranteed to be rounded up to a multiple of the maximum alignment
53 * required on your system; the returned value must be also.
55 * An implementation that uses malloc() / free() is defined below, see
56 * SysAlloc / SysArena.
58 template <class Alloc> struct ArenaAllocatorTraits;
59 template <class Alloc>
// NOTE(review): this chunk is a sampled excerpt — original line numbers are
// embedded at the start of each line, and many lines (the `class Arena {`
// head, closing braces, the data members ptr_/end_/blocks_, several return
// statements) are not visible here. Comments below state only what the
// visible lines establish; anything else is hedged.
//
// Arena: allocates memory in blocks; individual allocations are served by
// bumping a pointer within the current block, and (per the file comment
// above) everything is freed when the arena itself goes away.
62 explicit Arena(const Alloc& alloc,
63 size_t minBlockSize = kDefaultMinBlockSize)
64 : allocAndSize_(alloc, minBlockSize),
// allocate: hand out `size` bytes owned by the arena.
71 void* allocate(size_t size) {
// NOTE(review): `end_ - ptr_` is ptrdiff_t compared against size_t `size`;
// this is safe only under the invariant end_ >= ptr_, which the visible
// lines do not show — confirm against the full file.
74 if (LIKELY(end_ - ptr_ >= size)) {
75 // Fast path: there's enough room in the current block
82 // Not enough room in the current block
// Slow path: get a fresh block via the out-of-line allocateSlow().
83 void* r = allocateSlow(size);
// deallocate: accepts pointers previously returned by allocate(). Body not
// visible in this excerpt — presumably a no-op, since arena memory is
// reclaimed wholesale; TODO confirm.
88 void deallocate(void* p) {
92 // Transfer ownership of all memory allocated from "other" to "this".
93 void merge(Arena&& other);
// Move-only: copying an Arena would duplicate ownership of its blocks.
97 Arena(const Arena&) = delete;
98 Arena& operator=(const Arena&) = delete;
101 Arena(Arena&&) = default;
102 Arena& operator=(Arena&&) = default;
// Intrusive slist hook type: lets each Block be linked into the arena's
// block list without a separate list-node allocation.
105 typedef boost::intrusive::slist_member_hook<
106 boost::intrusive::tag<Arena>> BlockLink;
111 // Allocate a block with at least size bytes of storage.
112 // If allowSlack is true, allocate more than size bytes if convenient
113 // (via ArenaAllocatorTraits::goodSize()) as we'll try to pack small
114 // allocations in this block.
115 static std::pair<Block*, size_t> allocate(
116 Alloc& alloc, size_t size, bool allowSlack);
117 void deallocate(Alloc& alloc);
// Usable storage begins immediately after the Block header itself.
120 return reinterpret_cast<char*>(this + 1);
126 } __attribute__((aligned));
127 // This should be alignas(std::max_align_t) but neither alignas nor
128 // max_align_t are supported by gcc 4.6.2.
// Default minimum data size is chosen so header + data total exactly 4096
// bytes.
131 static constexpr size_t kDefaultMinBlockSize = 4096 - sizeof(Block);
// Alignment guaranteed for pointers handed out by the arena; Block carries
// __attribute__((aligned)) above so alignof(Block) is the platform maximum.
134 static constexpr size_t maxAlign = alignof(Block);
135 static constexpr bool isAligned(uintptr_t address) {
136 return (address & (maxAlign - 1)) == 0;
138 static bool isAligned(void* p) {
139 return isAligned(reinterpret_cast<uintptr_t>(p));
142 // Round up size so it's properly aligned
143 static constexpr size_t roundUp(size_t size) {
144 return (size + maxAlign - 1) & ~(maxAlign - 1);
147 // cache_last<true> makes the list keep a pointer to the last element, so we
148 // have push_back() and constant time splice_after()
149 typedef boost::intrusive::slist<
151 boost::intrusive::member_hook<Block, BlockLink, &Block::link>,
152 boost::intrusive::constant_time_size<false>,
153 boost::intrusive::cache_last<true>> BlockList;
// Out-of-line slow path for allocate(); implementation lives in
// Arena-inl.h, included at the bottom of this header.
155 void* allocateSlow(size_t size);
157 // Empty member optimization: package Alloc with a non-empty member
158 // in case Alloc is empty (as it is in the case of SysAlloc).
159 struct AllocAndSize : public Alloc {
160 explicit AllocAndSize(const Alloc& a, size_t s)
161 : Alloc(a), minBlockSize(s) {
// Accessors unwrapping the EBO bundle above.
167 size_t minBlockSize() const {
168 return allocAndSize_.minBlockSize;
170 Alloc& alloc() { return allocAndSize_; }
171 const Alloc& alloc() const { return allocAndSize_; }
173 AllocAndSize allocAndSize_;
180 * By default, don't pad the given size.
182 template <class Alloc>
// Default traits: identity goodSize(). Allocators that round allocations up
// (e.g. malloc-backed ones) specialize this to report the real usable size.
183 struct ArenaAllocatorTraits {
184 static size_t goodSize(const Alloc& alloc, size_t size) {
// (body not visible in this excerpt — presumably `return size;` given the
// "don't pad" comment above; confirm against the full file)
190 * Arena-compatible allocator that calls malloc() and free(); see
191 * goodMallocSize() in Malloc.h for goodSize().
// NOTE(review): the `class SysAlloc {` head is not visible in this excerpt.
// allocate: malloc-backed; satisfies the Arena allocator interface described
// at the top of the file. Throws std::bad_alloc when malloc returns null.
195 void* allocate(size_t size) {
196 void* mem = malloc(size);
197 if (!mem) throw std::bad_alloc();
// deallocate: body not visible here — presumably `free(p);` per the class
// comment above; confirm against the full file.
201 void deallocate(void* p) {
// Traits specialization for SysAlloc (the `template <>` line is not visible
// in this excerpt): defer to goodMallocSize() so block sizes line up with
// what malloc will actually return.
207 struct ArenaAllocatorTraits<SysAlloc> {
208 static size_t goodSize(const SysAlloc& alloc, size_t size) {
209 return goodMallocSize(size);
214 * Arena that uses the system allocator (malloc / free)
// Convenience subclass: an Arena<SysAlloc> constructed with a
// default-constructed SysAlloc; only the minimum block size is configurable.
216 class SysArena : public Arena<SysAlloc> {
218 explicit SysArena(size_t minBlockSize = kDefaultMinBlockSize)
219 : Arena<SysAlloc>(SysAlloc(), minBlockSize) {
225 #include "folly/Arena-inl.h"
227 #endif /* FOLLY_ARENA_H_ */