//===--- Allocator.cpp - Simple memory allocation abstraction -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the BumpPtrAllocator interface.
//
//===----------------------------------------------------------------------===//
#include "llvm/Support/Allocator.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/DataTypes.h"
#include "llvm/Support/Memory.h"
#include "llvm/Support/Recycler.h"
#include "llvm/Support/raw_ostream.h"
#include <cstring>

namespace llvm {
24 BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold,
25 SlabAllocator &allocator)
26 : SlabSize(size), SizeThreshold(std::min(size, threshold)),
27 Allocator(allocator), CurSlab(0), BytesAllocated(0), NumSlabs(0) {}
29 BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold)
30 : SlabSize(size), SizeThreshold(std::min(size, threshold)),
31 Allocator(DefaultSlabAllocator), CurSlab(0), BytesAllocated(0),
34 BumpPtrAllocator::~BumpPtrAllocator() {
35 DeallocateSlabs(CurSlab);
38 /// StartNewSlab - Allocate a new slab and move the bump pointers over into
39 /// the new slab. Modifies CurPtr and End.
40 void BumpPtrAllocator::StartNewSlab() {
42 // Scale the actual allocated slab size based on the number of slabs
43 // allocated. Every 128 slabs allocated, we double the allocated size to
44 // reduce allocation frequency, but saturate at multiplying the slab size by
46 // FIXME: Currently, this count includes special slabs for objects above the
47 // size threshold. That will be fixed in a subsequent commit to make the
48 // growth even more predictable.
49 size_t AllocatedSlabSize =
50 SlabSize * (1 << std::min<size_t>(30, NumSlabs / 128));
52 MemSlab *NewSlab = Allocator.Allocate(AllocatedSlabSize);
53 NewSlab->NextPtr = CurSlab;
55 CurPtr = (char*)(CurSlab + 1);
56 End = ((char*)CurSlab) + CurSlab->Size;
59 /// DeallocateSlabs - Deallocate all memory slabs after and including this
61 void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
63 MemSlab *NextSlab = Slab->NextPtr;
65 // Poison the memory so stale pointers crash sooner. Note we must
66 // preserve the Size and NextPtr fields at the beginning.
67 sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
68 memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
70 Allocator.Deallocate(Slab);
76 /// Reset - Deallocate all but the current slab and reset the current pointer
77 /// to the beginning of it, freeing all memory allocated so far.
78 void BumpPtrAllocator::Reset() {
81 DeallocateSlabs(CurSlab->NextPtr);
83 CurPtr = (char*)(CurSlab + 1);
84 End = ((char*)CurSlab) + CurSlab->Size;
88 /// Allocate - Allocate space at the specified alignment.
90 void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
91 if (!CurSlab) // Start a new slab if we haven't allocated one already.
94 // Keep track of how many bytes we've allocated.
95 BytesAllocated += Size;
97 // 0-byte alignment means 1-byte alignment.
98 if (Alignment == 0) Alignment = 1;
100 // Allocate the aligned space, going forwards from CurPtr.
101 char *Ptr = alignPtr(CurPtr, Alignment);
103 // Check if we can hold it.
104 if (Ptr + Size <= End) {
106 // Update the allocation point of this memory block in MemorySanitizer.
107 // Without this, MemorySanitizer messages for values originated from here
108 // will point to the allocation of the entire slab.
109 __msan_allocated_memory(Ptr, Size);
113 // If Size is really big, allocate a separate slab for it.
114 size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
115 if (PaddedSize > SizeThreshold) {
117 MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
119 // Put the new slab after the current slab, since we are not allocating
121 NewSlab->NextPtr = CurSlab->NextPtr;
122 CurSlab->NextPtr = NewSlab;
124 Ptr = alignPtr((char*)(NewSlab + 1), Alignment);
125 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
126 __msan_allocated_memory(Ptr, Size);
130 // Otherwise, start a new slab and try again.
132 Ptr = alignPtr(CurPtr, Alignment);
134 assert(CurPtr <= End && "Unable to allocate memory!");
135 __msan_allocated_memory(Ptr, Size);
139 size_t BumpPtrAllocator::getTotalMemory() const {
140 size_t TotalMemory = 0;
141 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
142 TotalMemory += Slab->Size;
147 void BumpPtrAllocator::PrintStats() const {
148 unsigned NumSlabs = 0;
149 size_t TotalMemory = 0;
150 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
151 TotalMemory += Slab->Size;
155 errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
156 << "Bytes used: " << BytesAllocated << '\n'
157 << "Bytes allocated: " << TotalMemory << '\n'
158 << "Bytes wasted: " << (TotalMemory - BytesAllocated)
159 << " (includes alignment, etc)\n";
162 SlabAllocator::~SlabAllocator() { }
164 MallocSlabAllocator::~MallocSlabAllocator() { }
166 MemSlab *MallocSlabAllocator::Allocate(size_t Size) {
167 MemSlab *Slab = (MemSlab*)Allocator.Allocate(Size, 0);
173 void MallocSlabAllocator::Deallocate(MemSlab *Slab) {
174 Allocator.Deallocate(Slab);
177 void PrintRecyclerStats(size_t Size,
179 size_t FreeListSize) {
180 errs() << "Recycler element size: " << Size << '\n'
181 << "Recycler element alignment: " << Align << '\n'
182 << "Number of elements free for recycling: " << FreeListSize << '\n';