blob: 215b0f249d96f82cda56763998be5326fae3569e [file] [log] [blame]
//===--- Allocator.cpp - Simple memory allocation abstraction -------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the BumpPtrAllocator interface.
//
//===----------------------------------------------------------------------===//
13
14#include "llvm/Support/Allocator.h"
Michael J. Spencer1f6efa32010-11-29 18:16:10 +000015#include "llvm/Support/DataTypes.h"
Reid Kleckner8f51a622009-07-23 18:34:13 +000016#include "llvm/Support/Recycler.h"
Daniel Dunbar7da95592009-07-24 04:01:01 +000017#include "llvm/Support/raw_ostream.h"
Michael J. Spencer1f6efa32010-11-29 18:16:10 +000018#include "llvm/Support/Memory.h"
Reid Kleckner8f51a622009-07-23 18:34:13 +000019#include <cstring>
Chris Lattner9f617d62006-10-29 22:08:03 +000020
Reid Kleckner8f51a622009-07-23 18:34:13 +000021namespace llvm {
Chris Lattner9f617d62006-10-29 22:08:03 +000022
Reid Kleckner8f51a622009-07-23 18:34:13 +000023BumpPtrAllocator::BumpPtrAllocator(size_t size, size_t threshold,
24 SlabAllocator &allocator)
25 : SlabSize(size), SizeThreshold(threshold), Allocator(allocator),
Benjamin Kramer5e6a7052010-04-13 14:41:51 +000026 CurSlab(0), BytesAllocated(0) { }
Chris Lattner9f617d62006-10-29 22:08:03 +000027
28BumpPtrAllocator::~BumpPtrAllocator() {
Reid Kleckner8f51a622009-07-23 18:34:13 +000029 DeallocateSlabs(CurSlab);
Chris Lattner9f617d62006-10-29 22:08:03 +000030}
31
Reid Kleckner8f51a622009-07-23 18:34:13 +000032/// AlignPtr - Align Ptr to Alignment bytes, rounding up. Alignment should
33/// be a power of two. This method rounds up, so AlignPtr(7, 4) == 8 and
34/// AlignPtr(8, 4) == 8.
35char *BumpPtrAllocator::AlignPtr(char *Ptr, size_t Alignment) {
36 assert(Alignment && (Alignment & (Alignment - 1)) == 0 &&
37 "Alignment is not a power of two!");
38
39 // Do the alignment.
40 return (char*)(((uintptr_t)Ptr + Alignment - 1) &
41 ~(uintptr_t)(Alignment - 1));
42}
43
44/// StartNewSlab - Allocate a new slab and move the bump pointers over into
45/// the new slab. Modifies CurPtr and End.
46void BumpPtrAllocator::StartNewSlab() {
Benjamin Kramer82d96cc2010-09-30 16:18:28 +000047 // If we allocated a big number of slabs already it's likely that we're going
48 // to allocate more. Increase slab size to reduce mallocs and possibly memory
49 // overhead. The factors are chosen conservatively to avoid overallocation.
50 if (BytesAllocated >= SlabSize * 128)
51 SlabSize *= 2;
52
Reid Kleckner8f51a622009-07-23 18:34:13 +000053 MemSlab *NewSlab = Allocator.Allocate(SlabSize);
54 NewSlab->NextPtr = CurSlab;
55 CurSlab = NewSlab;
56 CurPtr = (char*)(CurSlab + 1);
57 End = ((char*)CurSlab) + CurSlab->Size;
58}
59
60/// DeallocateSlabs - Deallocate all memory slabs after and including this
61/// one.
62void BumpPtrAllocator::DeallocateSlabs(MemSlab *Slab) {
63 while (Slab) {
64 MemSlab *NextSlab = Slab->NextPtr;
65#ifndef NDEBUG
66 // Poison the memory so stale pointers crash sooner. Note we must
67 // preserve the Size and NextPtr fields at the beginning.
Evan Chengc48edbb2009-09-09 01:45:24 +000068 sys::Memory::setRangeWritable(Slab + 1, Slab->Size - sizeof(MemSlab));
Reid Kleckner8f51a622009-07-23 18:34:13 +000069 memset(Slab + 1, 0xCD, Slab->Size - sizeof(MemSlab));
70#endif
71 Allocator.Deallocate(Slab);
72 Slab = NextSlab;
73 }
74}
75
76/// Reset - Deallocate all but the current slab and reset the current pointer
77/// to the beginning of it, freeing all memory allocated so far.
Evan Cheng188b5222007-09-05 21:41:34 +000078void BumpPtrAllocator::Reset() {
Benjamin Kramerb0322e62010-04-13 16:38:06 +000079 if (!CurSlab)
80 return;
Reid Kleckner8f51a622009-07-23 18:34:13 +000081 DeallocateSlabs(CurSlab->NextPtr);
82 CurSlab->NextPtr = 0;
83 CurPtr = (char*)(CurSlab + 1);
84 End = ((char*)CurSlab) + CurSlab->Size;
Evan Cheng188b5222007-09-05 21:41:34 +000085}
86
Reid Kleckner8f51a622009-07-23 18:34:13 +000087/// Allocate - Allocate space at the specified alignment.
88///
89void *BumpPtrAllocator::Allocate(size_t Size, size_t Alignment) {
Benjamin Kramer5e6a7052010-04-13 14:41:51 +000090 if (!CurSlab) // Start a new slab if we haven't allocated one already.
91 StartNewSlab();
92
Reid Kleckner8f51a622009-07-23 18:34:13 +000093 // Keep track of how many bytes we've allocated.
94 BytesAllocated += Size;
95
96 // 0-byte alignment means 1-byte alignment.
97 if (Alignment == 0) Alignment = 1;
98
99 // Allocate the aligned space, going forwards from CurPtr.
100 char *Ptr = AlignPtr(CurPtr, Alignment);
101
102 // Check if we can hold it.
103 if (Ptr + Size <= End) {
104 CurPtr = Ptr + Size;
105 return Ptr;
106 }
107
108 // If Size is really big, allocate a separate slab for it.
Reid Kleckner7d509132009-07-25 21:26:02 +0000109 size_t PaddedSize = Size + sizeof(MemSlab) + Alignment - 1;
110 if (PaddedSize > SizeThreshold) {
Reid Kleckner8f51a622009-07-23 18:34:13 +0000111 MemSlab *NewSlab = Allocator.Allocate(PaddedSize);
112
113 // Put the new slab after the current slab, since we are not allocating
114 // into it.
115 NewSlab->NextPtr = CurSlab->NextPtr;
116 CurSlab->NextPtr = NewSlab;
117
118 Ptr = AlignPtr((char*)(NewSlab + 1), Alignment);
119 assert((uintptr_t)Ptr + Size <= (uintptr_t)NewSlab + NewSlab->Size);
120 return Ptr;
121 }
122
123 // Otherwise, start a new slab and try again.
124 StartNewSlab();
125 Ptr = AlignPtr(CurPtr, Alignment);
126 CurPtr = Ptr + Size;
127 assert(CurPtr <= End && "Unable to allocate memory!");
Chris Lattnerd675b832007-02-23 22:31:24 +0000128 return Ptr;
Chris Lattner9f617d62006-10-29 22:08:03 +0000129}
130
Reid Kleckner8f51a622009-07-23 18:34:13 +0000131unsigned BumpPtrAllocator::GetNumSlabs() const {
132 unsigned NumSlabs = 0;
133 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
134 ++NumSlabs;
135 }
136 return NumSlabs;
Reid Kleckner95eb3ad2009-07-23 00:30:41 +0000137}
138
Ted Kremenek1da29dd2011-04-18 22:44:46 +0000139size_t BumpPtrAllocator::getTotalMemory() const {
140 size_t TotalMemory = 0;
141 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
142 TotalMemory += Slab->Size;
143 }
144 return TotalMemory;
145}
146
Reid Kleckner8f51a622009-07-23 18:34:13 +0000147void BumpPtrAllocator::PrintStats() const {
148 unsigned NumSlabs = 0;
149 size_t TotalMemory = 0;
150 for (MemSlab *Slab = CurSlab; Slab != 0; Slab = Slab->NextPtr) {
151 TotalMemory += Slab->Size;
152 ++NumSlabs;
153 }
154
Daniel Dunbar7da95592009-07-24 04:01:01 +0000155 errs() << "\nNumber of memory regions: " << NumSlabs << '\n'
156 << "Bytes used: " << BytesAllocated << '\n'
157 << "Bytes allocated: " << TotalMemory << '\n'
158 << "Bytes wasted: " << (TotalMemory - BytesAllocated)
159 << " (includes alignment, etc)\n";
Reid Kleckner8f51a622009-07-23 18:34:13 +0000160}
161
Bill Wendlingc5b7b192010-01-16 01:06:58 +0000162MallocSlabAllocator BumpPtrAllocator::DefaultSlabAllocator =
163 MallocSlabAllocator();
Reid Kleckner8f51a622009-07-23 18:34:13 +0000164
165SlabAllocator::~SlabAllocator() { }
166
167MallocSlabAllocator::~MallocSlabAllocator() { }
168
169MemSlab *MallocSlabAllocator::Allocate(size_t Size) {
170 MemSlab *Slab = (MemSlab*)Allocator.Allocate(Size, 0);
171 Slab->Size = Size;
172 Slab->NextPtr = 0;
173 return Slab;
174}
175
176void MallocSlabAllocator::Deallocate(MemSlab *Slab) {
177 Allocator.Deallocate(Slab);
178}
179
180void PrintRecyclerStats(size_t Size,
181 size_t Align,
182 size_t FreeListSize) {
Daniel Dunbar7da95592009-07-24 04:01:01 +0000183 errs() << "Recycler element size: " << Size << '\n'
184 << "Recycler element alignment: " << Align << '\n'
185 << "Number of elements free for recycling: " << FreeListSize << '\n';
Reid Kleckner8f51a622009-07-23 18:34:13 +0000186}
187
Dan Gohmane14d81d2008-07-07 22:58:06 +0000188}