/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <algorithm>
#include <iomanip>
#include <numeric>

#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

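// Typical usage, as a minimal sketch (Alloc() and the allocation kinds are
// declared in arena_allocator.h):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* storage = allocator.Alloc(256, kArenaAllocMisc);
//     ...  // storage is zeroed and lives until the allocator is destroyed.
//   }  // ~ArenaAllocator() returns the arena chain to the pool for reuse.
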
// Memmap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
static constexpr size_t kValgrindRedZoneBytes = 8;
constexpr size_t Arena::kDefaultSize;

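// Human-readable names for the ArenaAllocKind values, used by Dump() below.
// The order must match the ArenaAllocKind enum; the static_assert in Dump()
// checks that the counts agree.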
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  "Misc       ",
  "BasicBlock ",
  "BBList     ",
  "BBPreds    ",
  "DfsPreOrd  ",
  "DfsPostOrd ",
  "DomPostOrd ",
  "TopoOrd    ",
  "Lowering   ",
  "LIR        ",
  "LIR masks  ",
  "SwitchTbl  ",
  "FillArray  ",
  "SlowPaths  ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "SSA2Dalvik ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
  "STL        ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u) {
  std::fill_n(alloc_stats_, arraysize(alloc_stats_), 0u);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy(other.alloc_stats_, other.alloc_stats_ + arraysize(alloc_stats_), alloc_stats_);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_, alloc_stats_ + arraysize(alloc_stats_), init);
}

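// The dump below produces output of roughly this shape (numbers are
// illustrative only):
//
//    MEM: used: 61440, allocated: 65536, lost: 4096
//   Number of arenas allocated: 1, Number of allocations: 512, avg size: 120
//   ===== Allocation by kind
//   Misc             4096
//   BasicBlock       8192
//   ...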
template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

// Explicitly instantiate the used implementation.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations>;

Arena::Arena(size_t size)
    : bytes_allocated_(0),
      map_(nullptr),
      next_(nullptr) {
  if (kUseMemMap) {
    std::string error_msg;
    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE, false,
                                &error_msg);
    CHECK(map_ != nullptr) << error_msg;
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

void Arena::Reset() {
  if (bytes_allocated_) {
    // Zero out only the bytes that were handed out, so that the next user of
    // this arena sees zero-initialized memory.
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      map_->MadviseDontNeedAndZero();
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

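// Hand out an arena with at least |size| bytes of space. Note that only the
// head of the free list is examined: if it is too small, a fresh arena is
// allocated even when a large enough arena sits further down the list. The
// returned arena is Reset(), so callers always see zeroed memory.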
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

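// Note: this counts only the arenas currently sitting in the free list;
// arenas checked out by live ArenaAllocators are not included.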
size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

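// Return a chain of arenas to the free list. Under Valgrind the used portion
// of each arena is first marked as undefined, so that use of stale arena
// memory is reported until the memory is handed out again.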
void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_VALGRIND > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }
  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

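// Valgrind-instrumented allocation path: each allocation gets a trailing red
// zone of at least kValgrindRedZoneBytes, and the total is rounded up to an
// 8-byte boundary. The resulting layout is
//
//   [ret, ret + bytes)                  usable, zeroed payload
//   [ret + bytes, ret + rounded_bytes)  red zone, marked NOACCESS
//
// so that reads or writes past the requested size are flagged by Valgrind.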
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

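// Retire the current arena (any unused tail in it becomes "lost" bytes in the
// stats dump) and make a new arena of at least |allocation_size| bytes current.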
void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

MemStats::MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art