/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "compiler_internals.h"
#include "dex_file-inl.h"
#include "arena_allocator.h"
#include "base/logging.h"
#include "base/mutex.h"
#include "thread-inl.h"
#include <memcheck/memcheck.h>

namespace art {

// MemMap is a bit slower than malloc according to my measurements.
static constexpr bool kUseMemMap = false;
static constexpr bool kUseMemSet = true && kUseMemMap;
static constexpr size_t kValgrindRedZoneBytes = 8;

static const char* alloc_names[ArenaAllocator::kNumAllocKinds] = {
  "Misc       ",
  "BasicBlock ",
  "LIR        ",
  "MIR        ",
  "DataFlow   ",
  "GrowList   ",
  "GrowBitMap ",
  "Dalvik2SSA ",
  "DebugInfo  ",
  "Successor  ",
  "RegAlloc   ",
  "Data       ",
  "Preds      ",
};

Arena::Arena(size_t size)
    : bytes_allocated_(0),
      map_(nullptr),
      next_(nullptr) {
  if (kUseMemMap) {
    map_ = MemMap::MapAnonymous("dalvik-arena", nullptr, size, PROT_READ | PROT_WRITE);
    memory_ = map_->Begin();
    size_ = map_->Size();
  } else {
    memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
    size_ = size;
  }
}

Arena::~Arena() {
  if (kUseMemMap) {
    delete map_;
  } else {
    free(reinterpret_cast<void*>(memory_));
  }
}

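// Prepare the arena for reuse. malloc-backed arenas (and MemMap-backed ones when kUseMemSet
// is enabled) are zeroed with memset so recycled arenas hand out zero-initialized memory;
// otherwise the dirty pages are released with madvise(MADV_DONTNEED), which also yields
// zero-filled pages on the next touch.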
void Arena::Reset() {
  if (bytes_allocated_) {
    if (kUseMemSet || !kUseMemMap) {
      memset(Begin(), 0, bytes_allocated_);
    } else {
      madvise(Begin(), bytes_allocated_, MADV_DONTNEED);
    }
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool()
    : lock_("Arena pool lock"),
      free_arenas_(nullptr) {
}

ArenaPool::~ArenaPool() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

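// Hand out a recycled arena if the head of the free list is large enough. Only the list head
// is examined, so a request larger than the head allocates a fresh arena even if a big enough
// one sits deeper in the list.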
Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = new Arena(size);
  }
  ret->Reset();
  return ret;
}

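// When running under Valgrind, mark the arena's used bytes as undefined before putting it back
// on the free list, so that reads of stale data after the arena is recycled get reported.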
void ArenaPool::FreeArena(Arena* arena) {
  Thread* self = Thread::Current();
  if (UNLIKELY(RUNNING_ON_VALGRIND)) {
    VALGRIND_MAKE_MEM_UNDEFINED(arena->memory_, arena->bytes_allocated_);
  }
  {
    MutexLock lock(self, lock_);
    arena->next_ = free_arenas_;
    free_arenas_ = arena;
  }
}

size_t ArenaAllocator::BytesAllocated() const {
  size_t total = 0;
  for (int i = 0; i < kNumAllocKinds; i++) {
    total += alloc_stats_[i];
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr),
      num_allocations_(0),
      running_on_valgrind_(RUNNING_ON_VALGRIND) {
  memset(&alloc_stats_[0], 0, sizeof(alloc_stats_));
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

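// Allocation path used when running under Valgrind: every allocation gets a trailing red zone
// of kValgrindRedZoneBytes that is marked NOACCESS so buffer overruns are reported. The request
// plus the red zone is rounded up to a 4-byte boundary, e.g. a 5-byte request reserves
// (5 + 3 + 8) & ~3 == 16 bytes, of which the 11 bytes past the request are made NOACCESS.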
void* ArenaAllocator::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = (bytes + 3 + kValgrindRedZoneBytes) & ~3;
  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
    // Obtain a new block.
    ObtainNewArenaForAllocation(rounded_bytes);
    if (UNLIKELY(ptr_ == nullptr)) {
      return nullptr;
    }
  }
  if (kCountAllocations) {
    alloc_stats_[kind] += rounded_bytes;
    ++num_allocations_;
  }
  uint8_t* ret = ptr_;
  ptr_ += rounded_bytes;
  // Check that the memory is already zeroed out.
  for (uint8_t* ptr = ret; ptr < ptr_; ++ptr) {
    CHECK_EQ(*ptr, 0U);
  }
  VALGRIND_MAKE_MEM_NOACCESS(ret + bytes, rounded_bytes - bytes);
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  while (arena_head_ != nullptr) {
    Arena* arena = arena_head_;
    arena_head_ = arena_head_->next_;
    pool_->FreeArena(arena);
  }
}

void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
  UpdateBytesAllocated();
  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
  new_arena->next_ = arena_head_;
  arena_head_ = new_arena;
  // Update our internal data structures.
  ptr_ = begin_ = new_arena->Begin();
  end_ = new_arena->End();
}

// Dump memory usage stats.
void ArenaAllocator::DumpMemStats(std::ostream& os) const {
  size_t malloc_bytes = 0;
  // Start out with how many lost bytes we have in the arena we are currently allocating into.
  size_t lost_bytes(end_ - ptr_);
  size_t num_arenas = 0;
  for (Arena* arena = arena_head_; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    if (arena != arena_head_) {
      lost_bytes += arena->RemainingSpace();
    }
    ++num_arenas;
  }
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  if (num_allocations_ != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations_ << ", avg size: " << bytes_allocated / num_allocations_ << "\n";
  }
  os << "===== Allocation by kind\n";
  for (int i = 0; i < kNumAllocKinds; i++) {
    os << alloc_names[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
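
// Illustrative usage sketch (not part of this translation unit; Alloc() and the kAllocMisc
// kind are assumed to be declared in the accompanying arena_allocator.h):
//
//   ArenaPool pool;
//   {
//     ArenaAllocator allocator(&pool);
//     void* buf = allocator.Alloc(256, ArenaAllocator::kAllocMisc);
//     // ... use buf; no per-allocation free is needed ...
//   }  // ~ArenaAllocator() returns all arenas to the pool for reuse.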

}  // namespace art