/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <sys/mman.h>

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include <android-base/logging.h>

namespace art {

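// Size of the inaccessible "red zone" appended to each allocation when running under a
// memory tool (such as ASan), so that out-of-bounds accesses can be detected.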
constexpr size_t kMemoryToolRedZoneBytes = 8;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    // Reduce output by listing only allocation kinds that actually have allocations.
    if (alloc_stats_[i] != 0u) {
      os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
    }
  }
}

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

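// Bytes used so far: the live position in the current arena (ptr_ - begin_) plus the
// recorded bytes_allocated_ of every other arena in the chain.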
size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
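  // Resulting layout for each allocation, with only the user bytes marked defined:
  //   [ user bytes | red zone | round-up padding ]   <- red zone and padding stay inaccessible.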
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
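  // Round `ptr_` up to the next 16-byte boundary; the bytes skipped become inaccessible padding.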
  uintptr_t padding =
      ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & ~15u) - reinterpret_cast<uintptr_t>(ptr_);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}

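// Memory-tool variant: after allocating from a new arena, mark everything behind the
// returned block, up to the end of the arena it came from, as inaccessible again.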
uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

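// Returns true if `ptr` came from this allocator: check the active allocation range
// first, then fall back to scanning every arena in the chain.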
bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
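  // The head arena's bytes_allocated_ may be stale, so replace its RemainingSpace()
  // contribution with the live end_ - ptr_ value for an accurate "lost" count.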
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}

}  // namespace art