/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <sys/mman.h>

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "systrace.h"
#include "thread-current-inl.h"

namespace art {

constexpr size_t kMemoryToolRedZoneBytes = 8;
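
// Note: the red zone above is only used by the *WithMemoryTool allocation paths below. Each
// such allocation is padded by kMemoryToolRedZoneBytes, and the padding is left marked
// inaccessible so the memory tool (ASan/Valgrind) can flag overruns into the next allocation.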

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
};

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}

class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = arena_allocator::kArenaDefaultSize);
  virtual ~MallocArena();
 private:
  static constexpr size_t RequiredOverallocation() {
    return (alignof(std::max_align_t) < ArenaAllocator::kArenaAlignment)
        ? ArenaAllocator::kArenaAlignment - alignof(std::max_align_t)
        : 0u;
  }

  uint8_t* unaligned_memory_;
};

MallocArena::MallocArena(size_t size) {
  // We need to guarantee kArenaAlignment aligned allocation for the new arena.
  // TODO: Use std::aligned_alloc() when it becomes available with C++17.
  constexpr size_t overallocation = RequiredOverallocation();
  unaligned_memory_ = reinterpret_cast<uint8_t*>(calloc(1, size + overallocation));
  CHECK(unaligned_memory_ != nullptr);  // Abort on OOM.
  DCHECK_ALIGNED(unaligned_memory_, alignof(std::max_align_t));
  if (overallocation == 0u) {
    memory_ = unaligned_memory_;
  } else {
    memory_ = AlignUp(unaligned_memory_, ArenaAllocator::kArenaAlignment);
    if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
      size_t head = memory_ - unaligned_memory_;
      size_t tail = overallocation - head;
      MEMORY_TOOL_MAKE_NOACCESS(unaligned_memory_, head);
      MEMORY_TOOL_MAKE_NOACCESS(memory_ + size, tail);
    }
  }
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = size;
}

MallocArena::~MallocArena() {
  constexpr size_t overallocation = RequiredOverallocation();
  if (overallocation != 0u && UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    size_t head = memory_ - unaligned_memory_;
    size_t tail = overallocation - head;
    MEMORY_TOOL_MAKE_UNDEFINED(unaligned_memory_, head);
    MEMORY_TOOL_MAKE_UNDEFINED(memory_ + size_, tail);
  }
  free(reinterpret_cast<void*>(unaligned_memory_));
}

class MemMapArena FINAL : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  // Round up to a full page as that's the smallest unit of allocation for mmap()
  // and we want to be able to use all memory that we actually allocate.
  size = RoundUp(size, kPageSize);
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  static_assert(ArenaAllocator::kArenaAlignment <= kPageSize,
                "Arena should not need stronger alignment than kPageSize.");
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = map_->Size();
}

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}
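
// Both Release() and Reset() leave the used part of an arena zeroed: madvise on an anonymous
// mapping yields zero pages on the next touch, and Reset() memsets explicitly. The allocator
// relies on this, since Alloc() hands out memory without clearing it and the memory-tool paths
// below DCHECK that freshly allocated bytes are already zero.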

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }

  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    // Do not reuse arenas when tracking.
    while (first != nullptr) {
      Arena* next = first->next_;
      delete first;
      first = next;
    }
    return;
  }

  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
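
// FreeArenaChain() splices the entire chain onto the free list in one go: it walks the chain to
// find the last node outside the lock and then swaps just two pointers while holding it, so
// returning N arenas costs O(N) unlocked work and O(1) locked work.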

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}
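
// Typical usage (a sketch only, relying on the declarations in arena_allocator.h at this
// revision; the pool normally outlives every allocator drawing from it):
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "compiler arena pool");
//   {
//     ArenaAllocator allocator(&pool);
//     void* storage = allocator.Alloc(256, kArenaAllocMisc);  // Bump-pointer allocation.
//     // ... use storage; there is no per-object free ...
//   }  // ~ArenaAllocator() returns the whole arena chain to the pool at once.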

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
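
// Resulting layout of one memory-tool allocation of `bytes` (sketch):
//
//   ret                      ret + bytes                        ret + rounded_bytes
//    |<------- defined ------>|<-- red zone + rounding pad (inaccessible) -->|
//
// Only the first `bytes` are marked defined; the remainder of the 8-byte-rounded block stays
// inaccessible from the arena-wide NOACCESS marking applied when the arena was acquired.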

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
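
// Worked example for the padding computed above: if ptr_ ends in ...0x...38, RoundUp(ptr_, 16)
// ends in ...0x...40, so padding is 8 bytes; if ptr_ is already 16-byte aligned, padding is 0
// and the allocation starts at ptr_ itself. Together with the 16-byte-aligned `bytes` and the
// 16-byte (or stronger) arena alignment, this keeps every returned pointer 16-byte aligned.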

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}
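
// Illustrative scenario for the branch above (sizes are made up; the real default comes from
// arena_allocator::kArenaDefaultSize): suppose the current arena still has 100 KiB free and a
// 112 KiB request arrives with a 128 KiB default arena size. The new arena would have only
// 16 KiB left after serving the request, less than the 100 KiB remaining in the old one, so the
// new arena is inserted *behind* the head and the old bump range stays live. Conversely, a
// request arriving when only a little space is left makes the new arena the head and switches
// begin_/ptr_/end_ over to it.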

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
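
// The adjustment above compensates for the head arena's stale bytes_allocated_: its true free
// space is end_ - ptr_, while RemainingSpace() is computed from bytes_allocated_, which is only
// refreshed by UpdateBytesAllocated(). The difference is passed to
// ArenaAllocatorStatsImpl::Dump() as lost_bytes_adjustment so that the reported "lost" figure
// is exact.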

}  // namespace art