/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include "base/bit_utils.h"
#include "base/macros.h"
#include "base/memory_tool.h"
#include "dchecked_vector.h"
#include "debug_stack.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kNumArenaAllocKinds
};

template <bool kCount>
class ArenaAllocatorStatsImpl;

template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};

template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  static const char* const kAllocNames[];
};

typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
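
// Note: with kArenaAllocatorCountAllocations == false (the default above), the
// empty ArenaAllocatorStatsImpl<false> specialization is selected and every
// stats call compiles away to a no-op. A minimal sketch of opting in, assuming
// a local build simply flips the constant (illustrative usage only):
//
//   static constexpr bool kArenaAllocatorCountAllocations = true;
//   ArenaAllocatorStats stats;                 // now ArenaAllocatorStatsImpl<true>
//   stats.RecordAlloc(64u, kArenaAllocMisc);   // recorded instead of discarded
//   size_t total = stats.BytesAllocated();     // 64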

template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;

class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
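
// Illustrative sketch of how an allocator is expected to use these annotations
// (not call sites from this header; `p` and `n` are hypothetical):
//
//   MakeUndefined(p, n);     // fresh allocation: reading before writing is reported
//   ...                      // client code uses [p, p + n)
//   MakeInaccessible(p, n);  // freed or red-zone bytes: any access is reported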

class Arena {
 public:
  Arena();
  virtual ~Arena() { }
  // Reset is called before reuse; it zeroes the used memory with memset for performance.
  void Reset();
  // Release is called in between uses; it returns pages with madvise to reduce memory usage.
  virtual void Release() { }
  uint8_t* Begin() {
    return memory_;
  }

  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;
  uint8_t* memory_;
  size_t size_;
  Arena* next_;
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};
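
// A hedged sketch of the Reset/Release distinction described above (assumed
// caller; arenas are normally managed through ArenaPool rather than directly):
//
//   arena->Reset();    // about to be reused: zero the used bytes with memset
//   arena->Release();  // idle between uses: return pages via madvise, keep the mapping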

class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true,
                     bool low_4gb = false,
                     const char* name = "LinearAlloc");
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage.
  // This only works if use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
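
// Illustrative lifecycle sketch (assumed typical usage; in practice an
// ArenaAllocator requests and returns arenas on the caller's behalf):
//
//   ArenaPool pool(/* use_malloc= */ false);
//   Arena* arena = pool.AllocArena(128 * 1024);  // reused from the free list if possible
//   ...                                          // carve allocations out of the arena
//   pool.FreeArenaChain(arena);                  // recycled into free_arenas_, not unmapped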

// Fast single-threaded allocator for zero-initialized memory chunks.
//
// Memory is allocated from ArenaPool in large chunks and then rationed through
// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
// is destroyed.
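//
// Example usage (a minimal sketch; compiler passes follow this pattern, though
// the sizes and kinds below are illustrative):
//
//   ArenaPool pool;
//   ArenaAllocator allocator(&pool);
//   int32_t* vregs = allocator.AllocArray<int32_t>(16u, kArenaAllocMisc);
//   // vregs points to zeroed, 8-byte aligned storage; all memory goes back
//   // to the pool in one step when `allocator` is destroyed.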
class ArenaAllocator
    : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaAllocator(ArenaPool* pool);
  ~ArenaAllocator();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Get adapter for use in STL containers. See arena_containers.h.
  ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Returns zeroed memory.
  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    bytes = RoundUp(bytes, kAlignment);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
      return AllocFromNewArena(bytes);
    }
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, kAlignment);
    ptr_ += bytes;
    return ret;
  }
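
  // Worked example of the fast path above, assuming kAlignment == 8: a request
  // for 13 bytes becomes RoundUp(13u, 8u) == 16u, the current ptr_ is returned,
  // and ptr_ advances by 16, so the bump pointer stays 8-byte aligned.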

  // Returns zeroed memory.
  void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    // It is an error to request 16-byte aligned allocation of unaligned size.
    DCHECK_ALIGNED(bytes, 16);
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryToolAlign16(bytes, kind);
    }
    // Round ptr_ up to the next 16-byte boundary to compute the padding.
    uintptr_t padding =
        RoundUp(reinterpret_cast<uintptr_t>(ptr_), 16) - reinterpret_cast<uintptr_t>(ptr_);
    ArenaAllocatorStats::RecordAlloc(bytes, kind);
    if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
      static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
      return AllocFromNewArena(bytes);
    }
    ptr_ += padding;
    uint8_t* ret = ptr_;
    DCHECK_ALIGNED(ret, 16);
    ptr_ += bytes;
    return ret;
  }
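
  // Worked example of the padding computation above: if ptr_ ends in 0x...08,
  // RoundUp(ptr_, 16) - ptr_ == 8, so ptr_ is bumped to the next 16-byte
  // boundary before carving out `bytes`.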

  // Realloc never frees the input pointer; it is the caller's job to do this if necessary.
  void* Realloc(void* ptr,
                size_t ptr_size,
                size_t new_size,
                ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DCHECK_GE(new_size, ptr_size);
    DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
    // We always allocate aligned.
    const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
    auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
    // If we haven't allocated anything else, we can safely extend.
    if (end == ptr_) {
      // Red zone prevents end == ptr_ (unless input = allocator state = null).
      DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
      const size_t aligned_new_size = RoundUp(new_size, kAlignment);
      const size_t size_delta = aligned_new_size - aligned_ptr_size;
      // Check remaining space.
      const size_t remain = end_ - ptr_;
      if (remain >= size_delta) {
        ptr_ += size_delta;
        ArenaAllocatorStats::RecordAlloc(size_delta, kind);
        DCHECK_ALIGNED(ptr_, kAlignment);
        return ptr;
      }
    }
    auto* new_ptr = Alloc(new_size, kind);  // Note: Alloc will take care of aligning new_size.
    memcpy(new_ptr, ptr, ptr_size);
    // TODO: Call free on ptr if linear alloc supports free.
    return new_ptr;
  }
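
  // Sketch of the in-place extension (hypothetical values): right after
  // p = Alloc(8u), ptr_ == p + 8, so Realloc(p, 8u, 24u) merely advances ptr_
  // by 16 and returns p; no memcpy happens unless something else was allocated
  // in between.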

  template <typename T>
  T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
    return AllocArray<T>(1, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  size_t BytesAllocated() const;

  MemStats GetMemStats() const;

  // The BytesUsed method sums up the bytes allocated from the arenas in the arena_head_ chain.
  // TODO: Change BytesAllocated to this behavior?
  size_t BytesUsed() const;

  ArenaPool* GetArenaPool() const {
    return pool_;
  }

  bool Contains(const void* ptr) const;

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

  // The alignment required for the whole Arena rather than individual allocations.
  static constexpr size_t kArenaAlignment = 16u;

 private:
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
  void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
  uint8_t* AllocFromNewArena(size_t bytes);
  uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);

  void UpdateBytesAllocated();

  ArenaPool* pool_;
  uint8_t* begin_;
  uint8_t* end_;
  uint8_t* ptr_;
  Arena* arena_head_;

  template <typename U>
  friend class ArenaAllocatorAdapter;

  friend class ArenaAllocatorTest;

  DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
};  // ArenaAllocator

class MemStats {
 public:
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
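
// Illustrative dump sketch (assumed caller; Dumpable is ART's streaming helper
// from base/dumpable.h, and `allocator` is a hypothetical ArenaAllocator):
//
//   MemStats stats(allocator.GetMemStats());
//   LOG(INFO) << Dumpable<MemStats>(stats);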

}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_