/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_

#include <stddef.h>
#include <stdint.h>

#include "base/bit_utils.h"
#include "base/debug_stack.h"
#include "base/dchecked_vector.h"
#include "base/macros.h"
#include "base/memory_tool.h"
#include "mutex.h"

namespace art {

class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

static constexpr bool kArenaAllocatorCountAllocations = false;

// Type of allocation for memory tuning.
// Each kind tags allocations so per-kind byte counts can be reported when
// kArenaAllocatorCountAllocations is enabled.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocTypeCheckInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kNumArenaAllocKinds  // Sentinel: number of kinds; must remain last.
};

109template <bool kCount>
110class ArenaAllocatorStatsImpl;
111
112template <>
113class ArenaAllocatorStatsImpl<false> {
114 public:
115 ArenaAllocatorStatsImpl() = default;
116 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
117 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
118
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100119 void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
120 void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000121 size_t NumAllocations() const { return 0u; }
122 size_t BytesAllocated() const { return 0u; }
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100123 void Dump(std::ostream& os ATTRIBUTE_UNUSED,
124 const Arena* first ATTRIBUTE_UNUSED,
125 ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000126};
127
128template <bool kCount>
129class ArenaAllocatorStatsImpl {
130 public:
131 ArenaAllocatorStatsImpl();
132 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
133 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
134
135 void Copy(const ArenaAllocatorStatsImpl& other);
136 void RecordAlloc(size_t bytes, ArenaAllocKind kind);
137 size_t NumAllocations() const;
138 size_t BytesAllocated() const;
139 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
140
141 private:
142 size_t num_allocations_;
Vladimir Marko4e335d02016-12-19 16:04:33 +0000143 dchecked_vector<size_t> alloc_stats_; // Bytes used by various allocation kinds.
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000144
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100145 static const char* const kAllocNames[];
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000146};
147
148typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700149
Vladimir Marko2a408a32015-09-18 14:11:00 +0100150template <bool kAvailable, bool kValgrind>
151class ArenaAllocatorMemoryToolCheckImpl {
152 // This is the generic template but since there is a partial specialization
153 // for kValgrind == false, this can be instantiated only for kValgrind == true.
154 static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
155 static_assert(kAvailable, "Valgrind implies memory tool availability.");
156
157 public:
158 ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
159 bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }
160
161 private:
162 const bool is_running_on_valgrind_;
163};
164
165template <bool kAvailable>
166class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
167 public:
168 ArenaAllocatorMemoryToolCheckImpl() { }
169 bool IsRunningOnMemoryTool() { return kAvailable; }
170};
171
172typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
173 ArenaAllocatorMemoryToolCheck;
174
175class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
176 public:
177 using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;
178
179 void MakeDefined(void* ptr, size_t size) {
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000180 if (UNLIKELY(IsRunningOnMemoryTool())) {
181 DoMakeDefined(ptr, size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100182 }
183 }
184 void MakeUndefined(void* ptr, size_t size) {
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000185 if (UNLIKELY(IsRunningOnMemoryTool())) {
186 DoMakeUndefined(ptr, size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100187 }
188 }
189 void MakeInaccessible(void* ptr, size_t size) {
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000190 if (UNLIKELY(IsRunningOnMemoryTool())) {
191 DoMakeInaccessible(ptr, size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100192 }
193 }
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000194
195 private:
196 void DoMakeDefined(void* ptr, size_t size);
197 void DoMakeUndefined(void* ptr, size_t size);
198 void DoMakeInaccessible(void* ptr, size_t size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100199};
200
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700201class Arena {
202 public:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700203 Arena();
204 virtual ~Arena() { }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700205 // Reset is for pre-use and uses memset for performance.
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700206 void Reset();
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700207 // Release is used inbetween uses and uses madvise for memory usage.
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700208 virtual void Release() { }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700209 uint8_t* Begin() {
210 return memory_;
buzbee862a7602013-04-05 10:58:54 -0700211 }
212
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700213 uint8_t* End() {
214 return memory_ + size_;
215 }
216
217 size_t Size() const {
218 return size_;
219 }
220
221 size_t RemainingSpace() const {
222 return Size() - bytes_allocated_;
223 }
224
Mathieu Chartier49285c52014-12-02 15:43:48 -0800225 size_t GetBytesAllocated() const {
226 return bytes_allocated_;
227 }
228
Mathieu Chartiere401d142015-04-22 13:56:20 -0700229 // Return true if ptr is contained in the arena.
230 bool Contains(const void* ptr) const {
231 return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
232 }
233
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700234 protected:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700235 size_t bytes_allocated_;
236 uint8_t* memory_;
237 size_t size_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700238 Arena* next_;
239 friend class ArenaPool;
240 friend class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000241 friend class ArenaStack;
242 friend class ScopedArenaAllocator;
243 template <bool kCount> friend class ArenaAllocatorStatsImpl;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700244
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100245 friend class ArenaAllocatorTest;
246
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700247 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700248 DISALLOW_COPY_AND_ASSIGN(Arena);
249};
250
251class ArenaPool {
252 public:
Chih-Hung Hsieha5931182016-09-01 15:08:13 -0700253 explicit ArenaPool(bool use_malloc = true,
254 bool low_4gb = false,
255 const char* name = "LinearAlloc");
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700256 ~ArenaPool();
Mathieu Chartier90443472015-07-16 20:32:27 -0700257 Arena* AllocArena(size_t size) REQUIRES(!lock_);
258 void FreeArenaChain(Arena* first) REQUIRES(!lock_);
259 size_t GetBytesAllocated() const REQUIRES(!lock_);
Jean-Philippe Halimica76a1a2016-02-02 19:48:52 +0100260 void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
261 void LockReclaimMemory() REQUIRES(!lock_);
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700262 // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
263 // use_malloc is false.
Mathieu Chartier90443472015-07-16 20:32:27 -0700264 void TrimMaps() REQUIRES(!lock_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700265
266 private:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700267 const bool use_malloc_;
Mathieu Chartier49285c52014-12-02 15:43:48 -0800268 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700269 Arena* free_arenas_ GUARDED_BY(lock_);
Mathieu Chartierc7853442015-03-27 14:35:38 -0700270 const bool low_4gb_;
Nicolas Geoffray25e04562016-03-01 13:17:58 +0000271 const char* name_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700272 DISALLOW_COPY_AND_ASSIGN(ArenaPool);
273};
274
Vladimir Markofda04322015-11-11 18:45:50 +0000275// Fast single-threaded allocator for zero-initialized memory chunks.
276//
277// Memory is allocated from ArenaPool in large chunks and then rationed through
278// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
279// is destroyed.
Vladimir Marko2a408a32015-09-18 14:11:00 +0100280class ArenaAllocator
281 : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700282 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700283 explicit ArenaAllocator(ArenaPool* pool);
284 ~ArenaAllocator();
285
Vladimir Marko2a408a32015-09-18 14:11:00 +0100286 using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
287 using ArenaAllocatorMemoryTool::MakeDefined;
288 using ArenaAllocatorMemoryTool::MakeUndefined;
289 using ArenaAllocatorMemoryTool::MakeInaccessible;
290
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100291 // Get adapter for use in STL containers. See arena_containers.h .
292 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
293
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700294 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000295 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100296 if (UNLIKELY(IsRunningOnMemoryTool())) {
297 return AllocWithMemoryTool(bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700298 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800299 bytes = RoundUp(bytes, kAlignment);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000300 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100301 if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
302 return AllocFromNewArena(bytes);
303 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700304 uint8_t* ret = ptr_;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700305 DCHECK_ALIGNED(ret, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700306 ptr_ += bytes;
307 return ret;
308 }
309
Vladimir Markof44d36c2017-03-14 14:18:46 +0000310 // Returns zeroed memory.
311 void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
312 // It is an error to request 16-byte aligned allocation of unaligned size.
313 DCHECK_ALIGNED(bytes, 16);
314 if (UNLIKELY(IsRunningOnMemoryTool())) {
315 return AllocWithMemoryToolAlign16(bytes, kind);
316 }
317 uintptr_t padding =
318 ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & 15u) - reinterpret_cast<uintptr_t>(ptr_);
319 ArenaAllocatorStats::RecordAlloc(bytes, kind);
320 if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
321 static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
322 return AllocFromNewArena(bytes);
323 }
324 ptr_ += padding;
325 uint8_t* ret = ptr_;
326 DCHECK_ALIGNED(ret, 16);
327 ptr_ += bytes;
328 return ret;
329 }
330
Mathieu Chartiere401d142015-04-22 13:56:20 -0700331 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
Vladimir Markof44d36c2017-03-14 14:18:46 +0000332 void* Realloc(void* ptr,
333 size_t ptr_size,
334 size_t new_size,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700335 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
336 DCHECK_GE(new_size, ptr_size);
337 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700338 // We always allocate aligned.
339 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
340 auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700341 // If we haven't allocated anything else, we can safely extend.
342 if (end == ptr_) {
Andreas Gampe421dbc22017-07-11 08:32:29 -0700343 // Red zone prevents end == ptr_ (unless input = allocator state = null).
344 DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700345 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
346 const size_t size_delta = aligned_new_size - aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700347 // Check remain space.
348 const size_t remain = end_ - ptr_;
349 if (remain >= size_delta) {
350 ptr_ += size_delta;
351 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700352 DCHECK_ALIGNED(ptr_, kAlignment);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700353 return ptr;
354 }
355 }
Andreas Gampef6dd8292016-08-19 20:22:19 -0700356 auto* new_ptr = Alloc(new_size, kind); // Note: Alloc will take care of aligning new_size.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700357 memcpy(new_ptr, ptr, ptr_size);
358 // TODO: Call free on ptr if linear alloc supports free.
359 return new_ptr;
360 }
361
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000362 template <typename T>
Vladimir Markof6a35de2016-03-21 12:01:50 +0000363 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
364 return AllocArray<T>(1, kind);
365 }
366
367 template <typename T>
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000368 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
369 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100370 }
371
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700372 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700373
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000374 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700375
Mathieu Chartierc7853442015-03-27 14:35:38 -0700376 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
377 // TODO: Change BytesAllocated to this behavior?
378 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700379
Mathieu Chartiere401d142015-04-22 13:56:20 -0700380 ArenaPool* GetArenaPool() const {
381 return pool_;
382 }
383
384 bool Contains(const void* ptr) const;
385
Vladimir Markof44d36c2017-03-14 14:18:46 +0000386 // The alignment guaranteed for individual allocations.
387 static constexpr size_t kAlignment = 8u;
388
389 // The alignment required for the whole Arena rather than individual allocations.
390 static constexpr size_t kArenaAlignment = 16u;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700391
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700392 private:
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100393 void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
Vladimir Markof44d36c2017-03-14 14:18:46 +0000394 void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
Vladimir Marko3f84f2c2016-04-25 19:40:34 +0100395 uint8_t* AllocFromNewArena(size_t bytes);
Vladimir Markof44d36c2017-03-14 14:18:46 +0000396 uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);
Mathieu Chartierb666f482015-02-18 14:33:14 -0800397
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700398 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700399
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700400 ArenaPool* pool_;
401 uint8_t* begin_;
402 uint8_t* end_;
403 uint8_t* ptr_;
404 Arena* arena_head_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700405
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100406 template <typename U>
407 friend class ArenaAllocatorAdapter;
408
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100409 friend class ArenaAllocatorTest;
410
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700411 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700412}; // ArenaAllocator
413
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000414class MemStats {
415 public:
Vladimir Markof44d36c2017-03-14 14:18:46 +0000416 MemStats(const char* name,
417 const ArenaAllocatorStats* stats,
418 const Arena* first_arena,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000419 ssize_t lost_bytes_adjustment = 0);
420 void Dump(std::ostream& os) const;
421
422 private:
423 const char* const name_;
424 const ArenaAllocatorStats* const stats_;
425 const Arena* const first_arena_;
426 const ssize_t lost_bytes_adjustment_;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700427}; // MemStats
buzbee862a7602013-04-05 10:58:54 -0700428
}  // namespace art

#endif  // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_