blob: 211ff4f6adc5f88cb16ff000d5780f9669bd110b [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
David Sehr1ce2b3b2018-04-05 11:02:03 -070017#ifndef ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
18#define ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
buzbee862a7602013-04-05 10:58:54 -070020#include <stddef.h>
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070021#include <stdint.h>
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022
David Sehr1979c642018-04-26 14:41:18 -070023#include "bit_utils.h"
24#include "debug_stack.h"
25#include "dchecked_vector.h"
26#include "macros.h"
27#include "memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070028
29namespace art {
30
// Forward declarations for types defined below or in sibling headers/.cc files.
class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemStats;

// STL-compatible adapter over ArenaAllocator; see arena_containers.h.
template <typename T>
class ArenaAllocatorAdapter;
40
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000041static constexpr bool kArenaAllocatorCountAllocations = false;
42
// Type of allocation for memory tuning.
// Each kind gets its own byte counter in ArenaAllocatorStatsImpl::alloc_stats_,
// so the enumerator order defines the stats-table layout.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  // Compiler graph construction and core IR.
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocConstructorFenceInputs,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocTypeCheckInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  // Optimization passes.
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSA,
  kArenaAllocLSE,
  kArenaAllocCFRE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSelectGenerator,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  // Code generation and miscellaneous clients.
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kArenaAllocProfile,
  kArenaAllocSuperblockCloner,
  kNumArenaAllocKinds  // Sentinel: number of kinds, not a real allocation kind.
};
108
// Primary template declared here; the no-op specialization for kCount == false
// and the counting (generic) definition follow below.
template <bool kCount>
class ArenaAllocatorStatsImpl;
111
// No-op statistics implementation, selected when allocation counting is
// disabled. All members are empty or return zero so calls can be optimized away.
template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  // All of the below intentionally do nothing / report zero.
  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};
127
// Counting statistics implementation: tracks the total number of allocations
// and the bytes allocated per ArenaAllocKind. Methods are defined out of line.
template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  // Copy the counters from `other` into this object.
  void Copy(const ArenaAllocatorStatsImpl& other);
  // Account `bytes` against the counter for `kind`.
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  // Dump a human-readable breakdown; `lost_bytes_adjustment` corrects for
  // space unusable at the end of `first` and subsequent arenas.
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  // Printable names parallel to ArenaAllocKind (defined in the .cc file).
  static const char* const kAllocNames[];
};
147
148typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700149
// Runtime check for whether a memory tool (Valgrind) is active.
template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  // Query RUNNING_ON_MEMORY_TOOL once at construction and cache the result.
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};
164
// Non-Valgrind specialization: availability is a compile-time constant,
// so no runtime probe is needed.
template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};
171
// Concrete check type, selected by the build's memory-tool configuration.
typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;
174
// Mixin exposing memory-tool annotations (define/undefine/poison regions).
// Each wrapper is a no-op unless a memory tool is actually running; the
// UNLIKELY hint keeps the common (no-tool) path cheap.
class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  // Mark [ptr, ptr + size) as defined (readable/writable) for the tool.
  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  // Mark [ptr, ptr + size) as allocated but uninitialized.
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  // Mark [ptr, ptr + size) as inaccessible (poisoned).
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  // Out-of-line slow paths that talk to the memory tool.
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
200
// A single contiguous memory chunk handed out by an ArenaPool. Arenas are
// chained via next_ (managed by the pool friends below).
class Arena {
 public:
  Arena();
  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used inbetween uses and uses madvise for memory usage.
  virtual void Release() { }
  // Start of the arena's backing memory.
  uint8_t* Begin() {
    return memory_;
  }

  // One past the end of the arena's backing memory.
  uint8_t* End() {
    return memory_ + size_;
  }

  size_t Size() const {
    return size_;
  }

  // Bytes not yet handed out.
  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  // Note: only the allocated prefix counts, not the full [memory_, memory_ + size_).
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;  // Bytes handed out so far.
  uint8_t* memory_;         // Backing storage (owned by the pool implementation).
  size_t size_;             // Total capacity in bytes.
  Arena* next_;             // Intrusive singly-linked chain, maintained by friends.
  friend class MallocArenaPool;
  friend class MemMapArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};
251
// Abstract interface for a source of Arenas (see MallocArenaPool /
// MemMapArenaPool, declared as friends of Arena above).
class ArenaPool {
 public:
  virtual ~ArenaPool() = default;

  // Hand out an arena for `size` bytes.
  virtual Arena* AllocArena(size_t size) = 0;
  // Return a next_-linked chain of arenas, starting at `first`, to the pool.
  virtual void FreeArenaChain(Arena* first) = 0;
  virtual size_t GetBytesAllocated() const = 0;
  virtual void ReclaimMemory() = 0;
  // NOTE(review): presumably ReclaimMemory under the implementation's lock —
  // confirm against the concrete pool implementations.
  virtual void LockReclaimMemory() = 0;
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage.
  virtual void TrimMaps() = 0;

 protected:
  ArenaPool() = default;

 private:
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
270
Vladimir Markofda04322015-11-11 18:45:50 +0000271// Fast single-threaded allocator for zero-initialized memory chunks.
272//
273// Memory is allocated from ArenaPool in large chunks and then rationed through
274// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
275// is destroyed.
Vladimir Marko2a408a32015-09-18 14:11:00 +0100276class ArenaAllocator
277 : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700278 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700279 explicit ArenaAllocator(ArenaPool* pool);
280 ~ArenaAllocator();
281
Vladimir Marko2a408a32015-09-18 14:11:00 +0100282 using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
283 using ArenaAllocatorMemoryTool::MakeDefined;
284 using ArenaAllocatorMemoryTool::MakeUndefined;
285 using ArenaAllocatorMemoryTool::MakeInaccessible;
286
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100287 // Get adapter for use in STL containers. See arena_containers.h .
288 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
289
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700290 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000291 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100292 if (UNLIKELY(IsRunningOnMemoryTool())) {
293 return AllocWithMemoryTool(bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700294 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800295 bytes = RoundUp(bytes, kAlignment);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000296 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100297 if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
298 return AllocFromNewArena(bytes);
299 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700300 uint8_t* ret = ptr_;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700301 DCHECK_ALIGNED(ret, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700302 ptr_ += bytes;
303 return ret;
304 }
305
Vladimir Markof44d36c2017-03-14 14:18:46 +0000306 // Returns zeroed memory.
307 void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
308 // It is an error to request 16-byte aligned allocation of unaligned size.
309 DCHECK_ALIGNED(bytes, 16);
310 if (UNLIKELY(IsRunningOnMemoryTool())) {
311 return AllocWithMemoryToolAlign16(bytes, kind);
312 }
313 uintptr_t padding =
314 ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & 15u) - reinterpret_cast<uintptr_t>(ptr_);
315 ArenaAllocatorStats::RecordAlloc(bytes, kind);
316 if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
317 static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
318 return AllocFromNewArena(bytes);
319 }
320 ptr_ += padding;
321 uint8_t* ret = ptr_;
322 DCHECK_ALIGNED(ret, 16);
323 ptr_ += bytes;
324 return ret;
325 }
326
Mathieu Chartiere401d142015-04-22 13:56:20 -0700327 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
Vladimir Markof44d36c2017-03-14 14:18:46 +0000328 void* Realloc(void* ptr,
329 size_t ptr_size,
330 size_t new_size,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700331 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
332 DCHECK_GE(new_size, ptr_size);
333 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700334 // We always allocate aligned.
335 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
336 auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700337 // If we haven't allocated anything else, we can safely extend.
338 if (end == ptr_) {
Andreas Gampe421dbc22017-07-11 08:32:29 -0700339 // Red zone prevents end == ptr_ (unless input = allocator state = null).
340 DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700341 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
342 const size_t size_delta = aligned_new_size - aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700343 // Check remain space.
344 const size_t remain = end_ - ptr_;
345 if (remain >= size_delta) {
346 ptr_ += size_delta;
347 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700348 DCHECK_ALIGNED(ptr_, kAlignment);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700349 return ptr;
350 }
351 }
Andreas Gampef6dd8292016-08-19 20:22:19 -0700352 auto* new_ptr = Alloc(new_size, kind); // Note: Alloc will take care of aligning new_size.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700353 memcpy(new_ptr, ptr, ptr_size);
354 // TODO: Call free on ptr if linear alloc supports free.
355 return new_ptr;
356 }
357
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000358 template <typename T>
Vladimir Markof6a35de2016-03-21 12:01:50 +0000359 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
360 return AllocArray<T>(1, kind);
361 }
362
363 template <typename T>
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000364 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
365 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100366 }
367
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700368 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700369
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000370 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700371
Mathieu Chartierc7853442015-03-27 14:35:38 -0700372 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
373 // TODO: Change BytesAllocated to this behavior?
374 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700375
Mathieu Chartiere401d142015-04-22 13:56:20 -0700376 ArenaPool* GetArenaPool() const {
377 return pool_;
378 }
379
380 bool Contains(const void* ptr) const;
381
Vladimir Markof44d36c2017-03-14 14:18:46 +0000382 // The alignment guaranteed for individual allocations.
383 static constexpr size_t kAlignment = 8u;
384
385 // The alignment required for the whole Arena rather than individual allocations.
386 static constexpr size_t kArenaAlignment = 16u;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700387
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700388 private:
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100389 void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
Vladimir Markof44d36c2017-03-14 14:18:46 +0000390 void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
Vladimir Marko3f84f2c2016-04-25 19:40:34 +0100391 uint8_t* AllocFromNewArena(size_t bytes);
Vladimir Markof44d36c2017-03-14 14:18:46 +0000392 uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);
Mathieu Chartierb666f482015-02-18 14:33:14 -0800393
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700394 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700395
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700396 ArenaPool* pool_;
397 uint8_t* begin_;
398 uint8_t* end_;
399 uint8_t* ptr_;
400 Arena* arena_head_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700401
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100402 template <typename U>
403 friend class ArenaAllocatorAdapter;
404
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100405 friend class ArenaAllocatorTest;
406
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700407 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700408}; // ArenaAllocator
409
// Immutable snapshot descriptor for dumping allocator statistics: couples a
// label with the stats object and the first arena of the chain they cover.
class MemStats {
 public:
  // `lost_bytes_adjustment` is forwarded to ArenaAllocatorStats::Dump to
  // correct the reported totals.
  MemStats(const char* name,
           const ArenaAllocatorStats* stats,
           const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
buzbee862a7602013-04-05 10:58:54 -0700424
425} // namespace art
426
David Sehr1ce2b3b2018-04-05 11:02:03 -0700427#endif // ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_