/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
David Sehr1ce2b3b2018-04-05 11:02:03 -070017#ifndef ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
18#define ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
buzbee862a7602013-04-05 10:58:54 -070020#include <stddef.h>
Andreas Gampe8cf9cb32017-07-19 09:28:38 -070021#include <stdint.h>
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022
David Sehr1979c642018-04-26 14:41:18 -070023#include "bit_utils.h"
24#include "debug_stack.h"
25#include "dchecked_vector.h"
26#include "macros.h"
27#include "memory_tool.h"
buzbee862a7602013-04-05 10:58:54 -070028
29namespace art {
30
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070031class Arena;
32class ArenaPool;
33class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000034class ArenaStack;
35class ScopedArenaAllocator;
36class MemStats;
37
Vladimir Marko8081d2b2014-07-31 15:33:43 +010038template <typename T>
39class ArenaAllocatorAdapter;
40
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000041static constexpr bool kArenaAllocatorCountAllocations = false;
42
43// Type of allocation for memory tuning.
44enum ArenaAllocKind {
45 kArenaAllocMisc,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010046 kArenaAllocSwitchTable,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010047 kArenaAllocSlowPaths,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000048 kArenaAllocGrowableBitMap,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000049 kArenaAllocSTL,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010050 kArenaAllocGraphBuilder,
Vladimir Markof9f64412015-09-02 14:05:49 +010051 kArenaAllocGraph,
52 kArenaAllocBasicBlock,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010053 kArenaAllocBlockList,
54 kArenaAllocReversePostOrder,
55 kArenaAllocLinearOrder,
56 kArenaAllocConstantsMap,
Vladimir Marko60584552015-09-03 13:35:12 +000057 kArenaAllocPredecessors,
58 kArenaAllocSuccessors,
59 kArenaAllocDominated,
Vladimir Markof9f64412015-09-02 14:05:49 +010060 kArenaAllocInstruction,
Igor Murashkind01745e2017-04-05 16:40:31 -070061 kArenaAllocConstructorFenceInputs,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010062 kArenaAllocInvokeInputs,
63 kArenaAllocPhiInputs,
Vladimir Marko175e7862018-03-27 09:03:13 +000064 kArenaAllocTypeCheckInputs,
Vladimir Markof9f64412015-09-02 14:05:49 +010065 kArenaAllocLoopInfo,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010066 kArenaAllocLoopInfoBackEdges,
Vladimir Markof9f64412015-09-02 14:05:49 +010067 kArenaAllocTryCatchInfo,
68 kArenaAllocUseListNode,
69 kArenaAllocEnvironment,
Vladimir Markofa6b93c2015-09-15 10:15:55 +010070 kArenaAllocEnvironmentVRegs,
71 kArenaAllocEnvironmentLocations,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010072 kArenaAllocLocationSummary,
Vladimir Marko71bf8092015-09-15 15:33:14 +010073 kArenaAllocSsaBuilder,
Vladimir Markof9f64412015-09-02 14:05:49 +010074 kArenaAllocMoveOperands,
75 kArenaAllocCodeBuffer,
76 kArenaAllocStackMaps,
Vladimir Markof9f64412015-09-02 14:05:49 +010077 kArenaAllocOptimization,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010078 kArenaAllocGvn,
Vladimir Marko5233f932015-09-29 19:01:15 +010079 kArenaAllocInductionVarAnalysis,
80 kArenaAllocBoundsCheckElimination,
Vladimir Markof6a35de2016-03-21 12:01:50 +000081 kArenaAllocDCE,
Vladimir Marko009d1662017-10-10 13:21:15 +010082 kArenaAllocLSA,
Vladimir Markof6a35de2016-03-21 12:01:50 +000083 kArenaAllocLSE,
Igor Murashkindd018df2017-08-09 10:38:31 -070084 kArenaAllocCFRE,
Vladimir Markof6a35de2016-03-21 12:01:50 +000085 kArenaAllocLICM,
Aart Bik96202302016-10-04 17:33:56 -070086 kArenaAllocLoopOptimization,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010087 kArenaAllocSsaLiveness,
88 kArenaAllocSsaPhiElimination,
89 kArenaAllocReferenceTypePropagation,
Aart Bik6d057002018-04-09 15:39:58 -070090 kArenaAllocSelectGenerator,
Vladimir Marko2aaa4b52015-09-17 17:03:26 +010091 kArenaAllocSideEffectsAnalysis,
92 kArenaAllocRegisterAllocator,
Vladimir Markof6a35de2016-03-21 12:01:50 +000093 kArenaAllocRegisterAllocatorValidate,
Vladimir Marko225b6462015-09-28 12:17:40 +010094 kArenaAllocStackMapStream,
David Srbeckydd966bc2018-05-24 13:55:52 +010095 kArenaAllocBitTableBuilder,
Aart Bik92685a82017-03-06 11:13:43 -080096 kArenaAllocVectorNode,
Vladimir Marko225b6462015-09-28 12:17:40 +010097 kArenaAllocCodeGenerator,
Vladimir Marko93205e32016-04-13 11:59:46 +010098 kArenaAllocAssembler,
Vladimir Marko225b6462015-09-28 12:17:40 +010099 kArenaAllocParallelMoveResolver,
Vladimir Marko655e5852015-10-12 10:38:28 +0100100 kArenaAllocGraphChecker,
Mathieu Chartierde40d472015-10-15 17:47:48 -0700101 kArenaAllocVerifier,
Vladimir Marko93205e32016-04-13 11:59:46 +0100102 kArenaAllocCallingConvention,
Mingyao Yang063fc772016-08-02 11:02:54 -0700103 kArenaAllocCHA,
Alexandre Rames22aa54b2016-10-18 09:32:29 +0100104 kArenaAllocScheduler,
Calin Juravlecc3171a2017-05-19 16:47:53 -0700105 kArenaAllocProfile,
Artem Serov7f4aff62017-06-21 17:02:18 +0100106 kArenaAllocSuperblockCloner,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000107 kNumArenaAllocKinds
108};
109
110template <bool kCount>
111class ArenaAllocatorStatsImpl;
112
113template <>
114class ArenaAllocatorStatsImpl<false> {
115 public:
116 ArenaAllocatorStatsImpl() = default;
117 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
118 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
119
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100120 void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
121 void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000122 size_t NumAllocations() const { return 0u; }
123 size_t BytesAllocated() const { return 0u; }
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100124 void Dump(std::ostream& os ATTRIBUTE_UNUSED,
125 const Arena* first ATTRIBUTE_UNUSED,
126 ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000127};
128
129template <bool kCount>
130class ArenaAllocatorStatsImpl {
131 public:
132 ArenaAllocatorStatsImpl();
133 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
134 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
135
136 void Copy(const ArenaAllocatorStatsImpl& other);
137 void RecordAlloc(size_t bytes, ArenaAllocKind kind);
138 size_t NumAllocations() const;
139 size_t BytesAllocated() const;
140 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
141
142 private:
143 size_t num_allocations_;
Vladimir Marko4e335d02016-12-19 16:04:33 +0000144 dchecked_vector<size_t> alloc_stats_; // Bytes used by various allocation kinds.
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000145
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100146 static const char* const kAllocNames[];
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000147};
148
149typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700150
Roland Levillain05e34f42018-05-24 13:19:05 +0000151class ArenaAllocatorMemoryTool {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100152 public:
Roland Levillain05e34f42018-05-24 13:19:05 +0000153 bool IsRunningOnMemoryTool() { return kMemoryToolIsAvailable; }
Vladimir Marko2a408a32015-09-18 14:11:00 +0100154
155 void MakeDefined(void* ptr, size_t size) {
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000156 if (UNLIKELY(IsRunningOnMemoryTool())) {
157 DoMakeDefined(ptr, size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100158 }
159 }
160 void MakeUndefined(void* ptr, size_t size) {
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000161 if (UNLIKELY(IsRunningOnMemoryTool())) {
162 DoMakeUndefined(ptr, size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100163 }
164 }
165 void MakeInaccessible(void* ptr, size_t size) {
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000166 if (UNLIKELY(IsRunningOnMemoryTool())) {
167 DoMakeInaccessible(ptr, size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100168 }
169 }
Vladimir Marko7bda3b62015-10-07 12:44:31 +0000170
171 private:
172 void DoMakeDefined(void* ptr, size_t size);
173 void DoMakeUndefined(void* ptr, size_t size);
174 void DoMakeInaccessible(void* ptr, size_t size);
Vladimir Marko2a408a32015-09-18 14:11:00 +0100175};
176
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700177class Arena {
178 public:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700179 Arena();
180 virtual ~Arena() { }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700181 // Reset is for pre-use and uses memset for performance.
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700182 void Reset();
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700183 // Release is used inbetween uses and uses madvise for memory usage.
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700184 virtual void Release() { }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700185 uint8_t* Begin() {
186 return memory_;
buzbee862a7602013-04-05 10:58:54 -0700187 }
188
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700189 uint8_t* End() {
190 return memory_ + size_;
191 }
192
193 size_t Size() const {
194 return size_;
195 }
196
197 size_t RemainingSpace() const {
198 return Size() - bytes_allocated_;
199 }
200
Mathieu Chartier49285c52014-12-02 15:43:48 -0800201 size_t GetBytesAllocated() const {
202 return bytes_allocated_;
203 }
204
Mathieu Chartiere401d142015-04-22 13:56:20 -0700205 // Return true if ptr is contained in the arena.
206 bool Contains(const void* ptr) const {
207 return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
208 }
209
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700210 protected:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700211 size_t bytes_allocated_;
212 uint8_t* memory_;
213 size_t size_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700214 Arena* next_;
David Sehr3215fff2018-04-03 17:10:12 -0700215 friend class MallocArenaPool;
216 friend class MemMapArenaPool;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700217 friend class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000218 friend class ArenaStack;
219 friend class ScopedArenaAllocator;
220 template <bool kCount> friend class ArenaAllocatorStatsImpl;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700221
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100222 friend class ArenaAllocatorTest;
223
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700224 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700225 DISALLOW_COPY_AND_ASSIGN(Arena);
226};
227
228class ArenaPool {
229 public:
David Sehr3215fff2018-04-03 17:10:12 -0700230 virtual ~ArenaPool() = default;
231
232 virtual Arena* AllocArena(size_t size) = 0;
233 virtual void FreeArenaChain(Arena* first) = 0;
234 virtual size_t GetBytesAllocated() const = 0;
235 virtual void ReclaimMemory() = 0;
236 virtual void LockReclaimMemory() = 0;
237 // Trim the maps in arenas by madvising, used by JIT to reduce memory usage.
238 virtual void TrimMaps() = 0;
239
240 protected:
241 ArenaPool() = default;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700242
243 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700244 DISALLOW_COPY_AND_ASSIGN(ArenaPool);
245};
246
Vladimir Markofda04322015-11-11 18:45:50 +0000247// Fast single-threaded allocator for zero-initialized memory chunks.
248//
249// Memory is allocated from ArenaPool in large chunks and then rationed through
250// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
251// is destroyed.
Vladimir Marko2a408a32015-09-18 14:11:00 +0100252class ArenaAllocator
253 : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700254 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700255 explicit ArenaAllocator(ArenaPool* pool);
256 ~ArenaAllocator();
257
Vladimir Marko2a408a32015-09-18 14:11:00 +0100258 using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
259 using ArenaAllocatorMemoryTool::MakeDefined;
260 using ArenaAllocatorMemoryTool::MakeUndefined;
261 using ArenaAllocatorMemoryTool::MakeInaccessible;
262
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100263 // Get adapter for use in STL containers. See arena_containers.h .
264 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
265
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700266 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000267 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100268 if (UNLIKELY(IsRunningOnMemoryTool())) {
269 return AllocWithMemoryTool(bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700270 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800271 bytes = RoundUp(bytes, kAlignment);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000272 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100273 if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
274 return AllocFromNewArena(bytes);
275 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700276 uint8_t* ret = ptr_;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700277 DCHECK_ALIGNED(ret, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700278 ptr_ += bytes;
279 return ret;
280 }
281
Vladimir Markof44d36c2017-03-14 14:18:46 +0000282 // Returns zeroed memory.
283 void* AllocAlign16(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
284 // It is an error to request 16-byte aligned allocation of unaligned size.
285 DCHECK_ALIGNED(bytes, 16);
286 if (UNLIKELY(IsRunningOnMemoryTool())) {
287 return AllocWithMemoryToolAlign16(bytes, kind);
288 }
289 uintptr_t padding =
290 ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & 15u) - reinterpret_cast<uintptr_t>(ptr_);
291 ArenaAllocatorStats::RecordAlloc(bytes, kind);
292 if (UNLIKELY(padding + bytes > static_cast<size_t>(end_ - ptr_))) {
293 static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
294 return AllocFromNewArena(bytes);
295 }
296 ptr_ += padding;
297 uint8_t* ret = ptr_;
298 DCHECK_ALIGNED(ret, 16);
299 ptr_ += bytes;
300 return ret;
301 }
302
Mathieu Chartiere401d142015-04-22 13:56:20 -0700303 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
Vladimir Markof44d36c2017-03-14 14:18:46 +0000304 void* Realloc(void* ptr,
305 size_t ptr_size,
306 size_t new_size,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700307 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
308 DCHECK_GE(new_size, ptr_size);
309 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700310 // We always allocate aligned.
311 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
312 auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700313 // If we haven't allocated anything else, we can safely extend.
314 if (end == ptr_) {
Andreas Gampe421dbc22017-07-11 08:32:29 -0700315 // Red zone prevents end == ptr_ (unless input = allocator state = null).
316 DCHECK(!IsRunningOnMemoryTool() || ptr_ == nullptr);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700317 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
318 const size_t size_delta = aligned_new_size - aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700319 // Check remain space.
320 const size_t remain = end_ - ptr_;
321 if (remain >= size_delta) {
322 ptr_ += size_delta;
323 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700324 DCHECK_ALIGNED(ptr_, kAlignment);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700325 return ptr;
326 }
327 }
Andreas Gampef6dd8292016-08-19 20:22:19 -0700328 auto* new_ptr = Alloc(new_size, kind); // Note: Alloc will take care of aligning new_size.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700329 memcpy(new_ptr, ptr, ptr_size);
330 // TODO: Call free on ptr if linear alloc supports free.
331 return new_ptr;
332 }
333
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000334 template <typename T>
Vladimir Markof6a35de2016-03-21 12:01:50 +0000335 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
336 return AllocArray<T>(1, kind);
337 }
338
339 template <typename T>
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000340 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
341 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100342 }
343
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700344 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700345
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000346 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700347
Mathieu Chartierc7853442015-03-27 14:35:38 -0700348 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
349 // TODO: Change BytesAllocated to this behavior?
350 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700351
Mathieu Chartiere401d142015-04-22 13:56:20 -0700352 ArenaPool* GetArenaPool() const {
353 return pool_;
354 }
355
356 bool Contains(const void* ptr) const;
357
Vladimir Markof44d36c2017-03-14 14:18:46 +0000358 // The alignment guaranteed for individual allocations.
359 static constexpr size_t kAlignment = 8u;
360
361 // The alignment required for the whole Arena rather than individual allocations.
362 static constexpr size_t kArenaAlignment = 16u;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700363
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700364 private:
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100365 void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
Vladimir Markof44d36c2017-03-14 14:18:46 +0000366 void* AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind);
Vladimir Marko3f84f2c2016-04-25 19:40:34 +0100367 uint8_t* AllocFromNewArena(size_t bytes);
Vladimir Markof44d36c2017-03-14 14:18:46 +0000368 uint8_t* AllocFromNewArenaWithMemoryTool(size_t bytes);
Mathieu Chartierb666f482015-02-18 14:33:14 -0800369
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700370 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700371
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700372 ArenaPool* pool_;
373 uint8_t* begin_;
374 uint8_t* end_;
375 uint8_t* ptr_;
376 Arena* arena_head_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700377
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100378 template <typename U>
379 friend class ArenaAllocatorAdapter;
380
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100381 friend class ArenaAllocatorTest;
382
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700383 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700384}; // ArenaAllocator
385
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000386class MemStats {
387 public:
Vladimir Markof44d36c2017-03-14 14:18:46 +0000388 MemStats(const char* name,
389 const ArenaAllocatorStats* stats,
390 const Arena* first_arena,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000391 ssize_t lost_bytes_adjustment = 0);
392 void Dump(std::ostream& os) const;
393
394 private:
395 const char* const name_;
396 const ArenaAllocatorStats* const stats_;
397 const Arena* const first_arena_;
398 const ssize_t lost_bytes_adjustment_;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700399}; // MemStats
buzbee862a7602013-04-05 10:58:54 -0700400
401} // namespace art
402
David Sehr1ce2b3b2018-04-05 11:02:03 -0700403#endif // ART_LIBARTBASE_BASE_ARENA_ALLOCATOR_H_