blob: f92fbea15df28b9acf161ee84a5971a584b19cd4 [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
18#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
20#include <stdint.h>
21#include <stddef.h>
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022
Vladimir Marko80afd022015-05-19 18:08:00 +010023#include "base/bit_utils.h"
Vladimir Marko4e335d02016-12-19 16:04:33 +000024#include "base/dchecked_vector.h"
Vladimir Marko2a408a32015-09-18 14:11:00 +010025#include "base/memory_tool.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080026#include "debug_stack.h"
27#include "macros.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080028#include "mutex.h"
buzbee862a7602013-04-05 10:58:54 -070029
30namespace art {
31
// Forward declarations for the arena machinery defined below and in
// related headers/translation units.
class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemMap;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

// Compile-time switch: when true, ArenaAllocatorStats resolves to the
// counting ArenaAllocatorStatsImpl<true>, which tracks per-kind allocation
// statistics; when false, all stats calls compile to no-ops.
static constexpr bool kArenaAllocatorCountAllocations = false;
44
// Type of allocation for memory tuning.
// NOTE(review): these values presumably index the kAllocNames[] table and the
// per-kind alloc_stats_ vector in ArenaAllocatorStatsImpl — keep the order in
// sync and keep kNumArenaAllocKinds last (it doubles as the kind count).
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocSwitchTable,
  kArenaAllocSlowPaths,
  kArenaAllocGrowableBitMap,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocDCE,
  kArenaAllocLSE,
  kArenaAllocLICM,
  kArenaAllocLoopOptimization,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocRegisterAllocatorValidate,
  kArenaAllocStackMapStream,
  kArenaAllocVectorNode,
  kArenaAllocCodeGenerator,
  kArenaAllocAssembler,
  kArenaAllocParallelMoveResolver,
  kArenaAllocGraphChecker,
  kArenaAllocVerifier,
  kArenaAllocCallingConvention,
  kArenaAllocCHA,
  kArenaAllocScheduler,
  kNumArenaAllocKinds
};
103
// Allocation statistics, selected at compile time via the kCount parameter.
template <bool kCount>
class ArenaAllocatorStatsImpl;

// Non-counting specialization: every operation is an inline no-op, so the
// stats machinery costs nothing when kArenaAllocatorCountAllocations is false.
template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other ATTRIBUTE_UNUSED) {}
  void RecordAlloc(size_t bytes ATTRIBUTE_UNUSED, ArenaAllocKind kind ATTRIBUTE_UNUSED) {}
  // The no-op implementation reports zero allocations and zero bytes.
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os ATTRIBUTE_UNUSED,
            const Arena* first ATTRIBUTE_UNUSED,
            ssize_t lost_bytes_adjustment ATTRIBUTE_UNUSED) const {}
};
122
// Counting implementation (instantiated with kCount == true): records the
// number of allocations and bytes per ArenaAllocKind. Member functions are
// defined out-of-line in the .cc file.
template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  // Copies the counters from another stats object.
  void Copy(const ArenaAllocatorStatsImpl& other);
  // Accounts `bytes` against the given allocation kind.
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  // Dumps per-kind usage for the arena chain starting at `first`;
  // lost_bytes_adjustment corrects for unusable/lost space.
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  dchecked_vector<size_t> alloc_stats_;  // Bytes used by various allocation kinds.

  // Human-readable names, one per ArenaAllocKind (keep in sync with the enum).
  static const char* const kAllocNames[];
};

// The concrete stats type used by ArenaAllocator, chosen by the global switch.
typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700144
// Compile-time/run-time check for whether a memory tool (e.g. Valgrind or a
// sanitizer) is active. Valgrind must be detected at run time; other tools
// are known at compile time.
template <bool kAvailable, bool kValgrind>
class ArenaAllocatorMemoryToolCheckImpl {
  // This is the generic template but since there is a partial specialization
  // for kValgrind == false, this can be instantiated only for kValgrind == true.
  static_assert(kValgrind, "This template can be instantiated only for Valgrind.");
  static_assert(kAvailable, "Valgrind implies memory tool availability.");

 public:
  // RUNNING_ON_MEMORY_TOOL is evaluated once at construction time.
  ArenaAllocatorMemoryToolCheckImpl() : is_running_on_valgrind_(RUNNING_ON_MEMORY_TOOL) { }
  bool IsRunningOnMemoryTool() { return is_running_on_valgrind_; }

 private:
  const bool is_running_on_valgrind_;
};

// Non-Valgrind specialization: the answer is fully known at compile time.
template <bool kAvailable>
class ArenaAllocatorMemoryToolCheckImpl<kAvailable, false> {
 public:
  ArenaAllocatorMemoryToolCheckImpl() { }
  bool IsRunningOnMemoryTool() { return kAvailable; }
};

typedef ArenaAllocatorMemoryToolCheckImpl<kMemoryToolIsAvailable, kMemoryToolIsValgrind>
    ArenaAllocatorMemoryToolCheck;
169
// Mixin providing memory-tool region annotations. The guards are inlined so
// that, when no tool is running, each call costs only a predicted-not-taken
// branch; the actual annotation work lives out-of-line in the DoMake*()
// functions (defined in the .cc file).
class ArenaAllocatorMemoryTool : private ArenaAllocatorMemoryToolCheck {
 public:
  using ArenaAllocatorMemoryToolCheck::IsRunningOnMemoryTool;

  // Mark [ptr, ptr + size) as defined for the memory tool.
  void MakeDefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeDefined(ptr, size);
    }
  }
  // Mark [ptr, ptr + size) as allocated-but-undefined.
  void MakeUndefined(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeUndefined(ptr, size);
    }
  }
  // Mark [ptr, ptr + size) as inaccessible.
  void MakeInaccessible(void* ptr, size_t size) {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      DoMakeInaccessible(ptr, size);
    }
  }

 private:
  void DoMakeDefined(void* ptr, size_t size);
  void DoMakeUndefined(void* ptr, size_t size);
  void DoMakeInaccessible(void* ptr, size_t size);
};
195
// A single contiguous chunk of memory obtained from an ArenaPool and bump-
// allocated by ArenaAllocator / ArenaStack. Arenas are chained through next_.
class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used in between uses and uses madvise for memory usage.
  virtual void Release() { }
  // Start of the arena's backing memory.
  uint8_t* Begin() {
    return memory_;
  }

  // One past the last byte of the arena's backing memory.
  uint8_t* End() {
    return memory_ + size_;
  }

  // Total capacity in bytes.
  size_t Size() const {
    return size_;
  }

  // Bytes not yet handed out.
  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  // Note: only the allocated prefix [memory_, memory_ + bytes_allocated_)
  // counts, not the full capacity.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;  // Number of bytes handed out so far.
  uint8_t* memory_;         // Backing storage; set up by subclasses.
  size_t size_;             // Capacity of memory_ in bytes.
  Arena* next_;             // Next arena in the chain (pool free list or allocator list).
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

  friend class ArenaAllocatorTest;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};
246
// Arena backed by heap memory (see arena_allocator.cc for the allocation).
class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = Arena::kDefaultSize);
  virtual ~MallocArena();
};
252
// Arena backed by a MemMap. Overrides Release() so unused pages can be
// returned to the kernel between uses (cf. Arena::Release's madvise note).
class MemMapArena FINAL : public Arena {
 public:
  // low_4gb requests a mapping below 4GB; name labels the mapping.
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;  // Owns the underlying mapping.
};
262
// A pool that hands out arenas and keeps returned ones on a free list for
// reuse. All public operations are guarded by lock_.
class ArenaPool {
 public:
  // use_malloc selects malloc-backed vs. mem-mapped arenas; low_4gb and name
  // apply to the mem-mapped case.
  explicit ArenaPool(bool use_malloc = true,
                     bool low_4gb = false,
                     const char* name = "LinearAlloc");
  ~ArenaPool();
  // Returns an arena with at least `size` bytes of capacity.
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  // Returns a whole chain of arenas (linked through Arena::next_) to the pool.
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  void ReclaimMemory() NO_THREAD_SAFETY_ANALYSIS;
  void LockReclaimMemory() REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
  // if use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);  // Head of the free-arena list.
  const bool low_4gb_;
  const char* name_;
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
286
Vladimir Markofda04322015-11-11 18:45:50 +0000287// Fast single-threaded allocator for zero-initialized memory chunks.
288//
289// Memory is allocated from ArenaPool in large chunks and then rationed through
290// the ArenaAllocator. It's returned to the ArenaPool only when the ArenaAllocator
291// is destroyed.
Vladimir Marko2a408a32015-09-18 14:11:00 +0100292class ArenaAllocator
293 : private DebugStackRefCounter, private ArenaAllocatorStats, private ArenaAllocatorMemoryTool {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700294 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700295 explicit ArenaAllocator(ArenaPool* pool);
296 ~ArenaAllocator();
297
Vladimir Marko2a408a32015-09-18 14:11:00 +0100298 using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
299 using ArenaAllocatorMemoryTool::MakeDefined;
300 using ArenaAllocatorMemoryTool::MakeUndefined;
301 using ArenaAllocatorMemoryTool::MakeInaccessible;
302
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100303 // Get adapter for use in STL containers. See arena_containers.h .
304 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
305
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700306 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000307 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100308 if (UNLIKELY(IsRunningOnMemoryTool())) {
309 return AllocWithMemoryTool(bytes, kind);
Mathieu Chartier75165d02013-09-12 14:00:31 -0700310 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800311 bytes = RoundUp(bytes, kAlignment);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000312 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100313 if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
314 return AllocFromNewArena(bytes);
315 }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700316 uint8_t* ret = ptr_;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700317 DCHECK_ALIGNED(ret, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700318 ptr_ += bytes;
319 return ret;
320 }
321
Mathieu Chartiere401d142015-04-22 13:56:20 -0700322 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
Vladimir Marko6374c582017-03-13 14:51:19 +0000323 void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
Mathieu Chartiere401d142015-04-22 13:56:20 -0700324 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
325 DCHECK_GE(new_size, ptr_size);
326 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700327 // We always allocate aligned.
328 const size_t aligned_ptr_size = RoundUp(ptr_size, kAlignment);
329 auto* end = reinterpret_cast<uint8_t*>(ptr) + aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700330 // If we haven't allocated anything else, we can safely extend.
331 if (end == ptr_) {
Vladimir Marko2a408a32015-09-18 14:11:00 +0100332 DCHECK(!IsRunningOnMemoryTool()); // Red zone prevents end == ptr_.
Andreas Gampef6dd8292016-08-19 20:22:19 -0700333 const size_t aligned_new_size = RoundUp(new_size, kAlignment);
334 const size_t size_delta = aligned_new_size - aligned_ptr_size;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700335 // Check remain space.
336 const size_t remain = end_ - ptr_;
337 if (remain >= size_delta) {
338 ptr_ += size_delta;
339 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
Andreas Gampef6dd8292016-08-19 20:22:19 -0700340 DCHECK_ALIGNED(ptr_, kAlignment);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700341 return ptr;
342 }
343 }
Andreas Gampef6dd8292016-08-19 20:22:19 -0700344 auto* new_ptr = Alloc(new_size, kind); // Note: Alloc will take care of aligning new_size.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700345 memcpy(new_ptr, ptr, ptr_size);
346 // TODO: Call free on ptr if linear alloc supports free.
347 return new_ptr;
348 }
349
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000350 template <typename T>
Vladimir Markof6a35de2016-03-21 12:01:50 +0000351 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
352 return AllocArray<T>(1, kind);
353 }
354
355 template <typename T>
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000356 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
357 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100358 }
359
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700360 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700361
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000362 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700363
Mathieu Chartierc7853442015-03-27 14:35:38 -0700364 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
365 // TODO: Change BytesAllocated to this behavior?
366 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700367
Mathieu Chartiere401d142015-04-22 13:56:20 -0700368 ArenaPool* GetArenaPool() const {
369 return pool_;
370 }
371
372 bool Contains(const void* ptr) const;
373
Vladimir Marko6374c582017-03-13 14:51:19 +0000374 static constexpr size_t kAlignment = 8;
Andreas Gampef6dd8292016-08-19 20:22:19 -0700375
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700376 private:
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100377 void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
Vladimir Marko3f84f2c2016-04-25 19:40:34 +0100378 uint8_t* AllocFromNewArena(size_t bytes);
Vladimir Marko6374c582017-03-13 14:51:19 +0000379
Mathieu Chartierb666f482015-02-18 14:33:14 -0800380
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700381 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700382
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700383 ArenaPool* pool_;
384 uint8_t* begin_;
385 uint8_t* end_;
386 uint8_t* ptr_;
387 Arena* arena_head_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700388
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100389 template <typename U>
390 friend class ArenaAllocatorAdapter;
391
Vladimir Marko3e0e7172016-04-22 18:07:13 +0100392 friend class ArenaAllocatorTest;
393
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700394 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700395}; // ArenaAllocator
396
// Snapshot handle for dumping allocation statistics of a named allocator.
// Holds non-owning pointers; the stats and arena chain must outlive it.
class MemStats {
 public:
  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  // Write the statistics to `os`.
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;                  // Label printed with the stats.
  const ArenaAllocatorStats* const stats_;  // Not owned.
  const Arena* const first_arena_;          // Head of the arena chain; not owned.
  const ssize_t lost_bytes_adjustment_;     // Correction applied when dumping.
};  // MemStats
buzbee862a7602013-04-05 10:58:54 -0700409
410} // namespace art
411
Mathieu Chartierb666f482015-02-18 14:33:14 -0800412#endif // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_