blob: b4f19ee8da0a6a77beb7f6b0627780ba0447888e [file] [log] [blame]
buzbee862a7602013-04-05 10:58:54 -07001/*
2 * Copyright (C) 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
18#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
#include <stddef.h>
#include <stdint.h>
#include <string.h>  // For memcpy() used by ArenaAllocator::Realloc().

#include "base/bit_utils.h"
#include "debug_stack.h"
#include "macros.h"
#include "mutex.h"
buzbee862a7602013-04-05 10:58:54 -070027
28namespace art {
29
// Forward declarations to keep this header light-weight.
class Arena;
class ArenaPool;
class ArenaAllocator;
class ArenaStack;
class ScopedArenaAllocator;
class MemMap;
class MemStats;

template <typename T>
class ArenaAllocatorAdapter;

// Compile-time switch: when true, the counting ArenaAllocatorStatsImpl below
// is selected (via the ArenaAllocatorStats typedef) and per-kind allocation
// statistics are recorded.
static constexpr bool kArenaAllocatorCountAllocations = false;
42
// Type of allocation for memory tuning.
// Note: the order and count of these enumerators must stay in sync with the
// kAllocNames[] table declared in ArenaAllocatorStatsImpl (defined in the
// .cc file); add new kinds immediately before kNumArenaAllocKinds.
enum ArenaAllocKind {
  kArenaAllocMisc,
  kArenaAllocBBList,
  kArenaAllocBBPredecessors,
  kArenaAllocDfsPreOrder,
  kArenaAllocDfsPostOrder,
  kArenaAllocDomPostOrder,
  kArenaAllocTopologicalSortOrder,
  kArenaAllocLoweringInfo,
  kArenaAllocLIR,
  kArenaAllocLIRResourceMask,
  kArenaAllocSwitchTable,
  kArenaAllocFillArrayData,
  kArenaAllocSlowPaths,
  kArenaAllocMIR,
  kArenaAllocDFInfo,
  kArenaAllocGrowableArray,
  kArenaAllocGrowableBitMap,
  kArenaAllocSSAToDalvikMap,
  kArenaAllocDalvikToSSAMap,
  kArenaAllocDebugInfo,
  kArenaAllocRegAlloc,
  kArenaAllocData,
  kArenaAllocSTL,
  kArenaAllocGraphBuilder,
  kArenaAllocGraph,
  kArenaAllocBasicBlock,
  kArenaAllocBlockList,
  kArenaAllocReversePostOrder,
  kArenaAllocLinearOrder,
  kArenaAllocConstantsMap,
  kArenaAllocPredecessors,
  kArenaAllocSuccessors,
  kArenaAllocDominated,
  kArenaAllocInstruction,
  kArenaAllocInvokeInputs,
  kArenaAllocPhiInputs,
  kArenaAllocLoopInfo,
  kArenaAllocLoopInfoBackEdges,
  kArenaAllocTryCatchInfo,
  kArenaAllocUseListNode,
  kArenaAllocEnvironment,
  kArenaAllocEnvironmentVRegs,
  kArenaAllocEnvironmentLocations,
  kArenaAllocLocationSummary,
  kArenaAllocSsaBuilder,
  kArenaAllocMoveOperands,
  kArenaAllocCodeBuffer,
  kArenaAllocStackMaps,
  kArenaAllocBaselineMaps,
  kArenaAllocOptimization,
  kArenaAllocGvn,
  kArenaAllocInductionVarAnalysis,
  kArenaAllocBoundsCheckElimination,
  kArenaAllocSsaLiveness,
  kArenaAllocSsaPhiElimination,
  kArenaAllocReferenceTypePropagation,
  kArenaAllocPrimitiveTypePropagation,
  kArenaAllocSideEffectsAnalysis,
  kArenaAllocRegisterAllocator,
  kArenaAllocStackMapStream,
  kArenaAllocCodeGenerator,
  kArenaAllocParallelMoveResolver,
  kNumArenaAllocKinds  // Must remain last: used to size per-kind stat arrays.
};
109
// Statistics mix-in for ArenaAllocator; the kCount parameter selects between
// this no-op specialization and the counting implementation below.
template <bool kCount>
class ArenaAllocatorStatsImpl;

// No-op specialization (kCount == false): every recording function compiles
// away and all queries report zero, so production builds pay no cost.
template <>
class ArenaAllocatorStatsImpl<false> {
 public:
  ArenaAllocatorStatsImpl() = default;
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  // All of the following intentionally do nothing / return 0.
  void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
  void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
  size_t NumAllocations() const { return 0u; }
  size_t BytesAllocated() const { return 0u; }
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
    UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
  }
};
128
// Counting implementation of the statistics mix-in (used when kCount is
// true): tracks the total number of allocations and the bytes allocated per
// ArenaAllocKind. Member functions are declared here and defined out of line.
template <bool kCount>
class ArenaAllocatorStatsImpl {
 public:
  ArenaAllocatorStatsImpl();
  ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
  ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;

  void Copy(const ArenaAllocatorStatsImpl& other);
  void RecordAlloc(size_t bytes, ArenaAllocKind kind);
  size_t NumAllocations() const;
  size_t BytesAllocated() const;
  void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;

 private:
  size_t num_allocations_;
  // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
  size_t alloc_stats_[kNumArenaAllocKinds];  // Bytes used by various allocation kinds.

  // Human-readable names indexed by ArenaAllocKind; defined in the .cc file.
  static const char* const kAllocNames[];
};
149
150typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700151
// A single contiguous region of memory that allocations are carved from.
// Arenas form intrusive singly-linked chains via next_ and are managed by the
// friend classes below (ArenaPool, ArenaAllocator, ArenaStack).
class Arena {
 public:
  static constexpr size_t kDefaultSize = 128 * KB;
  Arena();
  virtual ~Arena() { }
  // Reset is for pre-use and uses memset for performance.
  void Reset();
  // Release is used in between uses and uses madvise for memory usage.
  virtual void Release() { }
  // Start of the backing storage.
  uint8_t* Begin() {
    return memory_;
  }

  // One past the end of the backing storage.
  uint8_t* End() {
    return memory_ + size_;
  }

  // Total capacity in bytes.
  size_t Size() const {
    return size_;
  }

  // Bytes not yet handed out.
  size_t RemainingSpace() const {
    return Size() - bytes_allocated_;
  }

  // Bytes already handed out.
  size_t GetBytesAllocated() const {
    return bytes_allocated_;
  }

  // Return true if ptr is contained in the arena.
  // Note: only the allocated prefix counts, not the full capacity.
  bool Contains(const void* ptr) const {
    return memory_ <= ptr && ptr < memory_ + bytes_allocated_;
  }

 protected:
  size_t bytes_allocated_;  // Number of bytes handed out so far.
  uint8_t* memory_;         // Start of the backing storage.
  size_t size_;             // Capacity of the backing storage.
  Arena* next_;             // Next arena in an intrusive singly-linked chain.
  friend class ArenaPool;
  friend class ArenaAllocator;
  friend class ArenaStack;
  friend class ScopedArenaAllocator;
  template <bool kCount> friend class ArenaAllocatorStatsImpl;

 private:
  DISALLOW_COPY_AND_ASSIGN(Arena);
};
200
// Arena whose backing storage is heap-allocated. Presumably selected by
// ArenaPool when constructed with use_malloc == true — see the .cc file.
class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = Arena::kDefaultSize);
  virtual ~MallocArena();
};
206
// Arena backed by a memory mapping (MemMap), optionally placed in the low
// 4GB of the address space. Overrides Release() so pages can be returned to
// the OS between uses (see the base-class comment on Release()).
class MemMapArena FINAL : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;  // Owns the mapping backing memory_.
};
216
// A pool of reusable arenas. Freed arena chains are retained on free_arenas_
// (guarded by lock_) so later AllocArena() calls can presumably reuse them
// instead of allocating fresh memory — see the .cc file.
class ArenaPool {
 public:
  explicit ArenaPool(bool use_malloc = true, bool low_4gb = false);
  ~ArenaPool();
  Arena* AllocArena(size_t size) REQUIRES(!lock_);
  void FreeArenaChain(Arena* first) REQUIRES(!lock_);
  size_t GetBytesAllocated() const REQUIRES(!lock_);
  // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
  // when use_malloc is false.
  void TrimMaps() REQUIRES(!lock_);

 private:
  const bool use_malloc_;  // Selects MallocArena vs. MemMapArena backing.
  mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Arena* free_arenas_ GUARDED_BY(lock_);  // Intrusive list of recycled arenas.
  const bool low_4gb_;  // Request low-4GB mappings (MemMapArena only).
  DISALLOW_COPY_AND_ASSIGN(ArenaPool);
};
235
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100236class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700237 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700238 explicit ArenaAllocator(ArenaPool* pool);
239 ~ArenaAllocator();
240
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100241 // Get adapter for use in STL containers. See arena_containers.h .
242 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
243
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700244 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000245 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700246 if (UNLIKELY(is_running_on_memory_tool_)) {
Mathieu Chartier75165d02013-09-12 14:00:31 -0700247 return AllocValgrind(bytes, kind);
248 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800249 bytes = RoundUp(bytes, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700250 if (UNLIKELY(ptr_ + bytes > end_)) {
251 // Obtain a new block.
252 ObtainNewArenaForAllocation(bytes);
253 if (UNLIKELY(ptr_ == nullptr)) {
254 return nullptr;
255 }
256 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000257 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700258 uint8_t* ret = ptr_;
259 ptr_ += bytes;
260 return ret;
261 }
262
Mathieu Chartiere401d142015-04-22 13:56:20 -0700263 // Realloc never frees the input pointer, it is the caller's job to do this if necessary.
264 void* Realloc(void* ptr, size_t ptr_size, size_t new_size,
265 ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
266 DCHECK_GE(new_size, ptr_size);
267 DCHECK_EQ(ptr == nullptr, ptr_size == 0u);
268 auto* end = reinterpret_cast<uint8_t*>(ptr) + ptr_size;
269 // If we haven't allocated anything else, we can safely extend.
270 if (end == ptr_) {
271 const size_t size_delta = new_size - ptr_size;
272 // Check remain space.
273 const size_t remain = end_ - ptr_;
274 if (remain >= size_delta) {
275 ptr_ += size_delta;
276 ArenaAllocatorStats::RecordAlloc(size_delta, kind);
277 return ptr;
278 }
279 }
280 auto* new_ptr = Alloc(new_size, kind);
281 memcpy(new_ptr, ptr, ptr_size);
282 // TODO: Call free on ptr if linear alloc supports free.
283 return new_ptr;
284 }
285
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000286 template <typename T>
287 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
288 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100289 }
290
Mathieu Chartier75165d02013-09-12 14:00:31 -0700291 void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700292
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700293 void ObtainNewArenaForAllocation(size_t allocation_size);
Mathieu Chartiere401d142015-04-22 13:56:20 -0700294
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700295 size_t BytesAllocated() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700296
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000297 MemStats GetMemStats() const;
Mathieu Chartiere401d142015-04-22 13:56:20 -0700298
Mathieu Chartierc7853442015-03-27 14:35:38 -0700299 // The BytesUsed method sums up bytes allocated from arenas in arena_head_ and nodes.
300 // TODO: Change BytesAllocated to this behavior?
301 size_t BytesUsed() const;
buzbee862a7602013-04-05 10:58:54 -0700302
Mathieu Chartiere401d142015-04-22 13:56:20 -0700303 ArenaPool* GetArenaPool() const {
304 return pool_;
305 }
306
307 bool Contains(const void* ptr) const;
308
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700309 private:
Mathieu Chartierb666f482015-02-18 14:33:14 -0800310 static constexpr size_t kAlignment = 8;
311
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700312 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700313
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700314 ArenaPool* pool_;
315 uint8_t* begin_;
316 uint8_t* end_;
317 uint8_t* ptr_;
318 Arena* arena_head_;
Evgenii Stepanov1e133742015-05-20 12:30:59 -0700319 bool is_running_on_memory_tool_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700320
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100321 template <typename U>
322 friend class ArenaAllocatorAdapter;
323
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700324 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700325}; // ArenaAllocator
326
// Immutable snapshot handle for dumping allocation statistics: ties together
// a name, a stats mix-in, and the first arena of the chain they describe.
// Does not own any of the referenced objects.
class MemStats {
 public:
  MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
           ssize_t lost_bytes_adjustment = 0);
  void Dump(std::ostream& os) const;

 private:
  const char* const name_;
  const ArenaAllocatorStats* const stats_;
  const Arena* const first_arena_;
  // Signed correction applied when reporting, e.g. for bytes lost to
  // book-keeping — forwarded to ArenaAllocatorStatsImpl::Dump().
  const ssize_t lost_bytes_adjustment_;
};  // MemStats
buzbee862a7602013-04-05 10:58:54 -0700339
340} // namespace art
341
Mathieu Chartierb666f482015-02-18 14:33:14 -0800342#endif // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_