blob: cc7b856e849f60a7ab41cf738377a007cab976cd [file] [log] [blame]
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
18#define ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_
buzbee862a7602013-04-05 10:58:54 -070019
20#include <stdint.h>
21#include <stddef.h>
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070022
Mathieu Chartierb666f482015-02-18 14:33:14 -080023#include "debug_stack.h"
24#include "macros.h"
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070025#include "mem_map.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080026#include "mutex.h"
Andreas Gampe66018822014-05-05 20:47:19 -070027#include "utils.h"
buzbee862a7602013-04-05 10:58:54 -070028
29namespace art {
30
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -070031class Arena;
32class ArenaPool;
33class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000034class ArenaStack;
35class ScopedArenaAllocator;
36class MemStats;
37
Vladimir Marko8081d2b2014-07-31 15:33:43 +010038template <typename T>
39class ArenaAllocatorAdapter;
40
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000041static constexpr bool kArenaAllocatorCountAllocations = false;
42
43// Type of allocation for memory tuning.
44enum ArenaAllocKind {
45 kArenaAllocMisc,
46 kArenaAllocBB,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010047 kArenaAllocBBList,
48 kArenaAllocBBPredecessors,
49 kArenaAllocDfsPreOrder,
50 kArenaAllocDfsPostOrder,
51 kArenaAllocDomPostOrder,
52 kArenaAllocTopologicalSortOrder,
53 kArenaAllocLoweringInfo,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000054 kArenaAllocLIR,
Vladimir Marko8dea81c2014-06-06 14:50:36 +010055 kArenaAllocLIRResourceMask,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010056 kArenaAllocSwitchTable,
57 kArenaAllocFillArrayData,
58 kArenaAllocSlowPaths,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000059 kArenaAllocMIR,
60 kArenaAllocDFInfo,
61 kArenaAllocGrowableArray,
62 kArenaAllocGrowableBitMap,
Vladimir Markoe39c54e2014-09-22 14:50:02 +010063 kArenaAllocSSAToDalvikMap,
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000064 kArenaAllocDalvikToSSAMap,
65 kArenaAllocDebugInfo,
66 kArenaAllocSuccessor,
67 kArenaAllocRegAlloc,
68 kArenaAllocData,
69 kArenaAllocPredecessors,
70 kArenaAllocSTL,
71 kNumArenaAllocKinds
72};
73
74template <bool kCount>
75class ArenaAllocatorStatsImpl;
76
77template <>
78class ArenaAllocatorStatsImpl<false> {
79 public:
80 ArenaAllocatorStatsImpl() = default;
81 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
82 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
83
84 void Copy(const ArenaAllocatorStatsImpl& other) { UNUSED(other); }
Ian Rogers6a3c1fc2014-10-31 00:33:20 -070085 void RecordAlloc(size_t bytes, ArenaAllocKind kind) { UNUSED(bytes, kind); }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000086 size_t NumAllocations() const { return 0u; }
87 size_t BytesAllocated() const { return 0u; }
88 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const {
89 UNUSED(os); UNUSED(first); UNUSED(lost_bytes_adjustment);
90 }
91};
92
93template <bool kCount>
94class ArenaAllocatorStatsImpl {
95 public:
96 ArenaAllocatorStatsImpl();
97 ArenaAllocatorStatsImpl(const ArenaAllocatorStatsImpl& other) = default;
98 ArenaAllocatorStatsImpl& operator = (const ArenaAllocatorStatsImpl& other) = delete;
99
100 void Copy(const ArenaAllocatorStatsImpl& other);
101 void RecordAlloc(size_t bytes, ArenaAllocKind kind);
102 size_t NumAllocations() const;
103 size_t BytesAllocated() const;
104 void Dump(std::ostream& os, const Arena* first, ssize_t lost_bytes_adjustment) const;
105
106 private:
107 size_t num_allocations_;
108 // TODO: Use std::array<size_t, kNumArenaAllocKinds> from C++11 when we upgrade the STL.
109 size_t alloc_stats_[kNumArenaAllocKinds]; // Bytes used by various allocation kinds.
Vladimir Markobd9e9db2014-03-07 19:41:05 +0000110
Vladimir Marko8dea81c2014-06-06 14:50:36 +0100111 static const char* const kAllocNames[];
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000112};
113
114typedef ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> ArenaAllocatorStats;
buzbee862a7602013-04-05 10:58:54 -0700115
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700116class Arena {
117 public:
118 static constexpr size_t kDefaultSize = 128 * KB;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700119 Arena();
120 virtual ~Arena() { }
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700121 // Reset is for pre-use and uses memset for performance.
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700122 void Reset();
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700123 // Release is used inbetween uses and uses madvise for memory usage.
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700124 virtual void Release() { }
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700125 uint8_t* Begin() {
126 return memory_;
buzbee862a7602013-04-05 10:58:54 -0700127 }
128
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700129 uint8_t* End() {
130 return memory_ + size_;
131 }
132
133 size_t Size() const {
134 return size_;
135 }
136
137 size_t RemainingSpace() const {
138 return Size() - bytes_allocated_;
139 }
140
Mathieu Chartier49285c52014-12-02 15:43:48 -0800141 size_t GetBytesAllocated() const {
142 return bytes_allocated_;
143 }
144
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700145 protected:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700146 size_t bytes_allocated_;
147 uint8_t* memory_;
148 size_t size_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700149 Arena* next_;
150 friend class ArenaPool;
151 friend class ArenaAllocator;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000152 friend class ArenaStack;
153 friend class ScopedArenaAllocator;
154 template <bool kCount> friend class ArenaAllocatorStatsImpl;
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700155
156 private:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700157 DISALLOW_COPY_AND_ASSIGN(Arena);
158};
159
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700160class MallocArena FINAL : public Arena {
161 public:
162 explicit MallocArena(size_t size = Arena::kDefaultSize);
163 virtual ~MallocArena();
164};
165
166class MemMapArena FINAL : public Arena {
167 public:
168 explicit MemMapArena(size_t size = Arena::kDefaultSize);
169 virtual ~MemMapArena() { }
170 void Release() OVERRIDE;
171
172 private:
173 std::unique_ptr<MemMap> map_;
174};
175
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700176class ArenaPool {
177 public:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700178 explicit ArenaPool(bool use_malloc = true);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700179 ~ArenaPool();
Mathieu Chartier49285c52014-12-02 15:43:48 -0800180 Arena* AllocArena(size_t size) LOCKS_EXCLUDED(lock_);
181 void FreeArenaChain(Arena* first) LOCKS_EXCLUDED(lock_);
182 size_t GetBytesAllocated() const LOCKS_EXCLUDED(lock_);
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700183 // Trim the maps in arenas by madvising, used by JIT to reduce memory usage. This only works
184 // use_malloc is false.
Mathieu Chartier9b34b242015-03-09 11:30:17 -0700185 void TrimMaps() LOCKS_EXCLUDED(lock_);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700186
187 private:
Mathieu Chartierc6201fa2015-03-12 10:06:33 -0700188 const bool use_malloc_;
Mathieu Chartier49285c52014-12-02 15:43:48 -0800189 mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700190 Arena* free_arenas_ GUARDED_BY(lock_);
191 DISALLOW_COPY_AND_ASSIGN(ArenaPool);
192};
193
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100194class ArenaAllocator : private DebugStackRefCounter, private ArenaAllocatorStats {
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700195 public:
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700196 explicit ArenaAllocator(ArenaPool* pool);
197 ~ArenaAllocator();
198
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100199 // Get adapter for use in STL containers. See arena_containers.h .
200 ArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
201
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700202 // Returns zeroed memory.
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000203 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Mathieu Chartier75165d02013-09-12 14:00:31 -0700204 if (UNLIKELY(running_on_valgrind_)) {
205 return AllocValgrind(bytes, kind);
206 }
Mathieu Chartierb666f482015-02-18 14:33:14 -0800207 bytes = RoundUp(bytes, kAlignment);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700208 if (UNLIKELY(ptr_ + bytes > end_)) {
209 // Obtain a new block.
210 ObtainNewArenaForAllocation(bytes);
211 if (UNLIKELY(ptr_ == nullptr)) {
212 return nullptr;
213 }
214 }
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000215 ArenaAllocatorStats::RecordAlloc(bytes, kind);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700216 uint8_t* ret = ptr_;
217 ptr_ += bytes;
218 return ret;
219 }
220
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000221 template <typename T>
222 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
223 return static_cast<T*>(Alloc(length * sizeof(T), kind));
Nicolas Geoffraya7062e02014-05-22 12:50:17 +0100224 }
225
Mathieu Chartier75165d02013-09-12 14:00:31 -0700226 void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700227 void ObtainNewArenaForAllocation(size_t allocation_size);
228 size_t BytesAllocated() const;
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000229 MemStats GetMemStats() const;
buzbee862a7602013-04-05 10:58:54 -0700230
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700231 private:
Mathieu Chartierb666f482015-02-18 14:33:14 -0800232 static constexpr size_t kAlignment = 8;
233
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700234 void UpdateBytesAllocated();
buzbee862a7602013-04-05 10:58:54 -0700235
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700236 ArenaPool* pool_;
237 uint8_t* begin_;
238 uint8_t* end_;
239 uint8_t* ptr_;
240 Arena* arena_head_;
Mathieu Chartier75165d02013-09-12 14:00:31 -0700241 bool running_on_valgrind_;
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700242
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100243 template <typename U>
244 friend class ArenaAllocatorAdapter;
245
Mathieu Chartierf6c4b3b2013-08-24 16:11:37 -0700246 DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
buzbee862a7602013-04-05 10:58:54 -0700247}; // ArenaAllocator
248
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000249class MemStats {
250 public:
251 MemStats(const char* name, const ArenaAllocatorStats* stats, const Arena* first_arena,
252 ssize_t lost_bytes_adjustment = 0);
253 void Dump(std::ostream& os) const;
254
255 private:
256 const char* const name_;
257 const ArenaAllocatorStats* const stats_;
258 const Arena* const first_arena_;
259 const ssize_t lost_bytes_adjustment_;
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700260}; // MemStats
buzbee862a7602013-04-05 10:58:54 -0700261
262} // namespace art
263
Mathieu Chartierb666f482015-02-18 14:33:14 -0800264#endif // ART_RUNTIME_BASE_ARENA_ALLOCATOR_H_