blob: 1a0eb5ea079f8fa4d9a9adb5c75257530e3fa178 [file] [log] [blame]
Vladimir Marko83cc7ae2014-02-12 18:02:05 +00001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
18#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000019
Mathieu Chartierb666f482015-02-18 14:33:14 -080020#include "arena_allocator.h"
21#include "debug_stack.h"
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000022#include "globals.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080023#include "logging.h"
24#include "macros.h"
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000025
26namespace art {
27
28class ArenaStack;
29class ScopedArenaAllocator;
30
31template <typename T>
32class ScopedArenaAllocatorAdapter;
33
// Per-allocation tag (meaningful in debug builds only) recording whether an
// allocation is still live, so that double-destruction can be detected.
enum class ArenaFreeTag : uint8_t {
  kUsed,  // Allocation has been handed out and not yet destroyed.
  kFree,  // Allocation has already been destroyed.
};
41
// Holds a list of Arenas for use by ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  // Reset the stack; defined out of line (see the .cc) — presumably returns
  // arenas to the pool, matching the class comment above. TODO confirm.
  void Reset();

  // Peak total of bytes allocated across the whole lifetime of this stack.
  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  // The tag occupies the kAlignment-sized slot immediately *before* the
  // pointer handed out by Alloc() below (debug builds only).
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }

  // The alignment guaranteed for individual allocations.
  static constexpr size_t kAlignment = 8u;

 private:
  // Tag types used solely to give the two ArenaAllocatorStats bases of
  // StatsAndPool distinct types, so both can be inherited and selected by
  // static_cast (see PeakStats()/CurrentStats()).
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  // Peak stats + current stats + the pool pointer, packed into one member.
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  // Stats tracking the all-time peak usage.
  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  // Stats tracking the currently live allocations.
  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      // Slow path with red-zone/poisoning support; defined out of line.
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kAlignment for the free or used tag. Required to preserve alignment.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kAlignment : 0u), kAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      // Current arena exhausted; carve the allocation out of a fresh arena.
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    // Note: stats record the requested size, not the rounded size.
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      // Skip the tag slot reserved above and mark the allocation live.
      ptr += kAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;  // First arena in the chain.
  Arena* top_arena_;     // Arena currently being allocated from.
  uint8_t* top_ptr_;     // Next free byte in the top arena.
  uint8_t* top_end_;     // End of the top arena's usable space.

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};
125
Vladimir Markofda04322015-11-11 18:45:50 +0000126// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
127//
128// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
129// objects and allows nesting multiple allocators. Only the top allocator can be used but
130// once it's destroyed, its memory can be reused by the next ScopedArenaAllocator on the
131// stack. This is facilitated by returning the memory to the ArenaStack.
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000132class ScopedArenaAllocator
133 : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
134 public:
135 // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
136 // the allocator is not exactly a C++ block scope. For example, an optimization
137 // pass can create the scoped allocator in Start() and destroy it in End().
138 static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
139 void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
140 ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
141 allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
142 return allocator;
143 }
144
145 explicit ScopedArenaAllocator(ArenaStack* arena_stack);
146 ~ScopedArenaAllocator();
147
148 void Reset();
149
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000150 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000151 DebugStackReference::CheckTop();
152 return arena_stack_->Alloc(bytes, kind);
153 }
154
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000155 template <typename T>
Vladimir Markof6a35de2016-03-21 12:01:50 +0000156 T* Alloc(ArenaAllocKind kind = kArenaAllocMisc) {
157 return AllocArray<T>(1, kind);
158 }
159
160 template <typename T>
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000161 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
162 return static_cast<T*>(Alloc(length * sizeof(T), kind));
163 }
164
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100165 // Get adapter for use in STL containers. See scoped_arena_containers.h .
166 ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000167
168 // Allow a delete-expression to destroy but not deallocate allocators created by Create().
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100169 static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000170
171 private:
172 ArenaStack* const arena_stack_;
173 Arena* mark_arena_;
174 uint8_t* mark_ptr_;
175 uint8_t* mark_end_;
176
Vladimir Marko3d2ec352014-10-10 15:39:11 +0100177 void DoReset();
178
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000179 template <typename T>
180 friend class ScopedArenaAllocatorAdapter;
181
182 DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
183};
184
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000185} // namespace art
186
Mathieu Chartierb666f482015-02-18 14:33:14 -0800187#endif // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_