blob: bd19d005444db1435216e7c328cc711f5e1ee131 [file] [log] [blame]
Vladimir Marko69f08ba2014-04-11 12:28:11 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Mathieu Chartierb666f482015-02-18 14:33:14 -080017#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
18#define ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_
Vladimir Marko69f08ba2014-04-11 12:28:11 +010019
Vladimir Marko622bdbe2014-06-19 14:59:05 +010020#include <deque>
21#include <queue>
Vladimir Marko69f08ba2014-04-11 12:28:11 +010022#include <set>
Andreas Gampe784e7902015-10-23 17:31:36 -070023#include <type_traits>
Mathieu Chartiere401d142015-04-22 13:56:20 -070024#include <unordered_map>
Vladimir Marko1f497642015-10-05 20:34:42 +010025#include <utility>
Vladimir Marko69f08ba2014-04-11 12:28:11 +010026
Mathieu Chartierb666f482015-02-18 14:33:14 -080027#include "arena_containers.h" // For ArenaAllocatorAdapterKind.
Vladimir Markoec7802a2015-10-01 20:57:57 +010028#include "base/dchecked_vector.h"
Mathieu Chartierb666f482015-02-18 14:33:14 -080029#include "scoped_arena_allocator.h"
Vladimir Marko69f08ba2014-04-11 12:28:11 +010030#include "safe_map.h"
31
32namespace art {
33
// Adapter for use of ScopedArenaAllocator in STL containers.
// Use ScopedArenaAllocator::Adapter() to create an adapter to pass to container constructors.
// For example,
//   void foo(ScopedArenaAllocator* allocator) {
//     ScopedArenaVector<int> foo_vector(allocator->Adapter(kArenaAllocMisc));
//     ScopedArenaSafeMap<int, int> foo_map(std::less<int>(), allocator->Adapter());
//     // Use foo_vector and foo_map...
//   }
template <typename T>
class ScopedArenaAllocatorAdapter;

// std::deque backed by scoped arena memory.
template <typename T>
using ScopedArenaDeque = std::deque<T, ScopedArenaAllocatorAdapter<T>>;

// std::queue whose underlying container is a ScopedArenaDeque.
template <typename T>
using ScopedArenaQueue = std::queue<T, ScopedArenaDeque<T>>;

// Vector backed by scoped arena memory; dchecked_vector adds bounds DCHECKs.
template <typename T>
using ScopedArenaVector = dchecked_vector<T, ScopedArenaAllocatorAdapter<T>>;

// Ordered set backed by scoped arena memory.
template <typename T, typename Comparator = std::less<T>>
using ScopedArenaSet = std::set<T, Comparator, ScopedArenaAllocatorAdapter<T>>;

// SafeMap (ordered map with checked accessors) backed by scoped arena memory.
template <typename K, typename V, typename Comparator = std::less<K>>
using ScopedArenaSafeMap =
    SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;

// Hash map backed by scoped arena memory.
template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>>
using ScopedArenaUnorderedMap =
    std::unordered_map<K, V, Hash, KeyEqual, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
64
65
Vladimir Marko8081d2b2014-07-31 15:33:43 +010066// Implementation details below.
67
68template <>
69class ScopedArenaAllocatorAdapter<void>
70 : private DebugStackReference, private DebugStackIndirectTopRef,
71 private ArenaAllocatorAdapterKind {
72 public:
73 typedef void value_type;
74 typedef void* pointer;
75 typedef const void* const_pointer;
76
77 template <typename U>
78 struct rebind {
79 typedef ScopedArenaAllocatorAdapter<U> other;
80 };
81
82 explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator,
83 ArenaAllocKind kind = kArenaAllocSTL)
84 : DebugStackReference(arena_allocator),
85 DebugStackIndirectTopRef(arena_allocator),
86 ArenaAllocatorAdapterKind(kind),
87 arena_stack_(arena_allocator->arena_stack_) {
88 }
89 template <typename U>
90 ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
91 : DebugStackReference(other),
92 DebugStackIndirectTopRef(other),
93 ArenaAllocatorAdapterKind(other),
94 arena_stack_(other.arena_stack_) {
95 }
Andreas Gampec801f0d2015-02-24 20:55:16 -080096 ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter&) = default;
97 ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter&) = default;
Vladimir Marko8081d2b2014-07-31 15:33:43 +010098 ~ScopedArenaAllocatorAdapter() = default;
99
100 private:
101 ArenaStack* arena_stack_;
102
103 template <typename U>
104 friend class ScopedArenaAllocatorAdapter;
105};
106
107template <typename T>
108class ScopedArenaAllocatorAdapter
109 : private DebugStackReference, private DebugStackIndirectTopRef,
110 private ArenaAllocatorAdapterKind {
111 public:
112 typedef T value_type;
113 typedef T* pointer;
114 typedef T& reference;
115 typedef const T* const_pointer;
116 typedef const T& const_reference;
117 typedef size_t size_type;
118 typedef ptrdiff_t difference_type;
119
120 template <typename U>
121 struct rebind {
122 typedef ScopedArenaAllocatorAdapter<U> other;
123 };
124
125 explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* arena_allocator,
126 ArenaAllocKind kind = kArenaAllocSTL)
127 : DebugStackReference(arena_allocator),
128 DebugStackIndirectTopRef(arena_allocator),
129 ArenaAllocatorAdapterKind(kind),
130 arena_stack_(arena_allocator->arena_stack_) {
131 }
132 template <typename U>
133 ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
134 : DebugStackReference(other),
135 DebugStackIndirectTopRef(other),
136 ArenaAllocatorAdapterKind(other),
137 arena_stack_(other.arena_stack_) {
138 }
Andreas Gampec801f0d2015-02-24 20:55:16 -0800139 ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter&) = default;
140 ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter&) = default;
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100141 ~ScopedArenaAllocatorAdapter() = default;
142
143 size_type max_size() const {
144 return static_cast<size_type>(-1) / sizeof(T);
145 }
146
147 pointer address(reference x) const { return &x; }
148 const_pointer address(const_reference x) const { return &x; }
149
Roland Levillain4b8f1ec2015-08-26 18:34:03 +0100150 pointer allocate(size_type n,
151 ScopedArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100152 DCHECK_LE(n, max_size());
153 DebugStackIndirectTopRef::CheckTop();
154 return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T),
155 ArenaAllocatorAdapterKind::Kind()));
156 }
157 void deallocate(pointer p, size_type n) {
158 DebugStackIndirectTopRef::CheckTop();
Vladimir Marko2a408a32015-09-18 14:11:00 +0100159 arena_stack_->MakeInaccessible(p, sizeof(T) * n);
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100160 }
161
Vladimir Marko1f497642015-10-05 20:34:42 +0100162 template <typename U, typename... Args>
163 void construct(U* p, Args&&... args) {
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100164 // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
Vladimir Marko1f497642015-10-05 20:34:42 +0100165 ::new (static_cast<void*>(p)) U(std::forward<Args>(args)...);
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100166 }
Vladimir Marko1f497642015-10-05 20:34:42 +0100167 template <typename U>
168 void destroy(U* p) {
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100169 // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
Vladimir Marko1f497642015-10-05 20:34:42 +0100170 p->~U();
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100171 }
172
173 private:
174 ArenaStack* arena_stack_;
175
176 template <typename U>
177 friend class ScopedArenaAllocatorAdapter;
178
179 template <typename U>
180 friend bool operator==(const ScopedArenaAllocatorAdapter<U>& lhs,
181 const ScopedArenaAllocatorAdapter<U>& rhs);
182};
183
184template <typename T>
185inline bool operator==(const ScopedArenaAllocatorAdapter<T>& lhs,
186 const ScopedArenaAllocatorAdapter<T>& rhs) {
187 return lhs.arena_stack_ == rhs.arena_stack_;
188}
189
190template <typename T>
191inline bool operator!=(const ScopedArenaAllocatorAdapter<T>& lhs,
192 const ScopedArenaAllocatorAdapter<T>& rhs) {
193 return !(lhs == rhs);
194}
195
196inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter(ArenaAllocKind kind) {
197 return ScopedArenaAllocatorAdapter<void>(this, kind);
198}
199
// Special deleter that only calls the destructor. Also checks for double free errors.
template <typename T>
class ArenaDelete {
  // Fill pattern written over destroyed objects so use-after-free reads are visible.
  static constexpr uint8_t kMagicFill = 0xCE;

 protected:
  // Used for variable sized objects such as RegisterLine.
  // Poisons [ptr, ptr + size) after destruction: under a memory tool the range is
  // filled and then made no-access; in debug builds the arena allocation tag is
  // checked and flipped (catching double frees) and the range is overwritten.
  ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const {
    if (RUNNING_ON_MEMORY_TOOL > 0) {
      // Writing to the memory will fail if we already destroyed the pointer with
      // DestroyOnlyDelete since we make it no access.
      memset(ptr, kMagicFill, size);
      MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
    } else if (kIsDebugBuild) {
      CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
          << "Freeing invalid object " << ptr;
      ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
      // Write a magic value to try and catch use after free error.
      memset(ptr, kMagicFill, size);
    }
  }

 public:
  // Runs the destructor; arena storage itself is never freed individually.
  // Safe to call with nullptr, matching std::default_delete behavior.
  void operator()(T* ptr) const {
    if (ptr != nullptr) {
      ptr->~T();
      ProtectMemory(ptr, sizeof(T));
    }
  }
};
230
// In general we lack support for arrays. We would need to call the destructor on each element,
// which requires access to the array size. Support for that is future work.
//
// However, we can support trivially destructible component types, as then a destructor doesn't
// need to be called.
template <typename T>
class ArenaDelete<T[]> {
 public:
  // No-op deleter: element destructors are trivial (enforced below) and arena
  // memory is not freed here.
  void operator()(T* ptr ATTRIBUTE_UNUSED) const {
    static_assert(std::is_trivially_destructible<T>::value,
                  "ArenaUniquePtr does not support non-trivially-destructible arrays.");
    // TODO: Implement debug checks, and MEMORY_TOOL support.
  }
};
245
// Arena unique ptr that only calls the destructor of the element.
// The storage itself remains in the arena and is reclaimed with it.
template <typename T>
using ArenaUniquePtr = std::unique_ptr<T, ArenaDelete<T>>;
249
Vladimir Marko69f08ba2014-04-11 12:28:11 +0100250} // namespace art
251
Mathieu Chartierb666f482015-02-18 14:33:14 -0800252#endif // ART_RUNTIME_BASE_SCOPED_ARENA_CONTAINERS_H_