/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_LIBARTBASE_BASE_SCOPED_ARENA_CONTAINERS_H_
#define ART_LIBARTBASE_BASE_SCOPED_ARENA_CONTAINERS_H_

#include <deque>
#include <queue>
#include <set>
#include <stack>
#include <type_traits>
#include <unordered_map>
#include <utility>

#include "arena_containers.h"  // For ArenaAllocatorAdapterKind.
#include "dchecked_vector.h"
#include "safe_map.h"
#include "scoped_arena_allocator.h"

namespace art {

// Adapter for use of ScopedArenaAllocator in STL containers.
// Use ScopedArenaAllocator::Adapter() to create an adapter to pass to container constructors.
// For example,
//   void foo(ScopedArenaAllocator* allocator) {
//     ScopedArenaVector<int> foo_vector(allocator->Adapter(kArenaAllocMisc));
//     ScopedArenaSafeMap<int, int> foo_map(std::less<int>(), allocator->Adapter());
//     // Use foo_vector and foo_map...
//   }
template <typename T>
class ScopedArenaAllocatorAdapter;

template <typename T>
using ScopedArenaDeque = std::deque<T, ScopedArenaAllocatorAdapter<T>>;

template <typename T>
using ScopedArenaQueue = std::queue<T, ScopedArenaDeque<T>>;

template <typename T>
using ScopedArenaVector = dchecked_vector<T, ScopedArenaAllocatorAdapter<T>>;

template <typename T, typename Comparator = std::less<T>>
using ScopedArenaPriorityQueue = std::priority_queue<T, ScopedArenaVector<T>, Comparator>;

template <typename T>
using ScopedArenaStdStack = std::stack<T, ScopedArenaDeque<T>>;

template <typename T, typename Comparator = std::less<T>>
using ScopedArenaSet = std::set<T, Comparator, ScopedArenaAllocatorAdapter<T>>;

template <typename K, typename V, typename Comparator = std::less<K>>
using ScopedArenaSafeMap =
    SafeMap<K, V, Comparator, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;

template <typename T,
          typename EmptyFn = DefaultEmptyFn<T>,
          typename HashFn = DefaultHashFn<T>,
          typename Pred = DefaultPred<T>>
using ScopedArenaHashSet = HashSet<T, EmptyFn, HashFn, Pred, ScopedArenaAllocatorAdapter<T>>;

template <typename Key,
          typename Value,
          typename EmptyFn = DefaultEmptyFn<std::pair<Key, Value>>,
          typename HashFn = DefaultHashFn<Key>,
          typename Pred = DefaultPred<Key>>
using ScopedArenaHashMap = HashMap<Key,
                                   Value,
                                   EmptyFn,
                                   HashFn,
                                   Pred,
                                   ScopedArenaAllocatorAdapter<std::pair<Key, Value>>>;

template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>>
using ScopedArenaUnorderedMap =
    std::unordered_map<K, V, Hash, KeyEqual, ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;

template <typename K, typename V, class Hash = std::hash<K>, class KeyEqual = std::equal_to<K>>
using ScopedArenaUnorderedMultimap =
    std::unordered_multimap<K,
                            V,
                            Hash,
                            KeyEqual,
                            ScopedArenaAllocatorAdapter<std::pair<const K, V>>>;
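
// A brief usage sketch for the aliases above (the function bar below is hypothetical; the
// pattern mirrors the example near the top of this file). The adapter is rebound by the
// container to its node type, so all node storage comes from the scoped arena:
//   void bar(ScopedArenaAllocator* allocator) {
//     ScopedArenaUnorderedMap<int, int> counts(allocator->Adapter(kArenaAllocSTL));
//     counts.emplace(42, 1);  // Node allocated through ScopedArenaAllocatorAdapter.
//   }
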
// Implementation details below.

template <>
class ScopedArenaAllocatorAdapter<void>
    : private DebugStackReference, private DebugStackIndirectTopRef,
      private ArenaAllocatorAdapterKind {
 public:
  typedef void value_type;
  typedef void* pointer;
  typedef const void* const_pointer;

  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* allocator,
                                       ArenaAllocKind kind = kArenaAllocSTL)
      : DebugStackReference(allocator),
        DebugStackIndirectTopRef(allocator),
        ArenaAllocatorAdapterKind(kind),
        arena_stack_(allocator->arena_stack_) {
  }
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        ArenaAllocatorAdapterKind(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter&) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter&) = default;
  ~ScopedArenaAllocatorAdapter() = default;

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;
};

template <typename T>
class ScopedArenaAllocatorAdapter
    : private DebugStackReference, private DebugStackIndirectTopRef,
      private ArenaAllocatorAdapterKind {
 public:
  typedef T value_type;
  typedef T* pointer;
  typedef T& reference;
  typedef const T* const_pointer;
  typedef const T& const_reference;
  typedef size_t size_type;
  typedef ptrdiff_t difference_type;

  template <typename U>
  struct rebind {
    typedef ScopedArenaAllocatorAdapter<U> other;
  };

  explicit ScopedArenaAllocatorAdapter(ScopedArenaAllocator* allocator,
                                       ArenaAllocKind kind = kArenaAllocSTL)
      : DebugStackReference(allocator),
        DebugStackIndirectTopRef(allocator),
        ArenaAllocatorAdapterKind(kind),
        arena_stack_(allocator->arena_stack_) {
  }
  template <typename U>
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter<U>& other)
      : DebugStackReference(other),
        DebugStackIndirectTopRef(other),
        ArenaAllocatorAdapterKind(other),
        arena_stack_(other.arena_stack_) {
  }
  ScopedArenaAllocatorAdapter(const ScopedArenaAllocatorAdapter&) = default;
  ScopedArenaAllocatorAdapter& operator=(const ScopedArenaAllocatorAdapter&) = default;
  ~ScopedArenaAllocatorAdapter() = default;

  size_type max_size() const {
    return static_cast<size_type>(-1) / sizeof(T);
  }

  pointer address(reference x) const { return &x; }
  const_pointer address(const_reference x) const { return &x; }

  pointer allocate(size_type n,
                   ScopedArenaAllocatorAdapter<void>::pointer hint ATTRIBUTE_UNUSED = nullptr) {
    DCHECK_LE(n, max_size());
    DebugStackIndirectTopRef::CheckTop();
    return reinterpret_cast<T*>(arena_stack_->Alloc(n * sizeof(T),
                                                    ArenaAllocatorAdapterKind::Kind()));
  }
  void deallocate(pointer p, size_type n) {
    DebugStackIndirectTopRef::CheckTop();
    arena_stack_->MakeInaccessible(p, sizeof(T) * n);
  }

  template <typename U, typename... Args>
  void construct(U* p, Args&&... args) {
    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
    ::new (static_cast<void*>(p)) U(std::forward<Args>(args)...);
  }
  template <typename U>
  void destroy(U* p) {
    // Don't CheckTop(), allow reusing existing capacity of a vector/deque below the top.
    p->~U();
  }

 private:
  ArenaStack* arena_stack_;

  template <typename U>
  friend class ScopedArenaAllocatorAdapter;

  template <typename U>
  friend bool operator==(const ScopedArenaAllocatorAdapter<U>& lhs,
                         const ScopedArenaAllocatorAdapter<U>& rhs);
};

template <typename T>
inline bool operator==(const ScopedArenaAllocatorAdapter<T>& lhs,
                       const ScopedArenaAllocatorAdapter<T>& rhs) {
  return lhs.arena_stack_ == rhs.arena_stack_;
}

template <typename T>
inline bool operator!=(const ScopedArenaAllocatorAdapter<T>& lhs,
                       const ScopedArenaAllocatorAdapter<T>& rhs) {
  return !(lhs == rhs);
}

inline ScopedArenaAllocatorAdapter<void> ScopedArenaAllocator::Adapter(ArenaAllocKind kind) {
  return ScopedArenaAllocatorAdapter<void>(this, kind);
}

// Special deleter that only calls the destructor. Also checks for double-free errors.
template <typename T>
class ArenaDelete {
  static constexpr uint8_t kMagicFill = 0xCE;

 protected:
  // Used for variable sized objects such as RegisterLine.
  ALWAYS_INLINE void ProtectMemory(T* ptr, size_t size) const {
    if (kRunningOnMemoryTool) {
      // Writing to the memory will fail if we already destroyed the pointer with
      // DestroyOnlyDelete since we make it no access.
      memset(ptr, kMagicFill, size);
      MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
    } else if (kIsDebugBuild) {
      CHECK(ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) == ArenaFreeTag::kUsed)
          << "Freeing invalid object " << ptr;
      ArenaStack::ArenaTagForAllocation(reinterpret_cast<void*>(ptr)) = ArenaFreeTag::kFree;
      // Write a magic value to try to catch use-after-free errors.
      memset(ptr, kMagicFill, size);
    }
  }

 public:
  void operator()(T* ptr) const {
    if (ptr != nullptr) {
      ptr->~T();
      ProtectMemory(ptr, sizeof(T));
    }
  }
};

// In general we lack support for arrays. We would need to call the destructor on each element,
// which requires access to the array size. Support for that is future work.
//
// However, we can support trivially destructible component types, as then a destructor doesn't
// need to be called.
template <typename T>
class ArenaDelete<T[]> {
 public:
  void operator()(T* ptr ATTRIBUTE_UNUSED) const {
    static_assert(std::is_trivially_destructible<T>::value,
                  "ArenaUniquePtr does not support non-trivially-destructible arrays.");
    // TODO: Implement debug checks, and MEMORY_TOOL support.
  }
};

// Arena unique ptr that only calls the destructor of the element.
template <typename T>
using ArenaUniquePtr = std::unique_ptr<T, ArenaDelete<T>>;
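
// A minimal usage sketch (hedged; MyNode and the enclosing function are hypothetical). The
// object must be placement-new'ed into memory obtained from the same arena, since ArenaDelete
// only runs the destructor and never releases the storage:
//   void baz(ScopedArenaAllocator* allocator) {
//     ArenaUniquePtr<MyNode> node(new (allocator->Alloc(sizeof(MyNode))) MyNode());
//     // Use *node; ~MyNode() runs when node goes out of scope, the arena keeps the memory.
//   }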

}  // namespace art

#endif  // ART_LIBARTBASE_BASE_SCOPED_ARENA_CONTAINERS_H_