/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "logging.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

// Tag associated with each allocation to help prevent double free.
enum class ArenaFreeTag : uint8_t {
  // Allocation is used and has not yet been destroyed.
  kUsed,
  // Allocation has been destroyed.
  kFree,
};

// Alignment (in bytes) of allocations on the ArenaStack. In debug builds each allocation also
// reserves one aligned slot in front of the returned pointer to hold its ArenaFreeTag.
static constexpr size_t kArenaAlignment = 8;

// Holds a list of Arenas for use by ScopedArenaAllocator stack.
// The memory is returned to the ArenaPool when the ArenaStack is destroyed.
class ArenaStack : private DebugStackRefCounter, private ArenaAllocatorMemoryTool {
 public:
  explicit ArenaStack(ArenaPool* arena_pool);
  ~ArenaStack();

  using ArenaAllocatorMemoryTool::IsRunningOnMemoryTool;
  using ArenaAllocatorMemoryTool::MakeDefined;
  using ArenaAllocatorMemoryTool::MakeUndefined;
  using ArenaAllocatorMemoryTool::MakeInaccessible;

  void Reset();

  size_t PeakBytesAllocated() {
    return PeakStats()->BytesAllocated();
  }

  MemStats GetPeakStats() const;

  // Return the arena tag associated with a pointer.
  static ArenaFreeTag& ArenaTagForAllocation(void* ptr) {
    DCHECK(kIsDebugBuild) << "Only debug builds have tags";
    return *(reinterpret_cast<ArenaFreeTag*>(ptr) - 1);
  }

 private:
  struct Peak;
  struct Current;
  template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
  struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
    explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
    ArenaPool* const pool;
  };

  ArenaAllocatorStats* PeakStats() {
    return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
  }

  ArenaAllocatorStats* CurrentStats() {
    return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
  }

  // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
  void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
    if (UNLIKELY(IsRunningOnMemoryTool())) {
      return AllocWithMemoryTool(bytes, kind);
    }
    // Add kArenaAlignment for the free or used tag. Required to preserve alignment.
    size_t rounded_bytes = RoundUp(bytes + (kIsDebugBuild ? kArenaAlignment : 0u), kArenaAlignment);
    uint8_t* ptr = top_ptr_;
    if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
      ptr = AllocateFromNextArena(rounded_bytes);
    }
    CurrentStats()->RecordAlloc(bytes, kind);
    top_ptr_ = ptr + rounded_bytes;
    if (kIsDebugBuild) {
      ptr += kArenaAlignment;
      ArenaTagForAllocation(ptr) = ArenaFreeTag::kUsed;
    }
    return ptr;
  }
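
  // Worked example of the debug-build layout that Alloc() above produces (illustrative only,
  // not part of the API): for a request of 5 bytes, rounded_bytes = RoundUp(5 + 8, 8) = 16.
  // The first 8-byte slot holds the ArenaFreeTag in its last byte (the byte just before the
  // returned pointer), and the returned pointer is the original top_ptr_ + kArenaAlignment
  // with 8 usable bytes. In release builds no tag slot is reserved, so rounded_bytes would be
  // RoundUp(5, 8) = 8 and the original top_ptr_ is returned directly.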

  uint8_t* AllocateFromNextArena(size_t rounded_bytes);
  void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
  void UpdateBytesAllocated();
  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);

  StatsAndPool stats_and_pool_;
  Arena* bottom_arena_;
  Arena* top_arena_;
  uint8_t* top_ptr_;
  uint8_t* top_end_;

  friend class ScopedArenaAllocator;
  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ArenaStack);
};
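
// Illustrative sketch of a typical ArenaStack lifecycle (a hypothetical usage, assuming the
// caller owns an ArenaPool; the variable names below are not part of this header):
//
//   ArenaPool pool;
//   {
//     ArenaStack arena_stack(&pool);
//     // ScopedArenaAllocators (see below) allocate through `arena_stack`.
//     MemStats peak = arena_stack.GetPeakStats();  // Peak usage across all nested scopes.
//   }  // Destroying the ArenaStack returns its arenas to `pool` for reuse.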

// Fast single-threaded allocator. Allocated chunks are _not_ guaranteed to be zero-initialized.
//
// Unlike the ArenaAllocator, ScopedArenaAllocator is intended for relatively short-lived
// objects and allows nesting multiple allocators. Only the top allocator can be used but
// once it's destroyed, its memory can be reused by the next ScopedArenaAllocator on the
// stack. This is facilitated by returning the memory to the ArenaStack.
class ScopedArenaAllocator
    : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
 public:
  // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
  // the allocator is not exactly a C++ block scope. For example, an optimization
  // pass can create the scoped allocator in Start() and destroy it in End().
  static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
    void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
    ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
    allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
    return allocator;
  }

  explicit ScopedArenaAllocator(ArenaStack* arena_stack);
  ~ScopedArenaAllocator();

  void Reset();

  void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
    DebugStackReference::CheckTop();
    return arena_stack_->Alloc(bytes, kind);
  }

  template <typename T>
  T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
    return static_cast<T*>(Alloc(length * sizeof(T), kind));
  }

  // Get adapter for use in STL containers. See scoped_arena_containers.h.
  ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);

  // Allow a delete-expression to destroy but not deallocate allocators created by Create().
  static void operator delete(void* ptr ATTRIBUTE_UNUSED) {}

 private:
  ArenaStack* const arena_stack_;
  Arena* mark_arena_;
  uint8_t* mark_ptr_;
  uint8_t* mark_end_;

  void DoReset();

  template <typename T>
  friend class ScopedArenaAllocatorAdapter;

  DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
};
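
// Illustrative sketch of nesting scoped allocators (a hypothetical usage example; the function
// and variable names are assumptions, not part of this header):
//
//   void ProcessMethod(ArenaStack* arena_stack) {
//     ScopedArenaAllocator outer(arena_stack);
//     uint8_t* table = outer.AllocArray<uint8_t>(100);
//     {
//       ScopedArenaAllocator inner(arena_stack);  // `inner` is now the top allocator.
//       void* scratch = inner.Alloc(256);         // Only the top allocator may allocate.
//     }  // `inner` is destroyed; its memory is reusable by the next allocator on the stack.
//     void* more = outer.Alloc(32);               // `outer` is the top allocator again.
//   }
//
// When the allocator's lifetime is not a C++ block scope, Create() places the allocator itself
// on the ArenaStack; it can later be destroyed with a delete-expression, which runs the
// destructor without deallocating (see the operator delete above).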

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_