/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "arena_allocator-inl.h"

#include <sys/mman.h>

#include <algorithm>
#include <cstddef>
#include <iomanip>
#include <numeric>

#include "logging.h"
#include "mem_map.h"
#include "mutex.h"
#include "systrace.h"
#include "thread-current-inl.h"

namespace art {

constexpr size_t kMemoryToolRedZoneBytes = 8;

template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
};
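// Each entry above is indexed by the corresponding ArenaAllocKind value, so,
// e.g., alloc_stats_[kArenaAllocMisc] is reported under "Misc" by Dump() below.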

template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) {
  num_allocations_ = other.num_allocations_;
  std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin());
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) {
  alloc_stats_[kind] += bytes;
  ++num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}

template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const {
  const size_t init = 0u;  // Initial value of the correct type.
  return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init);
}

template <bool kCount>
void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first,
                                           ssize_t lost_bytes_adjustment) const {
  size_t malloc_bytes = 0u;
  size_t lost_bytes = 0u;
  size_t num_arenas = 0u;
  for (const Arena* arena = first; arena != nullptr; arena = arena->next_) {
    malloc_bytes += arena->Size();
    lost_bytes += arena->RemainingSpace();
    ++num_arenas;
  }
  // The lost_bytes_adjustment is used to make up for the fact that the current arena
  // may not have the bytes_allocated_ updated correctly.
  lost_bytes += lost_bytes_adjustment;
  const size_t bytes_allocated = BytesAllocated();
  os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes
     << ", lost: " << lost_bytes << "\n";
  size_t num_allocations = NumAllocations();
  if (num_allocations != 0) {
    os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: "
       << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n";
  }
  os << "===== Allocation by kind\n";
  static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames");
  for (int i = 0; i < kNumArenaAllocKinds; i++) {
    os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n";
  }
}
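// Illustrative shape of the dump (the numbers here are made up): the padded
// kAllocNames entries line up the per-kind byte counts into one column.
//
//    MEM: used: 52408, allocated: 131072, lost: 664
//   Number of arenas allocated: 1, Number of allocations: 339, avg size: 154
//   ===== Allocation by kind
//   Misc               4720
//   ...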

#pragma GCC diagnostic push
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop

void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}

Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}

class MallocArena FINAL : public Arena {
 public:
  explicit MallocArena(size_t size = arena_allocator::kArenaDefaultSize);
  virtual ~MallocArena();
 private:
  static constexpr size_t RequiredOverallocation() {
    return (alignof(std::max_align_t) < ArenaAllocator::kArenaAlignment)
        ? ArenaAllocator::kArenaAlignment - alignof(std::max_align_t)
        : 0u;
  }

  uint8_t* unaligned_memory_;
};

MallocArena::MallocArena(size_t size) {
  // We need to guarantee kArenaAlignment aligned allocation for the new arena.
  // TODO: Use std::aligned_alloc() when it becomes available with C++17.
  constexpr size_t overallocation = RequiredOverallocation();
  unaligned_memory_ = reinterpret_cast<uint8_t*>(calloc(1, size + overallocation));
  CHECK(unaligned_memory_ != nullptr);  // Abort on OOM.
  DCHECK_ALIGNED(unaligned_memory_, alignof(std::max_align_t));
  if (overallocation == 0u) {
    memory_ = unaligned_memory_;
  } else {
    memory_ = AlignUp(unaligned_memory_, ArenaAllocator::kArenaAlignment);
    if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
      size_t head = memory_ - unaligned_memory_;
      size_t tail = overallocation - head;
      MEMORY_TOOL_MAKE_NOACCESS(unaligned_memory_, head);
      MEMORY_TOOL_MAKE_NOACCESS(memory_ + size, tail);
    }
  }
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = size;
}
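// A worked example of the over-allocation above, assuming kArenaAlignment == 16
// and alignof(std::max_align_t) == 8 (typical on 32-bit hosts): the constructor
// requests size + 8 bytes. If calloc() returns 0x1008, AlignUp() sets memory_
// to 0x1010, giving head == 8 and tail == 0; if calloc() returns 0x1010, then
// head == 0 and tail == 8. Either way, [memory_, memory_ + size) lies entirely
// inside the calloc()-ed block, and both slack regions are poisoned when
// running under a memory tool.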

MallocArena::~MallocArena() {
  constexpr size_t overallocation = RequiredOverallocation();
  if (overallocation != 0u && UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    size_t head = memory_ - unaligned_memory_;
    size_t tail = overallocation - head;
    MEMORY_TOOL_MAKE_UNDEFINED(unaligned_memory_, head);
    MEMORY_TOOL_MAKE_UNDEFINED(memory_ + size_, tail);
  }
  free(reinterpret_cast<void*>(unaligned_memory_));
}

class MemMapArena FINAL : public Arena {
 public:
  MemMapArena(size_t size, bool low_4gb, const char* name);
  virtual ~MemMapArena();
  void Release() OVERRIDE;

 private:
  std::unique_ptr<MemMap> map_;
};

MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) {
  // Round up to a full page as that's the smallest unit of allocation for mmap()
  // and we want to be able to use all memory that we actually allocate.
  size = RoundUp(size, kPageSize);
  std::string error_msg;
  map_.reset(MemMap::MapAnonymous(
      name, nullptr, size, PROT_READ | PROT_WRITE, low_4gb, false, &error_msg));
  CHECK(map_.get() != nullptr) << error_msg;
  memory_ = map_->Begin();
  static_assert(ArenaAllocator::kArenaAlignment <= kPageSize,
                "Arena should not need stronger alignment than kPageSize.");
  DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
  size_ = map_->Size();
}
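// For example, with kPageSize == 4096, a request for 5000 bytes maps two pages
// (8192 bytes). Note that size_ is taken from map_->Size() rather than from the
// request, so the allocator can hand out the tail of the last page as well.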

MemMapArena::~MemMapArena() {
  // Destroys MemMap via std::unique_ptr<>.
}

void MemMapArena::Release() {
  if (bytes_allocated_ > 0) {
    map_->MadviseDontNeedAndZero();
    bytes_allocated_ = 0;
  }
}

void Arena::Reset() {
  if (bytes_allocated_ > 0) {
    memset(Begin(), 0, bytes_allocated_);
    bytes_allocated_ = 0;
  }
}

ArenaPool::ArenaPool(bool use_malloc, bool low_4gb, const char* name)
    : use_malloc_(use_malloc),
      lock_("Arena pool lock", kArenaPoolLock),
      free_arenas_(nullptr),
      low_4gb_(low_4gb),
      name_(name) {
  if (low_4gb) {
    CHECK(!use_malloc) << "low4gb must use map implementation";
  }
  if (!use_malloc) {
    MemMap::Init();
  }
}

ArenaPool::~ArenaPool() {
  ReclaimMemory();
}

void ArenaPool::ReclaimMemory() {
  while (free_arenas_ != nullptr) {
    auto* arena = free_arenas_;
    free_arenas_ = free_arenas_->next_;
    delete arena;
  }
}

void ArenaPool::LockReclaimMemory() {
  MutexLock lock(Thread::Current(), lock_);
  ReclaimMemory();
}

Arena* ArenaPool::AllocArena(size_t size) {
  Thread* self = Thread::Current();
  Arena* ret = nullptr;
  {
    MutexLock lock(self, lock_);
    if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
      ret = free_arenas_;
      free_arenas_ = free_arenas_->next_;
    }
  }
  if (ret == nullptr) {
    ret = use_malloc_ ? static_cast<Arena*>(new MallocArena(size)) :
        new MemMapArena(size, low_4gb_, name_);
  }
  ret->Reset();
  return ret;
}

void ArenaPool::TrimMaps() {
  if (!use_malloc_) {
    ScopedTrace trace(__PRETTY_FUNCTION__);
    // Doesn't work for malloc.
    MutexLock lock(Thread::Current(), lock_);
    for (auto* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
      arena->Release();
    }
  }
}

size_t ArenaPool::GetBytesAllocated() const {
  size_t total = 0;
  MutexLock lock(Thread::Current(), lock_);
  for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
    total += arena->GetBytesAllocated();
  }
  return total;
}

void ArenaPool::FreeArenaChain(Arena* first) {
  if (UNLIKELY(RUNNING_ON_MEMORY_TOOL > 0)) {
    for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
      MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
    }
  }

  if (arena_allocator::kArenaAllocatorPreciseTracking) {
    // Do not reuse arenas when tracking.
    while (first != nullptr) {
      Arena* next = first->next_;
      delete first;
      first = next;
    }
    return;
  }

  if (first != nullptr) {
    Arena* last = first;
    while (last->next_ != nullptr) {
      last = last->next_;
    }
    Thread* self = Thread::Current();
    MutexLock lock(self, lock_);
    last->next_ = free_arenas_;
    free_arenas_ = first;
  }
}
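// A minimal sketch of the intended pool/allocator lifecycle, assuming a
// hypothetical caller that owns the pool:
//
//   ArenaPool pool(/* use_malloc= */ true, /* low_4gb= */ false, "sketch pool");
//   {
//     ArenaAllocator allocator(&pool);
//     void* storage = allocator.Alloc(256, kArenaAllocMisc);  // Zero-initialized.
//     // ... use storage; there is no per-allocation free.
//   }  // ~ArenaAllocator() hands the whole arena chain back via FreeArenaChain().
//   pool.ReclaimMemory();  // Or simply let ~ArenaPool() release the cached arenas.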

size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}

size_t ArenaAllocator::BytesUsed() const {
  size_t total = ptr_ - begin_;
  if (arena_head_ != nullptr) {
    for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr;
         cur_arena = cur_arena->next_) {
      total += cur_arena->GetBytesAllocated();
    }
  }
  return total;
}

ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}

void ArenaAllocator::UpdateBytesAllocated() {
  if (arena_head_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out.
    arena_head_->bytes_allocated_ = ptr_ - begin_;
  }
}

void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
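// The resulting layout for, e.g., a 5-byte request:
//
//   ret            ret + 5            ret + 16
//    | 5 B defined | 11 B inaccessible | ...next allocation...
//
// rounded_bytes == RoundUp(5 + 8, 8) == 16, so the 3 padding bytes plus the
// 8-byte red zone stay poisoned, and any read or write past ret + bytes is
// reported by the memory tool.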

void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes;
  DCHECK_ALIGNED(rounded_bytes, 8);  // `bytes` is 16-byte aligned, red zone is 8-byte aligned.
  uintptr_t padding =
      ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & ~15u) - reinterpret_cast<uintptr_t>(ptr_);
  ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
  uint8_t* ret;
  if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
    static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena.");
    ret = AllocFromNewArenaWithMemoryTool(rounded_bytes);
  } else {
    ptr_ += padding;  // Leave padding inaccessible.
    ret = ptr_;
    ptr_ += rounded_bytes;
  }
  MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
  // Check that the memory is already zeroed out.
  DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; }));
  return ret;
}
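// Padding example: ((ptr_ + 15) & ~15) rounds ptr_ up to the next multiple of
// 16, so if ptr_ ends in ...0x8 the padding is 8 bytes and the returned pointer
// is 16-byte aligned; if ptr_ is already 16-byte aligned, the padding is 0.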

ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the arena pool.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}

uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}
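// Numeric sketch of the branch above, assuming a default arena size of 128 KiB:
// with 100 KiB still free in the current arena, a 112 KiB request forces a new
// arena; the new arena's remainder (128 - 112 = 16 KiB) is smaller than the
// 100 KiB still available, so the current arena stays active and the new arena
// is linked in behind it with bytes_allocated_ set to the 112 KiB handed out.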

uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) {
  uint8_t* ret = AllocFromNewArena(bytes);
  uint8_t* noaccess_begin = ret + bytes;
  uint8_t* noaccess_end;
  if (ret == arena_head_->Begin()) {
    DCHECK(ptr_ - bytes == ret);
    noaccess_end = end_;
  } else {
    // We're still using the old arena but `ret` comes from a new one just after it.
    DCHECK(arena_head_->next_ != nullptr);
    DCHECK(ret == arena_head_->next_->Begin());
    DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated());
    noaccess_end = arena_head_->next_->End();
  }
  MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin);
  return ret;
}

bool ArenaAllocator::Contains(const void* ptr) const {
  if (ptr >= begin_ && ptr < end_) {
    return true;
  }
  for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) {
    if (cur_arena->Contains(ptr)) {
      return true;
    }
  }
  return false;
}

MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}

void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}

// Dump memory usage stats.
MemStats ArenaAllocator::GetMemStats() const {
  ssize_t lost_bytes_adjustment =
      (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace();
  return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment);
}
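// A usage sketch, assuming a caller with an `allocator` in scope and ART's
// Dumpable<> stream helper, which forwards to MemStats::Dump():
//
//   MemStats stats = allocator.GetMemStats();
//   LOG(INFO) << Dumpable<MemStats>(stats);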

}  // namespace art