buzbee | 862a760 | 2013-04-05 10:58:54 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2013 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
Andreas Gampe | d490129 | 2017-05-30 18:41:34 -0700 | [diff] [blame] | 17 | #include "arena_allocator-inl.h" |
| 18 | |
| 19 | #include <sys/mman.h> |
| 20 | |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 21 | #include <algorithm> |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 22 | #include <cstddef> |
Ian Rogers | 6f3dbba | 2014-10-14 17:41:57 -0700 | [diff] [blame] | 23 | #include <iomanip> |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 24 | #include <numeric> |
| 25 | |
Andreas Gampe | 5794381 | 2017-12-06 21:39:13 -0800 | [diff] [blame] | 26 | #include <android-base/logging.h> |
| 27 | |
buzbee | 862a760 | 2013-04-05 10:58:54 -0700 | [diff] [blame] | 28 | namespace art { |
| 29 | |
// Size of the red zone appended to each allocation when running under a memory
// tool; the red zone is left inaccessible so overruns past an allocation fault.
// 8 bytes keeps the 8-byte rounding of allocations intact.
constexpr size_t kMemoryToolRedZoneBytes = 8;
Mathieu Chartier | f6c4b3b | 2013-08-24 16:11:37 -0700 | [diff] [blame] | 31 | |
template <bool kCount>
const char* const ArenaAllocatorStatsImpl<kCount>::kAllocNames[] = {
  // Human-readable label for each ArenaAllocKind, indexed by the enum value
  // (Dump() static_asserts that the array size matches kNumArenaAllocKinds).
  // Every name should have the same width and end with a space. Abbreviate if necessary:
  "Misc         ",
  "SwitchTbl    ",
  "SlowPaths    ",
  "GrowBitMap   ",
  "STL          ",
  "GraphBuilder ",
  "Graph        ",
  "BasicBlock   ",
  "BlockList    ",
  "RevPostOrder ",
  "LinearOrder  ",
  "ConstantsMap ",
  "Predecessors ",
  "Successors   ",
  "Dominated    ",
  "Instruction  ",
  "CtorFenceIns ",
  "InvokeInputs ",
  "PhiInputs    ",
  "TypeCheckIns ",
  "LoopInfo     ",
  "LIBackEdges  ",
  "TryCatchInf  ",
  "UseListNode  ",
  "Environment  ",
  "EnvVRegs     ",
  "EnvLocations ",
  "LocSummary   ",
  "SsaBuilder   ",
  "MoveOperands ",
  "CodeBuffer   ",
  "StackMaps    ",
  "Optimization ",
  "GVN          ",
  "InductionVar ",
  "BCE          ",
  "DCE          ",
  "LSA          ",
  "LSE          ",
  "CFRE         ",
  "LICM         ",
  "LoopOpt      ",
  "SsaLiveness  ",
  "SsaPhiElim   ",
  "RefTypeProp  ",
  "SelectGen    ",
  "SideEffects  ",
  "RegAllocator ",
  "RegAllocVldt ",
  "StackMapStm  ",
  "BitTableBld  ",
  "VectorNode   ",
  "CodeGen      ",
  "Assembler    ",
  "ParallelMove ",
  "GraphChecker ",
  "Verifier     ",
  "CallingConv  ",
  "CHA          ",
  "Scheduler    ",
  "Profile      ",
  "SBCloner     ",
};
| 98 | |
// Starts with zero recorded allocations and a zero byte counter per kind.
template <bool kCount>
ArenaAllocatorStatsImpl<kCount>::ArenaAllocatorStatsImpl()
    : num_allocations_(0u),
      alloc_stats_(kNumArenaAllocKinds, 0u) {
}
| 104 | |
| 105 | template <bool kCount> |
| 106 | void ArenaAllocatorStatsImpl<kCount>::Copy(const ArenaAllocatorStatsImpl& other) { |
| 107 | num_allocations_ = other.num_allocations_; |
Vladimir Marko | 4e335d0 | 2016-12-19 16:04:33 +0000 | [diff] [blame] | 108 | std::copy_n(other.alloc_stats_.begin(), kNumArenaAllocKinds, alloc_stats_.begin()); |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 109 | } |
| 110 | |
| 111 | template <bool kCount> |
| 112 | void ArenaAllocatorStatsImpl<kCount>::RecordAlloc(size_t bytes, ArenaAllocKind kind) { |
| 113 | alloc_stats_[kind] += bytes; |
| 114 | ++num_allocations_; |
| 115 | } |
| 116 | |
// Returns the total number of allocations recorded via RecordAlloc().
template <bool kCount>
size_t ArenaAllocatorStatsImpl<kCount>::NumAllocations() const {
  return num_allocations_;
}
| 121 | |
| 122 | template <bool kCount> |
| 123 | size_t ArenaAllocatorStatsImpl<kCount>::BytesAllocated() const { |
| 124 | const size_t init = 0u; // Initial value of the correct type. |
Vladimir Marko | 4e335d0 | 2016-12-19 16:04:33 +0000 | [diff] [blame] | 125 | return std::accumulate(alloc_stats_.begin(), alloc_stats_.end(), init); |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 126 | } |
| 127 | |
| 128 | template <bool kCount> |
| 129 | void ArenaAllocatorStatsImpl<kCount>::Dump(std::ostream& os, const Arena* first, |
| 130 | ssize_t lost_bytes_adjustment) const { |
| 131 | size_t malloc_bytes = 0u; |
| 132 | size_t lost_bytes = 0u; |
| 133 | size_t num_arenas = 0u; |
| 134 | for (const Arena* arena = first; arena != nullptr; arena = arena->next_) { |
| 135 | malloc_bytes += arena->Size(); |
| 136 | lost_bytes += arena->RemainingSpace(); |
| 137 | ++num_arenas; |
| 138 | } |
| 139 | // The lost_bytes_adjustment is used to make up for the fact that the current arena |
| 140 | // may not have the bytes_allocated_ updated correctly. |
| 141 | lost_bytes += lost_bytes_adjustment; |
| 142 | const size_t bytes_allocated = BytesAllocated(); |
| 143 | os << " MEM: used: " << bytes_allocated << ", allocated: " << malloc_bytes |
| 144 | << ", lost: " << lost_bytes << "\n"; |
Vladimir Marko | bd9e9db | 2014-03-07 19:41:05 +0000 | [diff] [blame] | 145 | size_t num_allocations = NumAllocations(); |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 146 | if (num_allocations != 0) { |
| 147 | os << "Number of arenas allocated: " << num_arenas << ", Number of allocations: " |
| 148 | << num_allocations << ", avg size: " << bytes_allocated / num_allocations << "\n"; |
| 149 | } |
| 150 | os << "===== Allocation by kind\n"; |
Andreas Gampe | 785d2f2 | 2014-11-03 22:57:30 -0800 | [diff] [blame] | 151 | static_assert(arraysize(kAllocNames) == kNumArenaAllocKinds, "arraysize of kAllocNames"); |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 152 | for (int i = 0; i < kNumArenaAllocKinds; i++) { |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 153 | // Reduce output by listing only allocation kinds that actually have allocations. |
| 154 | if (alloc_stats_[i] != 0u) { |
Vladimir Marko | bd9e9db | 2014-03-07 19:41:05 +0000 | [diff] [blame] | 155 | os << kAllocNames[i] << std::setw(10) << alloc_stats_[i] << "\n"; |
Vladimir Marko | ca6fff8 | 2017-10-03 14:49:14 +0100 | [diff] [blame] | 156 | } |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 157 | } |
| 158 | } |
| 159 | |
#pragma GCC diagnostic push
// Only clang >= 4 knows this warning, hence the version guard.
#if __clang_major__ >= 4
#pragma GCC diagnostic ignored "-Winstantiation-after-specialization"
#endif
// We're going to use ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations> which needs
// to be explicitly instantiated if kArenaAllocatorCountAllocations is true. Explicit
// instantiation of the specialization ArenaAllocatorStatsImpl<false> does not do anything
// but requires the warning "-Winstantiation-after-specialization" to be turned off.
//
// To avoid bit-rot of the ArenaAllocatorStatsImpl<true>, instantiate it also in debug builds
// (but keep the unnecessary code out of release builds) as we do not usually compile with
// kArenaAllocatorCountAllocations set to true.
template class ArenaAllocatorStatsImpl<kArenaAllocatorCountAllocations || kIsDebugBuild>;
#pragma GCC diagnostic pop
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 174 | |
// Out-of-line trampolines to the memory-tool annotation macros — presumably
// kept out of the header so that callers need not see the macro definitions
// (TODO confirm against arena_allocator.h).

// Marks [ptr, ptr + size) as defined (readable and writable) for the memory tool.
void ArenaAllocatorMemoryTool::DoMakeDefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_DEFINED(ptr, size);
}

// Marks [ptr, ptr + size) as undefined (allocated but uninitialized).
void ArenaAllocatorMemoryTool::DoMakeUndefined(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, size);
}

// Marks [ptr, ptr + size) as inaccessible; any access is reported as an error.
void ArenaAllocatorMemoryTool::DoMakeInaccessible(void* ptr, size_t size) {
  MEMORY_TOOL_MAKE_NOACCESS(ptr, size);
}
| 186 | |
// An Arena starts with no backing memory, zero size, and no recorded usage;
// it is not linked into any chain until an allocator adopts it.
Arena::Arena() : bytes_allocated_(0), memory_(nullptr), size_(0), next_(nullptr) {
}
| 189 | |
// Total bytes recorded by the allocation statistics base class.
size_t ArenaAllocator::BytesAllocated() const {
  return ArenaAllocatorStats::BytesAllocated();
}
| 193 | |
Mathieu Chartier | c785344 | 2015-03-27 14:35:38 -0700 | [diff] [blame] | 194 | size_t ArenaAllocator::BytesUsed() const { |
| 195 | size_t total = ptr_ - begin_; |
| 196 | if (arena_head_ != nullptr) { |
| 197 | for (Arena* cur_arena = arena_head_->next_; cur_arena != nullptr; |
| 198 | cur_arena = cur_arena->next_) { |
| 199 | total += cur_arena->GetBytesAllocated(); |
| 200 | } |
| 201 | } |
| 202 | return total; |
| 203 | } |
| 204 | |
// Creates an allocator with no arena attached; the first allocation pulls an
// arena from `pool` (see AllocFromNewArena()).
ArenaAllocator::ArenaAllocator(ArenaPool* pool)
    : pool_(pool),
      begin_(nullptr),
      end_(nullptr),
      ptr_(nullptr),
      arena_head_(nullptr) {
}
| 212 | |
| 213 | void ArenaAllocator::UpdateBytesAllocated() { |
| 214 | if (arena_head_ != nullptr) { |
| 215 | // Update how many bytes we have allocated into the arena so that the arena pool knows how |
| 216 | // much memory to zero out. |
| 217 | arena_head_->bytes_allocated_ = ptr_ - begin_; |
| 218 | } |
| 219 | } |
| 220 | |
Vladimir Marko | 2a408a3 | 2015-09-18 14:11:00 +0100 | [diff] [blame] | 221 | void* ArenaAllocator::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) { |
Vladimir Marko | 7500193 | 2015-11-10 20:54:22 +0000 | [diff] [blame] | 222 | // We mark all memory for a newly retrieved arena as inaccessible and then |
| 223 | // mark only the actually allocated memory as defined. That leaves red zones |
| 224 | // and padding between allocations marked as inaccessible. |
Evgenii Stepanov | 1e13374 | 2015-05-20 12:30:59 -0700 | [diff] [blame] | 225 | size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8); |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 226 | ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind); |
Vladimir Marko | 3f84f2c | 2016-04-25 19:40:34 +0100 | [diff] [blame] | 227 | uint8_t* ret; |
Vladimir Marko | 3e0e717 | 2016-04-22 18:07:13 +0100 | [diff] [blame] | 228 | if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) { |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 229 | ret = AllocFromNewArenaWithMemoryTool(rounded_bytes); |
Vladimir Marko | 3f84f2c | 2016-04-25 19:40:34 +0100 | [diff] [blame] | 230 | } else { |
| 231 | ret = ptr_; |
| 232 | ptr_ += rounded_bytes; |
Vladimir Marko | 3e0e717 | 2016-04-22 18:07:13 +0100 | [diff] [blame] | 233 | } |
Vladimir Marko | 2a408a3 | 2015-09-18 14:11:00 +0100 | [diff] [blame] | 234 | MEMORY_TOOL_MAKE_DEFINED(ret, bytes); |
Vladimir Marko | 7500193 | 2015-11-10 20:54:22 +0000 | [diff] [blame] | 235 | // Check that the memory is already zeroed out. |
| 236 | DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; })); |
Mathieu Chartier | 75165d0 | 2013-09-12 14:00:31 -0700 | [diff] [blame] | 237 | return ret; |
| 238 | } |
| 239 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 240 | void* ArenaAllocator::AllocWithMemoryToolAlign16(size_t bytes, ArenaAllocKind kind) { |
| 241 | // We mark all memory for a newly retrieved arena as inaccessible and then |
| 242 | // mark only the actually allocated memory as defined. That leaves red zones |
| 243 | // and padding between allocations marked as inaccessible. |
| 244 | size_t rounded_bytes = bytes + kMemoryToolRedZoneBytes; |
| 245 | DCHECK_ALIGNED(rounded_bytes, 8); // `bytes` is 16-byte aligned, red zone is 8-byte aligned. |
| 246 | uintptr_t padding = |
| 247 | ((reinterpret_cast<uintptr_t>(ptr_) + 15u) & 15u) - reinterpret_cast<uintptr_t>(ptr_); |
| 248 | ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind); |
| 249 | uint8_t* ret; |
| 250 | if (UNLIKELY(padding + rounded_bytes > static_cast<size_t>(end_ - ptr_))) { |
| 251 | static_assert(kArenaAlignment >= 16, "Expecting sufficient alignment for new Arena."); |
| 252 | ret = AllocFromNewArenaWithMemoryTool(rounded_bytes); |
| 253 | } else { |
| 254 | ptr_ += padding; // Leave padding inaccessible. |
| 255 | ret = ptr_; |
| 256 | ptr_ += rounded_bytes; |
| 257 | } |
| 258 | MEMORY_TOOL_MAKE_DEFINED(ret, bytes); |
| 259 | // Check that the memory is already zeroed out. |
| 260 | DCHECK(std::all_of(ret, ret + bytes, [](uint8_t val) { return val == 0u; })); |
| 261 | return ret; |
| 262 | } |
| 263 | |
ArenaAllocator::~ArenaAllocator() {
  // Reclaim all the arenas by giving them back to the thread pool.
  // Record the head arena's usage first so the pool knows how much to zero out.
  UpdateBytesAllocated();
  pool_->FreeArenaChain(arena_head_);
}
| 269 | |
// Obtains a new arena from the pool — at least the default size, larger if
// `bytes` demands it — links it into the chain, and returns the start of the
// newly allocated `bytes` region (always the new arena's Begin()).
uint8_t* ArenaAllocator::AllocFromNewArena(size_t bytes) {
  Arena* new_arena = pool_->AllocArena(std::max(arena_allocator::kArenaDefaultSize, bytes));
  DCHECK(new_arena != nullptr);
  DCHECK_LE(bytes, new_arena->Size());
  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
    // The old arena has more space remaining than the new one, so keep using it.
    // This can happen when the requested size is over half of the default size.
    DCHECK(arena_head_ != nullptr);
    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
    // Splice the (fully consumed) new arena in right behind the current head.
    new_arena->next_ = arena_head_->next_;
    arena_head_->next_ = new_arena;
  } else {
    // Record the old head's usage before switching to the new arena as head.
    UpdateBytesAllocated();
    new_arena->next_ = arena_head_;
    arena_head_ = new_arena;
    // Update our internal data structures.
    begin_ = new_arena->Begin();
    DCHECK_ALIGNED(begin_, kAlignment);
    ptr_ = begin_ + bytes;
    end_ = new_arena->End();
  }
  return new_arena->Begin();
}
| 293 | |
Vladimir Marko | f44d36c | 2017-03-14 14:18:46 +0000 | [diff] [blame] | 294 | uint8_t* ArenaAllocator::AllocFromNewArenaWithMemoryTool(size_t bytes) { |
| 295 | uint8_t* ret = AllocFromNewArena(bytes); |
| 296 | uint8_t* noaccess_begin = ret + bytes; |
| 297 | uint8_t* noaccess_end; |
| 298 | if (ret == arena_head_->Begin()) { |
| 299 | DCHECK(ptr_ - bytes == ret); |
| 300 | noaccess_end = end_; |
| 301 | } else { |
| 302 | // We're still using the old arena but `ret` comes from a new one just after it. |
| 303 | DCHECK(arena_head_->next_ != nullptr); |
| 304 | DCHECK(ret == arena_head_->next_->Begin()); |
| 305 | DCHECK_EQ(bytes, arena_head_->next_->GetBytesAllocated()); |
| 306 | noaccess_end = arena_head_->next_->End(); |
| 307 | } |
| 308 | MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, noaccess_end - noaccess_begin); |
| 309 | return ret; |
| 310 | } |
| 311 | |
Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 312 | bool ArenaAllocator::Contains(const void* ptr) const { |
| 313 | if (ptr >= begin_ && ptr < end_) { |
| 314 | return true; |
| 315 | } |
| 316 | for (const Arena* cur_arena = arena_head_; cur_arena != nullptr; cur_arena = cur_arena->next_) { |
| 317 | if (cur_arena->Contains(ptr)) { |
| 318 | return true; |
| 319 | } |
| 320 | } |
| 321 | return false; |
| 322 | } |
| 323 | |
// Captures everything Dump() needs: a label, the stats to print, the arena
// chain to walk, and a correction for the head arena's possibly stale
// bytes_allocated_ (see ArenaAllocatorStatsImpl::Dump()).
MemStats::MemStats(const char* name,
                   const ArenaAllocatorStats* stats,
                   const Arena* first_arena,
                   ssize_t lost_bytes_adjustment)
    : name_(name),
      stats_(stats),
      first_arena_(first_arena),
      lost_bytes_adjustment_(lost_bytes_adjustment) {
}
| 333 | |
// Prints a header line with the allocator name, then the detailed statistics.
void MemStats::Dump(std::ostream& os) const {
  os << name_ << " stats:\n";
  stats_->Dump(os, first_arena_, lost_bytes_adjustment_);
}
| 338 | |
Mathieu Chartier | f6c4b3b | 2013-08-24 16:11:37 -0700 | [diff] [blame] | 339 | // Dump memory usage stats. |
Vladimir Marko | 83cc7ae | 2014-02-12 18:02:05 +0000 | [diff] [blame] | 340 | MemStats ArenaAllocator::GetMemStats() const { |
| 341 | ssize_t lost_bytes_adjustment = |
| 342 | (arena_head_ == nullptr) ? 0 : (end_ - ptr_) - arena_head_->RemainingSpace(); |
| 343 | return MemStats("ArenaAllocator", this, arena_head_, lost_bytes_adjustment); |
buzbee | 862a760 | 2013-04-05 10:58:54 -0700 | [diff] [blame] | 344 | } |
| 345 | |
| 346 | } // namespace art |