David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2018 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
| 16 | |
| 17 | #include "mem_map_arena_pool.h" |
| 18 | |
| 19 | #include <sys/mman.h> |
| 20 | |
| 21 | #include <algorithm> |
| 22 | #include <cstddef> |
| 23 | #include <iomanip> |
| 24 | #include <numeric> |
| 25 | |
| 26 | #include <android-base/logging.h> |
| 27 | |
| 28 | #include "base/arena_allocator-inl.h" |
David Sehr | 79e2607 | 2018-04-06 17:58:50 -0700 | [diff] [blame] | 29 | #include "base/mem_map.h" |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 30 | #include "base/systrace.h" |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 31 | |
| 32 | namespace art { |
| 33 | |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 34 | class MemMapArena final : public Arena { |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 35 | public: |
| 36 | MemMapArena(size_t size, bool low_4gb, const char* name); |
| 37 | virtual ~MemMapArena(); |
Roland Levillain | bbc6e7e | 2018-08-24 16:58:47 +0100 | [diff] [blame] | 38 | void Release() override; |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 39 | |
| 40 | private: |
Vladimir Marko | c34bebf | 2018-08-16 16:12:49 +0100 | [diff] [blame] | 41 | static MemMap Allocate(size_t size, bool low_4gb, const char* name); |
| 42 | |
| 43 | MemMap map_; |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 44 | }; |
| 45 | |
Vladimir Marko | c34bebf | 2018-08-16 16:12:49 +0100 | [diff] [blame] | 46 | MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name) |
| 47 | : map_(Allocate(size, low_4gb, name)) { |
| 48 | memory_ = map_.Begin(); |
| 49 | static_assert(ArenaAllocator::kArenaAlignment <= kPageSize, |
| 50 | "Arena should not need stronger alignment than kPageSize."); |
| 51 | DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment); |
| 52 | size_ = map_.Size(); |
| 53 | } |
| 54 | |
| 55 | MemMap MemMapArena::Allocate(size_t size, bool low_4gb, const char* name) { |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 56 | // Round up to a full page as that's the smallest unit of allocation for mmap() |
| 57 | // and we want to be able to use all memory that we actually allocate. |
| 58 | size = RoundUp(size, kPageSize); |
| 59 | std::string error_msg; |
Vladimir Marko | c34bebf | 2018-08-16 16:12:49 +0100 | [diff] [blame] | 60 | MemMap map = MemMap::MapAnonymous(name, |
Vladimir Marko | c34bebf | 2018-08-16 16:12:49 +0100 | [diff] [blame] | 61 | size, |
| 62 | PROT_READ | PROT_WRITE, |
| 63 | low_4gb, |
Vladimir Marko | c34bebf | 2018-08-16 16:12:49 +0100 | [diff] [blame] | 64 | &error_msg); |
| 65 | CHECK(map.IsValid()) << error_msg; |
| 66 | return map; |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 67 | } |
| 68 | |
MemMapArena::~MemMapArena() {
  // Empty: map_ is a MemMap value member (not a unique_ptr), so its own
  // destructor unmaps the region automatically.
}
| 72 | |
| 73 | void MemMapArena::Release() { |
| 74 | if (bytes_allocated_ > 0) { |
Vladimir Marko | c34bebf | 2018-08-16 16:12:49 +0100 | [diff] [blame] | 75 | map_.MadviseDontNeedAndZero(); |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 76 | bytes_allocated_ = 0; |
| 77 | } |
| 78 | } |
| 79 | |
// Pool of MemMapArenas. `name` labels the anonymous mappings (e.g. in
// /proc/self/maps); `low_4gb` forces arenas below the 4 GiB boundary.
MemMapArenaPool::MemMapArenaPool(bool low_4gb, const char* name)
    : low_4gb_(low_4gb),
      name_(name),
      free_arenas_(nullptr) {
  // Ensure the MemMap subsystem is initialized before the first Allocate().
  MemMap::Init();
}
| 86 | |
MemMapArenaPool::~MemMapArenaPool() {
  // Frees every arena still on the free list; no lock is taken (see
  // ReclaimMemory), which is safe only because no other thread can be
  // using the pool during destruction.
  ReclaimMemory();
}
| 90 | |
| 91 | void MemMapArenaPool::ReclaimMemory() { |
| 92 | while (free_arenas_ != nullptr) { |
| 93 | Arena* arena = free_arenas_; |
| 94 | free_arenas_ = free_arenas_->next_; |
| 95 | delete arena; |
| 96 | } |
| 97 | } |
| 98 | |
// Thread-safe variant of ReclaimMemory(): takes the pool lock first.
void MemMapArenaPool::LockReclaimMemory() {
  std::lock_guard<std::mutex> lock(lock_);
  ReclaimMemory();
}
| 103 | |
| 104 | Arena* MemMapArenaPool::AllocArena(size_t size) { |
| 105 | Arena* ret = nullptr; |
| 106 | { |
| 107 | std::lock_guard<std::mutex> lock(lock_); |
| 108 | if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) { |
| 109 | ret = free_arenas_; |
| 110 | free_arenas_ = free_arenas_->next_; |
| 111 | } |
| 112 | } |
| 113 | if (ret == nullptr) { |
| 114 | ret = new MemMapArena(size, low_4gb_, name_); |
| 115 | } |
| 116 | ret->Reset(); |
| 117 | return ret; |
| 118 | } |
| 119 | |
| 120 | void MemMapArenaPool::TrimMaps() { |
| 121 | ScopedTrace trace(__PRETTY_FUNCTION__); |
| 122 | std::lock_guard<std::mutex> lock(lock_); |
| 123 | for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) { |
| 124 | arena->Release(); |
| 125 | } |
| 126 | } |
| 127 | |
| 128 | size_t MemMapArenaPool::GetBytesAllocated() const { |
| 129 | size_t total = 0; |
| 130 | std::lock_guard<std::mutex> lock(lock_); |
| 131 | for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) { |
| 132 | total += arena->GetBytesAllocated(); |
| 133 | } |
| 134 | return total; |
| 135 | } |
| 136 | |
| 137 | void MemMapArenaPool::FreeArenaChain(Arena* first) { |
Roland Levillain | 05e34f4 | 2018-05-24 13:19:05 +0000 | [diff] [blame] | 138 | if (kRunningOnMemoryTool) { |
David Sehr | 3215fff | 2018-04-03 17:10:12 -0700 | [diff] [blame] | 139 | for (Arena* arena = first; arena != nullptr; arena = arena->next_) { |
| 140 | MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_); |
| 141 | } |
| 142 | } |
| 143 | |
| 144 | if (arena_allocator::kArenaAllocatorPreciseTracking) { |
| 145 | // Do not reuse arenas when tracking. |
| 146 | while (first != nullptr) { |
| 147 | Arena* next = first->next_; |
| 148 | delete first; |
| 149 | first = next; |
| 150 | } |
| 151 | return; |
| 152 | } |
| 153 | |
| 154 | if (first != nullptr) { |
| 155 | Arena* last = first; |
| 156 | while (last->next_ != nullptr) { |
| 157 | last = last->next_; |
| 158 | } |
| 159 | std::lock_guard<std::mutex> lock(lock_); |
| 160 | last->next_ = free_arenas_; |
| 161 | free_arenas_ = first; |
| 162 | } |
| 163 | } |
| 164 | |
| 165 | } // namespace art |