blob: ae7db45024b2b0cee513f4804cb5da25793409f3 [file] [log] [blame]
David Sehr3215fff2018-04-03 17:10:12 -07001/*
2 * Copyright (C) 2018 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "mem_map_arena_pool.h"
18
19#include <sys/mman.h>
20
21#include <algorithm>
22#include <cstddef>
23#include <iomanip>
24#include <numeric>
25
26#include <android-base/logging.h>
27
28#include "base/arena_allocator-inl.h"
David Sehr79e26072018-04-06 17:58:50 -070029#include "base/mem_map.h"
David Sehr3215fff2018-04-03 17:10:12 -070030#include "base/systrace.h"
David Sehr3215fff2018-04-03 17:10:12 -070031
32namespace art {
33
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010034class MemMapArena final : public Arena {
David Sehr3215fff2018-04-03 17:10:12 -070035 public:
36 MemMapArena(size_t size, bool low_4gb, const char* name);
37 virtual ~MemMapArena();
Roland Levillainbbc6e7e2018-08-24 16:58:47 +010038 void Release() override;
David Sehr3215fff2018-04-03 17:10:12 -070039
40 private:
Vladimir Markoc34bebf2018-08-16 16:12:49 +010041 static MemMap Allocate(size_t size, bool low_4gb, const char* name);
42
43 MemMap map_;
David Sehr3215fff2018-04-03 17:10:12 -070044};
45
Vladimir Markoc34bebf2018-08-16 16:12:49 +010046MemMapArena::MemMapArena(size_t size, bool low_4gb, const char* name)
47 : map_(Allocate(size, low_4gb, name)) {
48 memory_ = map_.Begin();
49 static_assert(ArenaAllocator::kArenaAlignment <= kPageSize,
50 "Arena should not need stronger alignment than kPageSize.");
51 DCHECK_ALIGNED(memory_, ArenaAllocator::kArenaAlignment);
52 size_ = map_.Size();
53}
54
55MemMap MemMapArena::Allocate(size_t size, bool low_4gb, const char* name) {
David Sehr3215fff2018-04-03 17:10:12 -070056 // Round up to a full page as that's the smallest unit of allocation for mmap()
57 // and we want to be able to use all memory that we actually allocate.
58 size = RoundUp(size, kPageSize);
59 std::string error_msg;
Vladimir Markoc34bebf2018-08-16 16:12:49 +010060 MemMap map = MemMap::MapAnonymous(name,
Vladimir Markoc34bebf2018-08-16 16:12:49 +010061 size,
62 PROT_READ | PROT_WRITE,
63 low_4gb,
Vladimir Markoc34bebf2018-08-16 16:12:49 +010064 &error_msg);
65 CHECK(map.IsValid()) << error_msg;
66 return map;
David Sehr3215fff2018-04-03 17:10:12 -070067}
68
69MemMapArena::~MemMapArena() {
70 // Destroys MemMap via std::unique_ptr<>.
71}
72
73void MemMapArena::Release() {
74 if (bytes_allocated_ > 0) {
Vladimir Markoc34bebf2018-08-16 16:12:49 +010075 map_.MadviseDontNeedAndZero();
David Sehr3215fff2018-04-03 17:10:12 -070076 bytes_allocated_ = 0;
77 }
78}
79
// Constructs a pool whose arenas are backed by anonymous memory mappings.
// `low_4gb` requests mappings below 4 GiB; `name` labels each arena's
// mapping. Starts with an empty free list.
// NOTE(review): `name` appears to be stored as a raw pointer (name_), so it
// presumably must outlive the pool — confirm against the header.
MemMapArenaPool::MemMapArenaPool(bool low_4gb, const char* name)
    : low_4gb_(low_4gb),
      name_(name),
      free_arenas_(nullptr) {
  // Make sure the MemMap subsystem is set up before any arena is mapped.
  MemMap::Init();
}
86
// Deletes every arena still on the free list. Arenas handed out via
// AllocArena() and never returned through FreeArenaChain() are not tracked
// by the pool and are not freed here.
MemMapArenaPool::~MemMapArenaPool() {
  ReclaimMemory();
}
90
91void MemMapArenaPool::ReclaimMemory() {
92 while (free_arenas_ != nullptr) {
93 Arena* arena = free_arenas_;
94 free_arenas_ = free_arenas_->next_;
95 delete arena;
96 }
97}
98
// Thread-safe wrapper around ReclaimMemory(): takes the pool lock before
// freeing the free list. The unlocked ReclaimMemory() is used directly only
// from the destructor, where no concurrent access is possible.
void MemMapArenaPool::LockReclaimMemory() {
  std::lock_guard<std::mutex> lock(lock_);
  ReclaimMemory();
}
103
104Arena* MemMapArenaPool::AllocArena(size_t size) {
105 Arena* ret = nullptr;
106 {
107 std::lock_guard<std::mutex> lock(lock_);
108 if (free_arenas_ != nullptr && LIKELY(free_arenas_->Size() >= size)) {
109 ret = free_arenas_;
110 free_arenas_ = free_arenas_->next_;
111 }
112 }
113 if (ret == nullptr) {
114 ret = new MemMapArena(size, low_4gb_, name_);
115 }
116 ret->Reset();
117 return ret;
118}
119
120void MemMapArenaPool::TrimMaps() {
121 ScopedTrace trace(__PRETTY_FUNCTION__);
122 std::lock_guard<std::mutex> lock(lock_);
123 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
124 arena->Release();
125 }
126}
127
128size_t MemMapArenaPool::GetBytesAllocated() const {
129 size_t total = 0;
130 std::lock_guard<std::mutex> lock(lock_);
131 for (Arena* arena = free_arenas_; arena != nullptr; arena = arena->next_) {
132 total += arena->GetBytesAllocated();
133 }
134 return total;
135}
136
137void MemMapArenaPool::FreeArenaChain(Arena* first) {
Roland Levillain05e34f42018-05-24 13:19:05 +0000138 if (kRunningOnMemoryTool) {
David Sehr3215fff2018-04-03 17:10:12 -0700139 for (Arena* arena = first; arena != nullptr; arena = arena->next_) {
140 MEMORY_TOOL_MAKE_UNDEFINED(arena->memory_, arena->bytes_allocated_);
141 }
142 }
143
144 if (arena_allocator::kArenaAllocatorPreciseTracking) {
145 // Do not reuse arenas when tracking.
146 while (first != nullptr) {
147 Arena* next = first->next_;
148 delete first;
149 first = next;
150 }
151 return;
152 }
153
154 if (first != nullptr) {
155 Arena* last = first;
156 while (last->next_ != nullptr) {
157 last = last->next_;
158 }
159 std::lock_guard<std::mutex> lock(lock_);
160 last->next_ = free_arenas_;
161 free_arenas_ = first;
162 }
163}
164
165} // namespace art