/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "scoped_arena_allocator.h"

#include "arena_allocator-inl.h"
#include "base/memory_tool.h"

namespace art {

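// Guard bytes reserved after each allocation when running under a memory tool (such as
// AddressSanitizer). The guard region is never unpoisoned, so overruns past the requested
// size are caught (see AllocWithMemoryTool below).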
static constexpr size_t kMemoryToolRedZoneBytes = 8;

ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr) {
}

ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

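// Release every arena back to the pool and return the stack to its initial, empty state.
// Must not be called while any ScopedArenaAllocator still references this stack
// (verified by the debug-build reference count check below).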
void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

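// Report the peak allocation statistics recorded for this stack as a MemStats snapshot.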
MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", static_cast<const TaggedStats<Peak>*>(&stats_and_pool_),
                  bottom_arena_);
}

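// Slow path taken when the current arena cannot fit a request of `rounded_bytes`: reuse the
// next arena in the chain if it is large enough, otherwise take a fresh arena from the pool
// and splice it in right after the current one.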
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(arena_allocator::kArenaDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    top_arena_ = top_arena_->next_;
  } else {
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

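// Fold the current scope's statistics into the recorded peak, then restore the statistics
// that were captured when the scope was entered.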
void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

void* ArenaStack::AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind) {
  // We mark all memory for a newly retrieved arena as inaccessible and then
  // mark only the actually allocated memory as defined. That leaves red zones
  // and padding between allocations marked as inaccessible.
  size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
    MEMORY_TOOL_MAKE_NOACCESS(ptr, top_end_ - ptr);
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  MEMORY_TOOL_MAKE_UNDEFINED(ptr, bytes);
  return ptr;
}

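// Typical usage (a minimal sketch; Alloc() and the allocation kinds are declared in
// scoped_arena_allocator.h and arena_allocator.h, not in this file):
//
//   ArenaStack arena_stack(pool);
//   {
//     ScopedArenaAllocator allocator(&arena_stack);
//     void* data = allocator.Alloc(256, kArenaAllocMisc);
//     // ... use `data`; it remains valid until the allocator is Reset() or destroyed ...
//   }  // Destruction pops everything allocated in this scope off the arena stack.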
ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
  : DebugStackReference(arena_stack),
    DebugStackRefCounter(),
    ArenaAllocatorStats(*arena_stack->CurrentStats()),
    arena_stack_(arena_stack),
    mark_arena_(arena_stack->top_arena_),
    mark_ptr_(arena_stack->top_ptr_),
    mark_end_(arena_stack->top_end_) {
}

ScopedArenaAllocator::~ScopedArenaAllocator() {
  DoReset();
}

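// Pop this scope's allocations off the arena stack while keeping the allocator usable for
// further allocations.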
void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

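// Restore the arena stack to the mark captured in the constructor, after folding this scope's
// statistics into the stack's peak stats. If the mark was taken while the stack was still
// empty but arenas have been allocated since, rewind to the start of the first arena instead.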
void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

}  // namespace art