/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

17#include "scoped_arena_allocator.h"
18
Mathieu Chartierb666f482015-02-18 14:33:14 -080019#include "arena_allocator.h"
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000020#include <memcheck/memcheck.h>
21
22namespace art {
23
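// Width of the red zone placed after each allocation when running under
// Valgrind. The red zone is marked NOACCESS so that reads or writes past the
// end of an allocation are reported as errors.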
static constexpr size_t kValgrindRedZoneBytes = 8;

ArenaStack::ArenaStack(ArenaPool* arena_pool)
  : DebugStackRefCounter(),
    stats_and_pool_(arena_pool),
    bottom_arena_(nullptr),
    top_arena_(nullptr),
    top_ptr_(nullptr),
    top_end_(nullptr),
    running_on_valgrind_(RUNNING_ON_VALGRIND > 0) {
}

ArenaStack::~ArenaStack() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
}

void ArenaStack::Reset() {
  DebugStackRefCounter::CheckNoRefs();
  stats_and_pool_.pool->FreeArenaChain(bottom_arena_);
  bottom_arena_ = nullptr;
  top_arena_ = nullptr;
  top_ptr_ = nullptr;
  top_end_ = nullptr;
}

MemStats ArenaStack::GetPeakStats() const {
  DebugStackRefCounter::CheckNoRefs();
  return MemStats("ArenaStack peak", static_cast<const TaggedStats<Peak>*>(&stats_and_pool_),
                  bottom_arena_);
}

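// Slow path for allocation: the current arena is exhausted (or none exists
// yet), so move to the next arena in the chain. A previously used successor
// arena is recycled if it is large enough; otherwise a new arena is taken
// from the pool and spliced into the chain.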
uint8_t* ArenaStack::AllocateFromNextArena(size_t rounded_bytes) {
  UpdateBytesAllocated();
  size_t allocation_size = std::max(Arena::kDefaultSize, rounded_bytes);
  if (UNLIKELY(top_arena_ == nullptr)) {
    top_arena_ = bottom_arena_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_->next_ = nullptr;
  } else if (top_arena_->next_ != nullptr && top_arena_->next_->Size() >= allocation_size) {
    top_arena_ = top_arena_->next_;
  } else {
    Arena* tail = top_arena_->next_;
    top_arena_->next_ = stats_and_pool_.pool->AllocArena(allocation_size);
    top_arena_ = top_arena_->next_;
    top_arena_->next_ = tail;
  }
  top_end_ = top_arena_->End();
  // top_ptr_ shall be updated by ScopedArenaAllocator.
  return top_arena_->Begin();
}

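// Folds the current statistics into the running peak, then restores the
// current statistics from restore_stats (the snapshot taken when the
// ScopedArenaAllocator being destroyed or reset was constructed).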
void ArenaStack::UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats) {
  if (PeakStats()->BytesAllocated() < CurrentStats()->BytesAllocated()) {
    PeakStats()->Copy(*CurrentStats());
  }
  CurrentStats()->Copy(restore_stats);
}

void ArenaStack::UpdateBytesAllocated() {
  if (top_arena_ != nullptr) {
    // Update how many bytes we have allocated into the arena so that the arena pool knows how
    // much memory to zero out. Though ScopedArenaAllocator doesn't guarantee the memory is
    // zero-initialized, the Arena may be reused by ArenaAllocator which does guarantee this.
    size_t allocated = static_cast<size_t>(top_ptr_ - top_arena_->Begin());
    if (top_arena_->bytes_allocated_ < allocated) {
      top_arena_->bytes_allocated_ = allocated;
    }
  }
}

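// Allocation path used when running under Valgrind. Each allocation is padded
// with a red zone, giving the following layout:
//
//   ptr               ptr + bytes                ptr + rounded_bytes
//    |--- usable, UNDEFINED ---|--- red zone, NOACCESS ---|
//
// where rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8), so the
// red zone is at least kValgrindRedZoneBytes wide.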
void* ArenaStack::AllocValgrind(size_t bytes, ArenaAllocKind kind) {
  size_t rounded_bytes = RoundUp(bytes + kValgrindRedZoneBytes, 8);
  uint8_t* ptr = top_ptr_;
  if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
    ptr = AllocateFromNextArena(rounded_bytes);
    CHECK(ptr != nullptr) << "Failed to allocate memory";
  }
  CurrentStats()->RecordAlloc(bytes, kind);
  top_ptr_ = ptr + rounded_bytes;
  VALGRIND_MAKE_MEM_UNDEFINED(ptr, bytes);
  VALGRIND_MAKE_MEM_NOACCESS(ptr + bytes, rounded_bytes - bytes);
  return ptr;
}

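// The constructor records the current top of the arena stack as a mark, along
// with a snapshot of the current allocation statistics; DoReset() later
// rewinds the stack to this mark.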
ScopedArenaAllocator::ScopedArenaAllocator(ArenaStack* arena_stack)
  : DebugStackReference(arena_stack),
    DebugStackRefCounter(),
    ArenaAllocatorStats(*arena_stack->CurrentStats()),
    arena_stack_(arena_stack),
    mark_arena_(arena_stack->top_arena_),
    mark_ptr_(arena_stack->top_ptr_),
    mark_end_(arena_stack->top_end_) {
}

ScopedArenaAllocator::~ScopedArenaAllocator() {
  DoReset();
}

void ScopedArenaAllocator::Reset() {
  DoReset();
  // If this allocator was Create()d, we need to move the arena_stack_->top_ptr_ past *this.
  if (mark_ptr_ == reinterpret_cast<uint8_t*>(this)) {
    arena_stack_->top_ptr_ = mark_ptr_ + RoundUp(sizeof(ScopedArenaAllocator), 8);
  }
}

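// Rewinds the arena stack to the mark taken at construction and restores the
// saved allocation statistics, folding the current figures into the peak
// first. If the stack had no arenas at construction (mark_arena_ == nullptr)
// but has some now, the mark is re-established at the start of the bottom
// arena instead.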
void ScopedArenaAllocator::DoReset() {
  DebugStackReference::CheckTop();
  DebugStackRefCounter::CheckNoRefs();
  arena_stack_->UpdatePeakStatsAndRestore(*this);
  arena_stack_->UpdateBytesAllocated();
  if (LIKELY(mark_arena_ != nullptr)) {
    arena_stack_->top_arena_ = mark_arena_;
    arena_stack_->top_ptr_ = mark_ptr_;
    arena_stack_->top_end_ = mark_end_;
  } else if (arena_stack_->bottom_arena_ != nullptr) {
    mark_arena_ = arena_stack_->top_arena_ = arena_stack_->bottom_arena_;
    mark_ptr_ = arena_stack_->top_ptr_ = mark_arena_->Begin();
    mark_end_ = arena_stack_->top_end_ = mark_arena_->End();
  }
}

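// Example usage (a sketch; assumes an ArenaPool is available and that the
// header declares ScopedArenaAllocator::Alloc(size_t, ArenaAllocKind)):
//
//   ArenaPool pool;
//   ArenaStack arena_stack(&pool);
//   {
//     ScopedArenaAllocator allocator(&arena_stack);
//     void* buffer = allocator.Alloc(256, kArenaAllocMisc);
//     // ... use buffer; it is reclaimed when allocator goes out of scope.
//   }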
}  // namespace art