/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_
#define ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_

#include "arena_allocator.h"
#include "debug_stack.h"
#include "globals.h"
#include "logging.h"
#include "macros.h"

namespace art {

class ArenaStack;
class ScopedArenaAllocator;

template <typename T>
class ScopedArenaAllocatorAdapter;

34// Holds a list of Arenas for use by ScopedArenaAllocator stack.
35class ArenaStack : private DebugStackRefCounter {
36 public:
37 explicit ArenaStack(ArenaPool* arena_pool);
38 ~ArenaStack();
39
Vladimir Marko53b6afc2014-03-21 14:21:20 +000040 void Reset();
41
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000042 size_t PeakBytesAllocated() {
43 return PeakStats()->BytesAllocated();
44 }
45
46 MemStats GetPeakStats() const;
47
48 private:
49 struct Peak;
50 struct Current;
51 template <typename Tag> struct TaggedStats : ArenaAllocatorStats { };
52 struct StatsAndPool : TaggedStats<Peak>, TaggedStats<Current> {
53 explicit StatsAndPool(ArenaPool* arena_pool) : pool(arena_pool) { }
54 ArenaPool* const pool;
55 };
56
57 ArenaAllocatorStats* PeakStats() {
58 return static_cast<TaggedStats<Peak>*>(&stats_and_pool_);
59 }
60
61 ArenaAllocatorStats* CurrentStats() {
62 return static_cast<TaggedStats<Current>*>(&stats_and_pool_);
63 }
64
65 // Private - access via ScopedArenaAllocator or ScopedArenaAllocatorAdapter.
66 void* Alloc(size_t bytes, ArenaAllocKind kind) ALWAYS_INLINE {
67 if (UNLIKELY(running_on_valgrind_)) {
68 return AllocValgrind(bytes, kind);
69 }
Vladimir Marko22a0ef82014-06-10 14:47:51 +010070 size_t rounded_bytes = RoundUp(bytes, 8);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +000071 uint8_t* ptr = top_ptr_;
72 if (UNLIKELY(static_cast<size_t>(top_end_ - ptr) < rounded_bytes)) {
73 ptr = AllocateFromNextArena(rounded_bytes);
74 }
75 CurrentStats()->RecordAlloc(bytes, kind);
76 top_ptr_ = ptr + rounded_bytes;
77 return ptr;
78 }
79
80 uint8_t* AllocateFromNextArena(size_t rounded_bytes);
81 void UpdatePeakStatsAndRestore(const ArenaAllocatorStats& restore_stats);
82 void UpdateBytesAllocated();
83 void* AllocValgrind(size_t bytes, ArenaAllocKind kind);
84
85 StatsAndPool stats_and_pool_;
86 Arena* bottom_arena_;
87 Arena* top_arena_;
88 uint8_t* top_ptr_;
89 uint8_t* top_end_;
90
91 const bool running_on_valgrind_;
92
93 friend class ScopedArenaAllocator;
94 template <typename T>
95 friend class ScopedArenaAllocatorAdapter;
96
97 DISALLOW_COPY_AND_ASSIGN(ArenaStack);
98};
100class ScopedArenaAllocator
101 : private DebugStackReference, private DebugStackRefCounter, private ArenaAllocatorStats {
102 public:
103 // Create a ScopedArenaAllocator directly on the ArenaStack when the scope of
104 // the allocator is not exactly a C++ block scope. For example, an optimization
105 // pass can create the scoped allocator in Start() and destroy it in End().
106 static ScopedArenaAllocator* Create(ArenaStack* arena_stack) {
107 void* addr = arena_stack->Alloc(sizeof(ScopedArenaAllocator), kArenaAllocMisc);
108 ScopedArenaAllocator* allocator = new(addr) ScopedArenaAllocator(arena_stack);
109 allocator->mark_ptr_ = reinterpret_cast<uint8_t*>(addr);
110 return allocator;
111 }
112
113 explicit ScopedArenaAllocator(ArenaStack* arena_stack);
114 ~ScopedArenaAllocator();
115
116 void Reset();
117
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000118 void* Alloc(size_t bytes, ArenaAllocKind kind = kArenaAllocMisc) ALWAYS_INLINE {
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000119 DebugStackReference::CheckTop();
120 return arena_stack_->Alloc(bytes, kind);
121 }
122
Vladimir Markoe4fcc5b2015-02-13 10:28:29 +0000123 template <typename T>
124 T* AllocArray(size_t length, ArenaAllocKind kind = kArenaAllocMisc) {
125 return static_cast<T*>(Alloc(length * sizeof(T), kind));
126 }
127
Vladimir Marko8081d2b2014-07-31 15:33:43 +0100128 // Get adapter for use in STL containers. See scoped_arena_containers.h .
129 ScopedArenaAllocatorAdapter<void> Adapter(ArenaAllocKind kind = kArenaAllocSTL);
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000130
131 // Allow a delete-expression to destroy but not deallocate allocators created by Create().
132 static void operator delete(void* ptr) { UNUSED(ptr); }
133
134 private:
135 ArenaStack* const arena_stack_;
136 Arena* mark_arena_;
137 uint8_t* mark_ptr_;
138 uint8_t* mark_end_;
139
Vladimir Marko3d2ec352014-10-10 15:39:11 +0100140 void DoReset();
141
Vladimir Marko83cc7ae2014-02-12 18:02:05 +0000142 template <typename T>
143 friend class ScopedArenaAllocatorAdapter;
144
145 DISALLOW_COPY_AND_ASSIGN(ScopedArenaAllocator);
146};

}  // namespace art

#endif  // ART_RUNTIME_BASE_SCOPED_ARENA_ALLOCATOR_H_