/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ROOT_H_
#define ART_RUNTIME_GC_ROOT_H_

#include <ostream>
#include <string>

#include "base/macros.h"
#include "base/mutex.h"  // For Locks::mutator_lock_.
#include "mirror/object_reference.h"
#include "read_barrier_option.h"

namespace art {
class ArtField;
class ArtMethod;
template<class MirrorType> class ObjPtr;

namespace mirror {
class Object;
}  // namespace mirror

template <size_t kBufferSize>
class BufferedRootVisitor;

// Dependent on pointer size so that we don't have frames that are too big on 64-bit targets.
static const size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
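// With 8-byte pointers that is 128 entries (256 with 4-byte pointers), so the roots_ array of a
// default-sized BufferedRootVisitor occupies 1 KiB either way.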
39
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080040enum RootType {
41 kRootUnknown = 0,
42 kRootJNIGlobal,
43 kRootJNILocal,
44 kRootJavaFrame,
45 kRootNativeStack,
46 kRootStickyClass,
47 kRootThreadBlock,
48 kRootMonitorUsed,
49 kRootThreadObject,
50 kRootInternedString,
Man Cao1ed11b92015-06-11 22:47:35 -070051 kRootFinalizing, // used for HPROF's conversion to HprofHeapTag
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080052 kRootDebugger,
Man Cao1ed11b92015-06-11 22:47:35 -070053 kRootReferenceCleanup, // used for HPROF's conversion to HprofHeapTag
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080054 kRootVMInternal,
55 kRootJNIMonitor,
56};
57std::ostream& operator<<(std::ostream& os, const RootType& root_type);
58
// Only used by hprof; in particular, thread_id_ and type_ are only read there.
class RootInfo {
 public:
  // Thread id 0 is for non-thread roots.
  explicit RootInfo(RootType type, uint32_t thread_id = 0)
      : type_(type), thread_id_(thread_id) {
  }
  RootInfo(const RootInfo&) = default;
  virtual ~RootInfo() {
  }
  RootType GetType() const {
    return type_;
  }
  uint32_t GetThreadId() const {
    return thread_id_;
  }
  virtual void Describe(std::ostream& os) const {
    os << "Type=" << type_ << " thread_id=" << thread_id_;
  }
  std::string ToString() const;

 private:
  const RootType type_;
  const uint32_t thread_id_;
};

inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
  root_info.Describe(os);
  return os;
}

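// For example, streaming RootInfo(kRootJNILocal, 5u) prints something like
// "Type=kRootJNILocal thread_id=5", depending on how operator<< renders the RootType.
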
// Not all combinations of flags are valid. You may not request both all roots and only the new
// roots (there is no logical reason to do so), and you may not both start and stop logging new
// roots in the same visit (also no logical reason to do so).
//
// The precise flag ensures that more metadata is supplied; an example is vreg data for compiled
// method frames.
enum VisitRootFlags : uint8_t {
  kVisitRootFlagAllRoots = 0x1,
  kVisitRootFlagNewRoots = 0x2,
  kVisitRootFlagStartLoggingNewRoots = 0x4,
  kVisitRootFlagStopLoggingNewRoots = 0x8,
  kVisitRootFlagClearRootLog = 0x10,
  kVisitRootFlagClassLoader = 0x20,
  kVisitRootFlagPrecise = 0x80,
};
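
// For example, a full, precise visit might pass
// static_cast<VisitRootFlags>(kVisitRootFlagAllRoots | kVisitRootFlagPrecise), whereas combining
// kVisitRootFlagAllRoots with kVisitRootFlagNewRoots, or the start/stop logging flags with each
// other, is one of the invalid combinations described above.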

class RootVisitor {
 public:
  virtual ~RootVisitor() { }

  // Single root version, not overridable.
  ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    VisitRoots(&root, 1, info);
  }

  // Single root version, not overridable.
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (*root != nullptr) {
      VisitRoot(root, info);
    }
  }

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};

// Visits roots one at a time and does not support updating them; used when performance isn't
// critical.
class SingleRootVisitor : public RootVisitor {
 private:
  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      VisitRoot(*roots[i], info);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      VisitRoot(roots[i]->AsMirrorPtr(), info);
    }
  }

  virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
};
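
// A minimal sketch of a concrete visitor (RootCounter is hypothetical, not part of the runtime):
// only the single-root VisitRoot() needs to be implemented, since the batched VisitRoots()
// overloads above fan out to it.
//
//   class RootCounter : public SingleRootVisitor {
//    public:
//     void VisitRoot(mirror::Object* root, const RootInfo& info) OVERRIDE {
//       if (root != nullptr && info.GetType() == kRootJNIGlobal) {
//         ++jni_global_count_;
//       }
//     }
//     size_t jni_global_count_ = 0;
//   };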

class GcRootSource {
 public:
  GcRootSource()
      : field_(nullptr), method_(nullptr) {
  }
  explicit GcRootSource(ArtField* field)
      : field_(field), method_(nullptr) {
  }
  explicit GcRootSource(ArtMethod* method)
      : field_(nullptr), method_(method) {
  }
  ArtField* GetArtField() const {
    return field_;
  }
  ArtMethod* GetArtMethod() const {
    return method_;
  }
  bool HasArtField() const {
    return field_ != nullptr;
  }
  bool HasArtMethod() const {
    return method_ != nullptr;
  }

 private:
  ArtField* const field_;
  ArtMethod* const method_;

  DISALLOW_COPY_AND_ASSIGN(GcRootSource);
};
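
// A sketch of how a caller might attribute a root read (`field` and `root` are hypothetical, an
// ArtField* and a GcRoot<mirror::Object> respectively):
//
//   GcRootSource gc_root_source(field);
//   mirror::Object* obj = root.Read(&gc_root_source);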

template<class MirrorType>
class GcRoot {
 public:
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNull());
    mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
    visitor->VisitRoots(roots, 1u, info);
    DCHECK(!IsNull());
  }

  void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsNull()) {
      VisitRoot(visitor, info);
    }
  }

  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
    return &root_;
  }

  ALWAYS_INLINE bool IsNull() const {
    // It's safe to null-check it without a read barrier.
    return root_.IsNull();
  }

  ALWAYS_INLINE GcRoot() {}
  explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
  // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
  // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
  mutable mirror::CompressedReference<mirror::Object> root_;

  template <size_t kBufferSize> friend class BufferedRootVisitor;
};
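
// A minimal sketch of the intended usage (Example is hypothetical, not a runtime class): a native
// object embeds a GcRoot, reads it through Read() so the read barrier (if any) is applied, and
// reports it to the GC through VisitRootIfNonNull().
//
//   class Example {
//    public:
//     explicit Example(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_) : root_(obj) {}
//     mirror::Object* Get() const REQUIRES_SHARED(Locks::mutator_lock_) {
//       return root_.Read();
//     }
//     void VisitRoots(RootVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
//       root_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
//     }
//
//    private:
//     GcRoot<mirror::Object> root_;
//   };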

// Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
// only for CompressedReferences, since these are more common than the Object** roots, which are
// used only for thread-local roots.
template <size_t kBufferSize>
class BufferedRootVisitor {
 public:
  BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
      : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
  }

  ~BufferedRootVisitor() {
    Flush();
  }

  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root.IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  void VisitRoot(GcRoot<MirrorType>& root) REQUIRES_SHARED(Locks::mutator_lock_) {
    VisitRoot(root.AddressWithoutBarrier());
  }

  template <class MirrorType>
  void VisitRoot(mirror::CompressedReference<MirrorType>* root)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
      Flush();
    }
    roots_[buffer_pos_++] = root;
  }

  void Flush() REQUIRES_SHARED(Locks::mutator_lock_) {
    visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
    buffer_pos_ = 0;
  }

 private:
  RootVisitor* const visitor_;
  RootInfo root_info_;
  mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
  size_t buffer_pos_;
};

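// A sketch of the intended usage (VisitExampleRoots and `roots` are hypothetical): roots are
// batched and handed to the underlying visitor in groups of up to kBufferSize, so roughly one
// virtual VisitRoots() call is made per kBufferSize roots instead of one per root. The destructor
// flushes whatever is still buffered.
//
//   void VisitExampleRoots(RootVisitor* visitor, std::vector<GcRoot<mirror::Object>>& roots)
//       REQUIRES_SHARED(Locks::mutator_lock_) {
//     BufferedRootVisitor<kDefaultBufferedRootCount> buffered_visitor(
//         visitor, RootInfo(kRootVMInternal));
//     for (GcRoot<mirror::Object>& root : roots) {
//       buffered_visitor.VisitRootIfNonNull(root);
//     }
//   }  // ~BufferedRootVisitor flushes the remaining roots.

// Same interface shape as BufferedRootVisitor, but each root is forwarded to the wrapped
// RootVisitor immediately, one at a time, with no batching.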
class UnbufferedRootVisitor {
 public:
  UnbufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
      : visitor_(visitor), root_info_(root_info) {}

  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root.IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  void VisitRoot(GcRoot<MirrorType>& root) const REQUIRES_SHARED(Locks::mutator_lock_) {
    VisitRoot(root.AddressWithoutBarrier());
  }

  template <class MirrorType>
  void VisitRoot(mirror::CompressedReference<MirrorType>* root) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    visitor_->VisitRoots(&root, 1, root_info_);
  }

 private:
  RootVisitor* const visitor_;
  RootInfo root_info_;
};

}  // namespace art

#endif  // ART_RUNTIME_GC_ROOT_H_