blob: b795409bcf92c89234a4b49c15cf1f3b9e98917b [file] [log] [blame]
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_GC_ROOT_H_
18#define ART_RUNTIME_GC_ROOT_H_
19
Mathieu Chartierbad02672014-08-25 13:08:22 -070020#include "base/macros.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070021#include "base/mutex.h" // For Locks::mutator_lock_.
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070022#include "mirror/object_reference.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070023
24namespace art {
Hiroshi Yamauchi3f64f252015-06-12 18:35:06 -070025class ArtField;
26class ArtMethod;
Mathieu Chartier3398c782016-09-30 10:27:43 -070027template<class MirrorType, bool kPoison> class ObjPtr;
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070028
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080029namespace mirror {
30class Object;
31} // namespace mirror
32
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070033template <size_t kBufferSize>
34class BufferedRootVisitor;
35
// Default capacity for BufferedRootVisitor, scaled by pointer size so that
// stack frames holding a buffer are not too big on 64-bit targets.
// constexpr (rather than static const) makes the compile-time-constant intent
// explicit and guarantees usability in constant expressions.
static constexpr size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
38
// Classification of where a GC root originates. Primarily consumed by hprof
// (heap-dump) code, per the per-enumerator notes below.
enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal,
  kRootJNILocal,
  kRootJavaFrame,
  kRootNativeStack,
  kRootStickyClass,
  kRootThreadBlock,
  kRootMonitorUsed,
  kRootThreadObject,
  kRootInternedString,
  kRootFinalizing,  // used for HPROF's conversion to HprofHeapTag
  kRootDebugger,
  kRootReferenceCleanup,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal,
  kRootJNIMonitor,
};
// Stream insertion for RootType; definition lives out of line.
std::ostream& operator<<(std::ostream& os, const RootType& root_type);
57
Mathieu Chartierd3ed9a32015-04-10 14:23:35 -070058// Only used by hprof. thread_id_ and type_ are only used by hprof.
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080059class RootInfo {
60 public:
61 // Thread id 0 is for non thread roots.
62 explicit RootInfo(RootType type, uint32_t thread_id = 0)
63 : type_(type), thread_id_(thread_id) {
64 }
Andreas Gampe758a8012015-04-03 21:28:42 -070065 RootInfo(const RootInfo&) = default;
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080066 virtual ~RootInfo() {
67 }
68 RootType GetType() const {
69 return type_;
70 }
71 uint32_t GetThreadId() const {
72 return thread_id_;
73 }
74 virtual void Describe(std::ostream& os) const {
75 os << "Type=" << type_ << " thread_id=" << thread_id_;
76 }
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070077 std::string ToString() const;
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -080078
79 private:
80 const RootType type_;
81 const uint32_t thread_id_;
82};
83
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070084inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
85 root_info.Describe(os);
86 return os;
87}
88
89class RootVisitor {
90 public:
91 virtual ~RootVisitor() { }
92
Mathieu Chartierd3ed9a32015-04-10 14:23:35 -070093 // Single root version, not overridable.
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070094 ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070095 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -070096 VisitRoots(&root, 1, info);
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -070097 }
98
Mathieu Chartierd3ed9a32015-04-10 14:23:35 -070099 // Single root version, not overridable.
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -0700100 ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700101 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartier9b1c71e2015-09-02 18:51:54 -0700102 if (*root != nullptr) {
103 VisitRoot(root, info);
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700104 }
105 }
106
107 virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700108 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700109
110 virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
111 const RootInfo& info)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700112 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700113};
114
115// Only visits roots one at a time, doesn't handle updating roots. Used when performance isn't
116// critical.
117class SingleRootVisitor : public RootVisitor {
118 private:
119 void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700120 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700121 for (size_t i = 0; i < count; ++i) {
122 VisitRoot(*roots[i], info);
123 }
124 }
125
126 void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
127 const RootInfo& info) OVERRIDE
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700128 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700129 for (size_t i = 0; i < count; ++i) {
130 VisitRoot(roots[i]->AsMirrorPtr(), info);
131 }
132 }
133
134 virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
135};
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800136
Hiroshi Yamauchi3f64f252015-06-12 18:35:06 -0700137class GcRootSource {
138 public:
139 GcRootSource()
140 : field_(nullptr), method_(nullptr) {
141 }
142 explicit GcRootSource(ArtField* field)
143 : field_(field), method_(nullptr) {
144 }
145 explicit GcRootSource(ArtMethod* method)
146 : field_(nullptr), method_(method) {
147 }
148 ArtField* GetArtField() const {
149 return field_;
150 }
151 ArtMethod* GetArtMethod() const {
152 return method_;
153 }
154 bool HasArtField() const {
155 return field_ != nullptr;
156 }
157 bool HasArtMethod() const {
158 return method_ != nullptr;
159 }
160
161 private:
162 ArtField* const field_;
163 ArtMethod* const method_;
164
165 DISALLOW_COPY_AND_ASSIGN(GcRootSource);
166};
167
// A GC-visible root holding a reference to a MirrorType object. Reads go
// through Read(), whose ReadBarrierOption template parameter defaults to
// kWithReadBarrier, so accesses can participate in read-barrier handling.
template<class MirrorType>
class GcRoot {
 public:
  // Returns the referenced object (definition is out of line). gc_root_source
  // optionally identifies the field/method this root was read from.
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Reports this root to the visitor. The root must be non-null both before
  // and after the visit: the visitor may update it, but not to null.
  void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(!IsNull());
    mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
    visitor->VisitRoots(roots, 1u, info);
    DCHECK(!IsNull());
  }

  // As VisitRoot(), but silently skips a null root.
  void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!IsNull()) {
      VisitRoot(visitor, info);
    }
  }

  // Raw address of the underlying compressed reference; bypasses any barrier.
  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
    return &root_;
  }

  ALWAYS_INLINE bool IsNull() const {
    // It's safe to null-check it without a read barrier.
    return root_.IsNull();
  }

  ALWAYS_INLINE GcRoot() {}
  // Value constructors are declared here but defined out of line.
  explicit ALWAYS_INLINE GcRoot(MirrorType* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  explicit ALWAYS_INLINE GcRoot(ObjPtr<MirrorType, kIsDebugBuild> ref)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
  // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
  // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
  // mutable: visitors may update the reference through a const GcRoot.
  mutable mirror::CompressedReference<mirror::Object> root_;

  template <size_t kBufferSize> friend class BufferedRootVisitor;
};
213
214// Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
215// only for CompressedReferences since these are more common than the Object** roots which are only
216// for thread local roots.
217template <size_t kBufferSize>
218class BufferedRootVisitor {
219 public:
220 BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
221 : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700222 }
223
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700224 ~BufferedRootVisitor() {
225 Flush();
226 }
227
228 template <class MirrorType>
229 ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700230 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700231 if (!root.IsNull()) {
232 VisitRoot(root);
233 }
234 }
235
236 template <class MirrorType>
237 ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700238 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700239 if (!root->IsNull()) {
240 VisitRoot(root);
241 }
242 }
243
244 template <class MirrorType>
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700245 void VisitRoot(GcRoot<MirrorType>& root) REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700246 VisitRoot(root.AddressWithoutBarrier());
247 }
248
249 template <class MirrorType>
250 void VisitRoot(mirror::CompressedReference<MirrorType>* root)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700251 REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700252 if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
253 Flush();
254 }
255 roots_[buffer_pos_++] = root;
256 }
257
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700258 void Flush() REQUIRES_SHARED(Locks::mutator_lock_) {
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700259 visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
260 buffer_pos_ = 0;
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700261 }
262
263 private:
Mathieu Chartierbb87e0f2015-04-03 11:21:55 -0700264 RootVisitor* const visitor_;
265 RootInfo root_info_;
266 mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
267 size_t buffer_pos_;
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700268};
269
Mathieu Chartier58c3f6a2016-12-01 14:21:11 -0800270class UnbufferedRootVisitor {
271 public:
272 UnbufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
273 : visitor_(visitor), root_info_(root_info) {}
274
275 template <class MirrorType>
276 ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root) const
277 REQUIRES_SHARED(Locks::mutator_lock_) {
278 if (!root.IsNull()) {
279 VisitRoot(root);
280 }
281 }
282
283 template <class MirrorType>
284 ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root) const
285 REQUIRES_SHARED(Locks::mutator_lock_) {
286 if (!root->IsNull()) {
287 VisitRoot(root);
288 }
289 }
290
291 template <class MirrorType>
292 void VisitRoot(GcRoot<MirrorType>& root) const REQUIRES_SHARED(Locks::mutator_lock_) {
293 VisitRoot(root.AddressWithoutBarrier());
294 }
295
296 template <class MirrorType>
297 void VisitRoot(mirror::CompressedReference<MirrorType>* root) const
298 REQUIRES_SHARED(Locks::mutator_lock_) {
299 visitor_->VisitRoots(&root, 1, root_info_);
300 }
301
302 private:
303 RootVisitor* const visitor_;
304 RootInfo root_info_;
305};
306
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700307} // namespace art
308
309#endif // ART_RUNTIME_GC_ROOT_H_