/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_ROOT_H_
#define ART_RUNTIME_GC_ROOT_H_

#include "base/macros.h"
#include "base/mutex.h"  // For Locks::mutator_lock_.
#include "mirror/object_reference.h"

namespace art {
class ArtField;
class ArtMethod;

namespace mirror {
class Object;
}  // namespace mirror

template <size_t kBufferSize>
class BufferedRootVisitor;

// Dependent on pointer size so that we don't have frames that are too big on 64-bit.
static const size_t kDefaultBufferedRootCount = 1024 / sizeof(void*);
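// With 4-byte pointers this is 1024 / 4 = 256 entries, with 8-byte pointers 1024 / 8 = 128,
// so a BufferedRootVisitor<kDefaultBufferedRootCount> keeps its on-stack roots_ array at
// roughly 1 KiB for either pointer size.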

enum RootType {
  kRootUnknown = 0,
  kRootJNIGlobal,
  kRootJNILocal,
  kRootJavaFrame,
  kRootNativeStack,
  kRootStickyClass,
  kRootThreadBlock,
  kRootMonitorUsed,
  kRootThreadObject,
  kRootInternedString,
  kRootFinalizing,  // used for HPROF's conversion to HprofHeapTag
  kRootDebugger,
  kRootReferenceCleanup,  // used for HPROF's conversion to HprofHeapTag
  kRootVMInternal,
  kRootJNIMonitor,
};
std::ostream& operator<<(std::ostream& os, const RootType& root_type);

// Only used by hprof; thread_id_ and type_ are only needed by hprof.
class RootInfo {
 public:
  // Thread id 0 is for non-thread roots.
  explicit RootInfo(RootType type, uint32_t thread_id = 0)
      : type_(type), thread_id_(thread_id) {
  }
  RootInfo(const RootInfo&) = default;
  virtual ~RootInfo() {
  }
  RootType GetType() const {
    return type_;
  }
  uint32_t GetThreadId() const {
    return thread_id_;
  }
  virtual void Describe(std::ostream& os) const {
    os << "Type=" << type_ << " thread_id=" << thread_id_;
  }
  std::string ToString() const;

 private:
  const RootType type_;
  const uint32_t thread_id_;
};

inline std::ostream& operator<<(std::ostream& os, const RootInfo& root_info) {
  root_info.Describe(os);
  return os;
}
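
// Usage sketch (illustrative only, not part of this header's contract): a RootInfo pairs a
// RootType with the id of the thread that holds the root (0 for non-thread roots) and can be
// streamed through the operator<< above:
//
//   RootInfo global_info(kRootJNIGlobal);     // Thread id defaults to 0 (non-thread root).
//   RootInfo frame_info(kRootJavaFrame, 7u);  // Root held by a Java frame of thread 7.
//   std::ostringstream oss;                   // (Requires <sstream>.)
//   oss << frame_info;                        // Yields something like "Type=<type> thread_id=7".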

class RootVisitor {
 public:
  virtual ~RootVisitor() { }

  // Single root version, not overridable.
  ALWAYS_INLINE void VisitRoot(mirror::Object** root, const RootInfo& info)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    VisitRoots(&root, 1, info);
  }

  // Single root version, not overridable.
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::Object** root, const RootInfo& info)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (*root != nullptr) {
      VisitRoot(root, info);
    }
  }

  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
};

// Only visits roots one at a time and doesn't handle updating roots. Used when performance
// isn't critical.
class SingleRootVisitor : public RootVisitor {
 private:
  void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      VisitRoot(*roots[i], info);
    }
  }

  void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                  const RootInfo& info) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_) {
    for (size_t i = 0; i < count; ++i) {
      VisitRoot(roots[i]->AsMirrorPtr(), info);
    }
  }

  virtual void VisitRoot(mirror::Object* root, const RootInfo& info) = 0;
};
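
// Usage sketch (illustrative only; the class and member names below are made up): a concrete
// SingleRootVisitor only needs to override the single-root VisitRoot(); both batched
// VisitRoots() overloads above funnel into it.
//
//   class CountingRootVisitor : public SingleRootVisitor {
//    public:
//     void VisitRoot(mirror::Object* root, const RootInfo& info) OVERRIDE {
//       if (root != nullptr) {
//         ++count_;  // 'info' could be inspected here, e.g. info.GetType().
//       }
//     }
//     size_t count_ = 0;
//   };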
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800135
Hiroshi Yamauchi3f64f252015-06-12 18:35:06 -0700136class GcRootSource {
137 public:
138 GcRootSource()
139 : field_(nullptr), method_(nullptr) {
140 }
141 explicit GcRootSource(ArtField* field)
142 : field_(field), method_(nullptr) {
143 }
144 explicit GcRootSource(ArtMethod* method)
145 : field_(nullptr), method_(method) {
146 }
147 ArtField* GetArtField() const {
148 return field_;
149 }
150 ArtMethod* GetArtMethod() const {
151 return method_;
152 }
153 bool HasArtField() const {
154 return field_ != nullptr;
155 }
156 bool HasArtMethod() const {
157 return method_ != nullptr;
158 }
159
160 private:
161 ArtField* const field_;
162 ArtMethod* const method_;
163
164 DISALLOW_COPY_AND_ASSIGN(GcRootSource);
165};

template<class MirrorType>
class GcRoot {
 public:
  template<ReadBarrierOption kReadBarrierOption = kWithReadBarrier>
  ALWAYS_INLINE MirrorType* Read(GcRootSource* gc_root_source = nullptr) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  void VisitRoot(RootVisitor* visitor, const RootInfo& info) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK(!IsNull());
    mirror::CompressedReference<mirror::Object>* roots[1] = { &root_ };
    visitor->VisitRoots(roots, 1u, info);
    DCHECK(!IsNull());
  }

  void VisitRootIfNonNull(RootVisitor* visitor, const RootInfo& info) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!IsNull()) {
      VisitRoot(visitor, info);
    }
  }

  ALWAYS_INLINE mirror::CompressedReference<mirror::Object>* AddressWithoutBarrier() {
    return &root_;
  }

  ALWAYS_INLINE bool IsNull() const {
    // It's safe to null-check it without a read barrier.
    return root_.IsNull();
  }

  ALWAYS_INLINE GcRoot(MirrorType* ref = nullptr) SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  // Root visitors take pointers to root_ and place them in CompressedReference** arrays. We use a
  // CompressedReference<mirror::Object> here since it violates strict aliasing requirements to
  // cast CompressedReference<MirrorType>* to CompressedReference<mirror::Object>*.
  mutable mirror::CompressedReference<mirror::Object> root_;

  template <size_t kBufferSize> friend class BufferedRootVisitor;
};
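
// Usage sketch (illustrative only; the holder class, method names, and root type are made up):
// a native object that keeps a managed object alive stores a GcRoot<> and reports it to root
// visitors.
//
//   class ExampleRootHolder {
//    public:
//     mirror::Object* Get() SHARED_REQUIRES(Locks::mutator_lock_) {
//       return root_.Read();  // Applies the read barrier; a GcRootSource may be passed.
//     }
//     void VisitRoots(RootVisitor* visitor) SHARED_REQUIRES(Locks::mutator_lock_) {
//       root_.VisitRootIfNonNull(visitor, RootInfo(kRootVMInternal));
//     }
//    private:
//     GcRoot<mirror::Object> root_;
//   };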

// Simple data structure for buffered root visiting to avoid virtual dispatch overhead. Currently
// only for CompressedReferences, since these are more common than the Object** roots, which are
// only used for thread-local roots.
template <size_t kBufferSize>
class BufferedRootVisitor {
 public:
  BufferedRootVisitor(RootVisitor* visitor, const RootInfo& root_info)
      : visitor_(visitor), root_info_(root_info), buffer_pos_(0) {
  }

  ~BufferedRootVisitor() {
    Flush();
  }

  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(GcRoot<MirrorType>& root)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!root.IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  ALWAYS_INLINE void VisitRootIfNonNull(mirror::CompressedReference<MirrorType>* root)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (!root->IsNull()) {
      VisitRoot(root);
    }
  }

  template <class MirrorType>
  void VisitRoot(GcRoot<MirrorType>& root) SHARED_REQUIRES(Locks::mutator_lock_) {
    VisitRoot(root.AddressWithoutBarrier());
  }

  template <class MirrorType>
  void VisitRoot(mirror::CompressedReference<MirrorType>* root)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(buffer_pos_ >= kBufferSize)) {
      Flush();
    }
    roots_[buffer_pos_++] = root;
  }

  void Flush() SHARED_REQUIRES(Locks::mutator_lock_) {
    visitor_->VisitRoots(roots_, buffer_pos_, root_info_);
    buffer_pos_ = 0;
  }

 private:
  RootVisitor* const visitor_;
  RootInfo root_info_;
  mirror::CompressedReference<mirror::Object>* roots_[kBufferSize];
  size_t buffer_pos_;
};
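
// Usage sketch (illustrative only; 'roots' is a made-up container of GcRoot<mirror::Object>):
// wrap a RootVisitor in a BufferedRootVisitor so roots are reported in batches of up to
// kBufferSize instead of one virtual call each; the destructor flushes whatever remains.
//
//   void VisitExampleRoots(RootVisitor* visitor, std::vector<GcRoot<mirror::Object>>& roots)
//       SHARED_REQUIRES(Locks::mutator_lock_) {
//     BufferedRootVisitor<kDefaultBufferedRootCount> buffered(visitor, RootInfo(kRootVMInternal));
//     for (GcRoot<mirror::Object>& root : roots) {
//       buffered.VisitRootIfNonNull(root);
//     }
//   }  // ~BufferedRootVisitor flushes any remaining buffered roots here.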

}  // namespace art

#endif  // ART_RUNTIME_GC_ROOT_H_