/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include "base/enums.h"
#include "base/logging.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "handle.h"
#include "stack_reference.h"
#include "verify_object.h"

namespace art {

class HandleScope;
template<class MirrorType> class ObjPtr;
class Thread;
class VariableSizedHandleScope;

namespace mirror {
class Object;
}  // namespace mirror

// Basic handle scope, tracked by a list. May be variable sized.
class PACKED(4) BaseHandleScope {
 public:
  bool IsVariableSized() const {
    return number_of_references_ == kNumReferencesVariableSized;
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  ALWAYS_INLINE void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

  // Link to previous BaseHandleScope or null.
  BaseHandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE VariableSizedHandleScope* AsVariableSized();
  ALWAYS_INLINE HandleScope* AsHandleScope();
  ALWAYS_INLINE const VariableSizedHandleScope* AsVariableSized() const;
  ALWAYS_INLINE const HandleScope* AsHandleScope() const;

 protected:
  BaseHandleScope(BaseHandleScope* link, uint32_t num_references)
      : link_(link),
        number_of_references_(num_references) {}

  // Variable sized constructor.
  explicit BaseHandleScope(BaseHandleScope* link)
      : link_(link),
        number_of_references_(kNumReferencesVariableSized) {}

  static constexpr int32_t kNumReferencesVariableSized = -1;

  // Linked list of handle scopes. The root is held by a Thread.
  BaseHandleScope* const link_;

  // Number of handlerized references. -1 for variable sized handle scopes.
  const int32_t number_of_references_;

 private:
  DISALLOW_COPY_AND_ASSIGN(BaseHandleScope);
};

// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that these handles (and the objects they refer to) are visible to the GC as roots.
// It is most common to stack allocate HandleScopes using StackHandleScope.
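//
// A minimal usage sketch (assumes a Thread* self and a mirror::Object* obj are in scope; the
// names are illustrative only):
//
//   StackHandleScope<1> hs(self);
//   Handle<mirror::Object> handle = hs.NewHandle(obj);
//   // |handle| is now visited as a GC root: the reference stays valid (and is updated by a
//   // moving collector) until hs goes out of scope.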
class PACKED(4) HandleScope : public BaseHandleScope {
 public:
  ~HandleScope() {}

  // We have versions with and without explicit pointer size of the following. The first two are
  // used at runtime, so OFFSETOF_MEMBER computes the right offsets automatically. The last one
  // takes the pointer size explicitly so that at compile time we can cross-compile correctly.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(PointerSize pointer_size, uint32_t num_references);

  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static constexpr size_t LinkOffset(PointerSize pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static constexpr size_t NumberOfReferencesOffset(PointerSize pointer_size) {
    return static_cast<size_t>(pointer_size);
  }

  // Offset of the reference storage within handle scope, used by generated code.
  static constexpr size_t ReferencesOffset(PointerSize pointer_size) {
    return NumberOfReferencesOffset(pointer_size) + sizeof(number_of_references_);
  }
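  // Layout sketch implied by the offsets above, e.g. for a 64-bit pointer size (assuming
  // StackReference<mirror::Object> is a 4-byte compressed reference):
  //   offset  0: BaseHandleScope* link_                    (LinkOffset() == 0)
  //   offset  8: int32_t number_of_references_             (NumberOfReferencesOffset() == 8)
  //   offset 12: StackReference<mirror::Object> refs[N]    (ReferencesOffset() == 12)
  // so SizeOf(pointer_size, N) is expected to be ReferencesOffset(pointer_size) + N * 4.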

  // Placement new creation.
  static HandleScope* Create(void* storage, BaseHandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const {
    DCHECK_GE(number_of_references_, 0);
    return static_cast<uint32_t>(number_of_references_);
  }

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
    for (size_t i = 0, count = NumberOfReferences(); i < count; ++i) {
      // GetReference returns a pointer to the stack reference within the handle scope. If this
      // needs to be updated, it will be done by the root visitor.
      visitor.VisitRootIfNonNull(GetHandle(i).GetReference());
    }
  }

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(kRuntimePointerSize);
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) : HandleScope(nullptr, number_of_references) {}

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  HandleScope(BaseHandleScope* link, uint32_t num_references)
      : BaseHandleScope(link, num_references) {}

  // Storage for references.
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// A wrapper around Object** that restores the pointer in the destructor.
// TODO: Delete
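//
// Usage sketch (self is assumed to be a Thread* in scope; MayTriggerGC() is a hypothetical call
// that can suspend the thread or move objects):
//
//   mirror::Object* obj = ...;
//   {
//     StackHandleScope<1> hs(self);
//     HandleWrapper<mirror::Object> h(hs.NewHandleWrapper(&obj));
//     MayTriggerGC();
//   }  // ~HandleWrapper writes the handle's (possibly updated) reference back into obj.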
template<class T>
class HandleWrapper : public MutableHandle<T> {
 public:
  HandleWrapper(T** obj, const MutableHandle<T>& handle)
      : MutableHandle<T>(handle), obj_(obj) {
  }

  HandleWrapper(const HandleWrapper&) = default;

  ~HandleWrapper() {
    *obj_ = MutableHandle<T>::Get();
  }

 private:
  T** const obj_;
};

// A wrapper around ObjPtr<Object>* and restores the pointer in the destructor.
// TODO: Add more functionality.
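// The ObjPtr analogue of the HandleWrapper usage above (illustrative; obj here is an
// ObjPtr<mirror::Object> in scope):
//   HandleWrapperObjPtr<mirror::Object> h(hs.NewHandleWrapper(&obj));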
template<class T>
class HandleWrapperObjPtr : public MutableHandle<T> {
 public:
  HandleWrapperObjPtr(ObjPtr<T>* obj, const MutableHandle<T>& handle)
      : MutableHandle<T>(handle), obj_(obj) {}

  HandleWrapperObjPtr(const HandleWrapperObjPtr&) = default;

  ~HandleWrapperObjPtr() {
    *obj_ = ObjPtr<T>(MutableHandle<T>::Get());
  }

 private:
  ObjPtr<T>* const obj_;
};

// Fixed size handle scope that is not necessarily linked in the thread.
template<size_t kNumReferences>
class PACKED(4) FixedSizeHandleScope : public HandleScope {
 public:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapperObjPtr<T> NewHandleWrapper(ObjPtr<T>* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  ALWAYS_INLINE MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      REQUIRES_SHARED(Locks::mutator_lock_);

  size_t RemainingSlots() const {
    return kNumReferences - pos_;
  }

 private:
  explicit ALWAYS_INLINE FixedSizeHandleScope(BaseHandleScope* link,
                                              mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~FixedSizeHandleScope() {}

  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // Position at which new handles will be created.
  uint32_t pos_ = 0;

  template<size_t kNumRefs> friend class StackHandleScope;
  friend class VariableSizedHandleScope;
};

// Scoped handle storage of a fixed size that is stack allocated.
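//
// The constructor links the scope into the owning Thread's handle scope list and the destructor
// unlinks it, so stack allocation naturally keeps construction and destruction in LIFO order.
// Sketch (assuming Thread* self and mirror::Object* pointers obj_a, obj_b in scope):
//
//   StackHandleScope<2> hs(self);
//   Handle<mirror::Object> a = hs.NewHandle(obj_a);
//   Handle<mirror::Object> b = hs.NewHandle(obj_b);  // hs.RemainingSlots() is now 0.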
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public FixedSizeHandleScope<kNumReferences> {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~StackHandleScope();

  Thread* Self() const {
    return self_;
  }

 private:
  // The thread whose handle scope list this scope is linked into. The stack handle scope will
  // push and pop itself from this thread.
  Thread* const self_;
};

// Utility class that manages a variable sized handle scope by keeping a list of fixed size
// handle scopes.
// Calls to NewHandle will create a new handle inside the current FixedSizeHandleScope.
// When the current handle scope becomes full, a new one is created and put at the front of the
// list.
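//
// Useful when the number of handles needed is not known at compile time. Sketch (assuming
// Thread* self and a std::vector<mirror::Object*> named objects in scope):
//
//   VariableSizedHandleScope handles(self);
//   std::vector<Handle<mirror::Object>> roots;
//   for (mirror::Object* obj : objects) {
//     roots.push_back(handles.NewHandle(obj));
//   }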
class VariableSizedHandleScope : public BaseHandleScope {
 public:
  explicit VariableSizedHandleScope(Thread* const self);
  ~VariableSizedHandleScope();

  template<class T>
  MutableHandle<T> NewHandle(T* object) REQUIRES_SHARED(Locks::mutator_lock_);

  template<class MirrorType>
  MutableHandle<MirrorType> NewHandle(ObjPtr<MirrorType> ptr)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Number of references contained within this handle scope.
  ALWAYS_INLINE uint32_t NumberOfReferences() const;

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  template <typename Visitor>
  void VisitRoots(Visitor& visitor) REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  static constexpr size_t kLocalScopeSize = 64u;
  static constexpr size_t kSizeOfReferencesPerScope =
      kLocalScopeSize
          - /* BaseHandleScope::link_ */ sizeof(BaseHandleScope*)
          - /* BaseHandleScope::number_of_references_ */ sizeof(int32_t)
          - /* FixedSizeHandleScope<>::pos_ */ sizeof(uint32_t);
  static constexpr size_t kNumReferencesPerScope =
      kSizeOfReferencesPerScope / sizeof(StackReference<mirror::Object>);
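  // For example, with 64-bit pointers this works out to 64 - 8 - 4 - 4 = 48 bytes of reference
  // storage per local scope, i.e. kNumReferencesPerScope == 12, assuming a 4-byte
  // StackReference<mirror::Object>.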

  Thread* const self_;

  // Linked list of fixed size handle scopes.
  using LocalScopeType = FixedSizeHandleScope<kNumReferencesPerScope>;
  static_assert(sizeof(LocalScopeType) == kLocalScopeSize, "Unexpected size of LocalScopeType");
  LocalScopeType* current_scope_;

  DISALLOW_COPY_AND_ASSIGN(VariableSizedHandleScope);
};

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_