/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_HANDLE_SCOPE_H_
#define ART_RUNTIME_HANDLE_SCOPE_H_

#include <stack>

#include "base/logging.h"
#include "base/macros.h"
#include "handle.h"
#include "stack.h"
#include "verify_object.h"

namespace art {
namespace mirror {
class Object;
}  // namespace mirror

class Thread;

// HandleScopes are scoped objects containing a number of Handles. They are used to allocate
// handles so that these handles (and the objects referenced by them) are visible to the GC as
// roots. It is most common to stack allocate HandleScopes using StackHandleScope.
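//
// A minimal usage sketch (illustrative only; `self`, `raw_class` and `raw_object` are
// placeholder names for the current Thread* and raw mirror pointers, and the mutator lock
// is assumed to be held):
//
//   StackHandleScope<2> hs(self);
//   Handle<mirror::Class> klass(hs.NewHandle(raw_class));
//   MutableHandle<mirror::Object> obj(hs.NewHandle(raw_object));
//   // Both handles keep their objects visible to the GC until `hs` is destroyed and
//   // pops itself from the thread's list of handle scopes.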
class PACKED(4) HandleScope {
 public:
  ~HandleScope() {}

  // Number of references contained within this handle scope.
  uint32_t NumberOfReferences() const {
    return number_of_references_;
  }

  // We have versions of the following with and without an explicit pointer size. The versions
  // without a pointer size are used at runtime, so OFFSETOF_MEMBER computes the right offsets
  // automatically. The versions taking the pointer size explicitly let us cross-compile
  // correctly at compile time.

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(uint32_t num_references);

  // Returns the size of a HandleScope containing num_references handles.
  static size_t SizeOf(size_t pointer_size, uint32_t num_references);

  // Link to previous HandleScope or null.
  HandleScope* GetLink() const {
    return link_;
  }

  ALWAYS_INLINE mirror::Object* GetReference(size_t i) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE Handle<mirror::Object> GetHandle(size_t i)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE MutableHandle<mirror::Object> GetMutableHandle(size_t i)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE bool Contains(StackReference<mirror::Object>* handle_scope_entry) const;

  // Offset of link within HandleScope, used by generated code.
  static size_t LinkOffset(size_t pointer_size ATTRIBUTE_UNUSED) {
    return 0;
  }

  // Offset of length within handle scope, used by generated code.
  static size_t NumberOfReferencesOffset(size_t pointer_size) {
    return pointer_size;
  }

  // Offset of the reference storage within handle scope, used by generated code.
  static size_t ReferencesOffset(size_t pointer_size) {
    return pointer_size + sizeof(number_of_references_);
  }
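
  // For example, on a 64-bit target (pointer_size == 8): LinkOffset() == 0,
  // NumberOfReferencesOffset() == 8 and ReferencesOffset() == 12, matching the field
  // order link_, number_of_references_, references_ laid out below.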

  // Placement new creation.
  static HandleScope* Create(void* storage, HandleScope* link, uint32_t num_references)
      WARN_UNUSED {
    return new (storage) HandleScope(link, num_references);
  }

 protected:
  // Return backing storage used for references.
  ALWAYS_INLINE StackReference<mirror::Object>* GetReferences() const {
    uintptr_t address = reinterpret_cast<uintptr_t>(this) + ReferencesOffset(sizeof(void*));
    return reinterpret_cast<StackReference<mirror::Object>*>(address);
  }

  explicit HandleScope(size_t number_of_references) :
      link_(nullptr), number_of_references_(number_of_references) {
  }

  // Semi-hidden constructor. Construction expected by generated code and StackHandleScope.
  explicit HandleScope(HandleScope* link, uint32_t num_references) :
      link_(link), number_of_references_(num_references) {
  }

  // Linked list of handle scopes. The root is held by a Thread.
  HandleScope* const link_;

  // Number of handlerized references.
  const uint32_t number_of_references_;

  // Storage for references.
  // StackReference<mirror::Object> references_[number_of_references_]

 private:
  DISALLOW_COPY_AND_ASSIGN(HandleScope);
};

// A wrapper around a T** which restores the pointer from the handle in its destructor.
// TODO: Add more functionality.
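//
// A usage sketch (illustrative only; obtained via StackHandleScope::NewHandleWrapper below,
// with `self` and `obj` as placeholder names):
//
//   mirror::Object* obj = ...;  // Raw pointer that must stay valid across a possible GC.
//   {
//     StackHandleScope<1> hs(self);
//     HandleWrapper<mirror::Object> h(hs.NewHandleWrapper(&obj));
//     // ... code that may allocate and trigger a moving GC ...
//   }  // ~HandleWrapper writes the (possibly moved) object back into `obj`.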
template<class T>
class HandleWrapper : public MutableHandle<T> {
 public:
  HandleWrapper(T** obj, const MutableHandle<T>& handle)
     : MutableHandle<T>(handle), obj_(obj) {
  }

  HandleWrapper(const HandleWrapper&) = default;

  ~HandleWrapper() {
    *obj_ = MutableHandle<T>::Get();
  }

 private:
  T** const obj_;
};

// Scoped handle storage of a fixed size that is usually stack allocated.
template<size_t kNumReferences>
class PACKED(4) StackHandleScope FINAL : public HandleScope {
 public:
  explicit ALWAYS_INLINE StackHandleScope(Thread* self, mirror::Object* fill_value = nullptr);
  ALWAYS_INLINE ~StackHandleScope();

  template<class T>
  ALWAYS_INLINE MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template<class T>
  ALWAYS_INLINE HandleWrapper<T> NewHandleWrapper(T** object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ALWAYS_INLINE void SetReference(size_t i, mirror::Object* object)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Thread* Self() const {
    return self_;
  }

 private:
  template<class T>
  ALWAYS_INLINE MutableHandle<T> GetHandle(size_t i) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_LT(i, kNumReferences);
    return MutableHandle<T>(&GetReferences()[i]);
  }

  // Reference storage needs to be first as expected by the HandleScope layout.
  StackReference<mirror::Object> storage_[kNumReferences];

  // The thread this stack handle scope is linked onto. The stack handle scope will push and
  // pop itself on this thread's handle scope list.
  Thread* const self_;

  // Position at which new handles will be created.
  size_t pos_;

  template<size_t kNumRefs> friend class StackHandleScope;
};

// Utility class to manage a collection (stack) of StackHandleScopes. All of the managed
// handle scopes have the same fixed size.
// Calls to NewHandle create a new handle inside the top StackHandleScope; when that scope
// becomes full, a new one is created and pushed on top of the previous one (see the usage
// sketch below).
//
// NB:
// - it is not safe to intermix use of the *same* StackHandleScopeCollection with other
//   StackHandleScopes.
// - this is an easy alternative to implementing a full ZoneHandleScope to manage an
//   arbitrary number of handles.
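//
// A usage sketch (illustrative only; `self` and `objects` are placeholder names, and the
// mutator lock is assumed to be held):
//
//   StackHandleScopeCollection handles(self);
//   for (mirror::Object* obj : objects) {
//     MutableHandle<mirror::Object> h = handles.NewHandle(obj);
//     // ... use h; a new underlying StackHandleScope is allocated every
//     // kNumReferencesPerScope handles ...
//   }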
class StackHandleScopeCollection {
 public:
  explicit StackHandleScopeCollection(Thread* const self) :
      self_(self),
      current_scope_num_refs_(0) {
  }

  ~StackHandleScopeCollection() {
    while (!scopes_.empty()) {
      delete scopes_.top();
      scopes_.pop();
    }
  }

  template<class T>
  MutableHandle<T> NewHandle(T* object) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    if (scopes_.empty() || current_scope_num_refs_ >= kNumReferencesPerScope) {
      StackHandleScope<kNumReferencesPerScope>* scope =
          new StackHandleScope<kNumReferencesPerScope>(self_);
      scopes_.push(scope);
      current_scope_num_refs_ = 0;
    }
    current_scope_num_refs_++;
    return scopes_.top()->NewHandle(object);
  }

 private:
  static constexpr size_t kNumReferencesPerScope = 4;

  Thread* const self_;

  std::stack<StackHandleScope<kNumReferencesPerScope>*> scopes_;
  size_t current_scope_num_refs_;

  DISALLOW_COPY_AND_ASSIGN(StackHandleScopeCollection);
};

}  // namespace art

#endif  // ART_RUNTIME_HANDLE_SCOPE_H_