/* Copyright (C) 2017 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#ifndef ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_
#define ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_

#include <unordered_map>

#include "base/globals.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc/system_weak.h"
#include "gc_root-inl.h"
#include "jvmti.h"
#include "jvmti_allocator.h"
#include "mirror/object.h"
#include "thread-current-inl.h"

namespace openjdkjvmti {

class EventHandler;

// A system-weak container mapping objects to elements of the template type. This corresponds
// to a weak hash map. For historical reasons the stored value is called "tag."
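//
// A minimal usage sketch (illustrative only; assumes a jlong payload and a valid
// art::ObjPtr<art::mirror::Object> `obj`, on a thread holding the mutator lock):
//
//   JvmtiWeakTable<jlong> table;
//   bool overwrote = table.Set(obj, 1);      // false: no previous mapping existed.
//   jlong tag = 0;
//   bool found = table.GetTag(obj, &tag);    // true, and tag is now 1.
//   bool removed = table.Remove(obj, &tag);  // true; the mapping is gone afterwards.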
template <typename T>
class JvmtiWeakTable : public art::gc::SystemWeakHolder {
 public:
  JvmtiWeakTable()
      : art::gc::SystemWeakHolder(art::kTaggingLockLevel),
        update_since_last_sweep_(false) {
  }

  // Remove the mapping for the given object. Returns true if such a mapping existed, in which
  // case the removed value is written to `tag`.
  ALWAYS_INLINE bool Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);
  ALWAYS_INLINE bool RemoveLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  // Set the mapping for the given object. Returns true if this overwrites an already existing
  // mapping.
  ALWAYS_INLINE virtual bool Set(art::ObjPtr<art::mirror::Object> obj, T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);
  ALWAYS_INLINE virtual bool SetLocked(art::ObjPtr<art::mirror::Object> obj, T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  // Retrieve the value associated with the given object and store it in `result`. Returns true
  // if a mapping exists, false otherwise.
  bool GetTag(art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_) {
    art::Thread* self = art::Thread::Current();
    art::MutexLock mu(self, allow_disallow_lock_);
    Wait(self);

    return GetTagLocked(self, obj, result);
  }
  bool GetTagLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_) {
    art::Thread* self = art::Thread::Current();
    allow_disallow_lock_.AssertHeld(self);
    Wait(self);

    return GetTagLocked(self, obj, result);
  }

  // Sweep the container. DO NOT CALL MANUALLY; the runtime invokes this through the
  // art::gc::SystemWeakHolder interface during garbage collection.
  ALWAYS_INLINE void Sweep(art::IsMarkedVisitor* visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

  // Return all objects that are tagged with one of the given tags, together with their tags.
  ALWAYS_INLINE
  jvmtiError GetTaggedObjects(jvmtiEnv* jvmti_env,
                              jint tag_count,
                              const T* tags,
                              /* out */ jint* count_ptr,
                              /* out */ jobject** object_result_ptr,
                              /* out */ T** tag_result_ptr)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

  // Locking functions, to allow coarse-grained locking and amortization.
  ALWAYS_INLINE void Lock() ACQUIRE(allow_disallow_lock_);
  ALWAYS_INLINE void Unlock() RELEASE(allow_disallow_lock_);
  ALWAYS_INLINE void AssertLocked() ASSERT_CAPABILITY(allow_disallow_lock_);
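
  // A coarse-grained locking sketch (illustrative only; `table`, `objects`, and `TagFor` are
  // hypothetical): when tagging a batch of objects, take the lock once rather than once per
  // call:
  //
  //   table.Lock();
  //   for (art::ObjPtr<art::mirror::Object> o : objects) {
  //     table.SetLocked(o, TagFor(o));
  //   }
  //   table.Unlock();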

  // Return an object currently associated with the given tag, or nullptr if there is none.
  ALWAYS_INLINE art::ObjPtr<art::mirror::Object> Find(T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

 protected:
  // Should HandleNullSweep be called when Sweep detects the release of an object?
  virtual bool DoesHandleNullOnSweep() {
    return false;
  }
  // If DoesHandleNullOnSweep returns true, this function will be called with the tag of each
  // released object.
  virtual void HandleNullSweep(T tag ATTRIBUTE_UNUSED) {}
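
  // A subclass that wants sweep notifications might look like this (illustrative sketch;
  // `MyTable` is a hypothetical name):
  //
  //   class MyTable : public JvmtiWeakTable<jlong> {
  //    protected:
  //     bool DoesHandleNullOnSweep() override { return true; }
  //     void HandleNullSweep(jlong tag) override {
  //       // React to `tag` losing its object, e.g. record or report the event.
  //     }
  //   };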

 private:
  ALWAYS_INLINE
  bool SetLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, T tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  ALWAYS_INLINE
  bool RemoveLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  bool GetTagLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_) {
    auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
    if (it != tagged_objects_.end()) {
      *result = it->second;
      return true;
    }

    // Performance optimization: to avoid multiple table updates, ensure that during GC we
    // only update once. See the comment on the implementation of GetTagSlowPath.
    if (art::kUseReadBarrier &&
        self != nullptr &&
        self->GetIsGcMarking() &&
        !update_since_last_sweep_) {
      return GetTagSlowPath(self, obj, result);
    }

    return false;
  }

  // Slow path for GetTag. We didn't find the object, but the table might be storing from-space
  // pointers while we are querying with a to-space pointer.
  ALWAYS_INLINE
  bool GetTagSlowPath(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  // Update the table by doing read barriers on each element, ensuring that to-space pointers
  // are stored.
  ALWAYS_INLINE
  void UpdateTableWithReadBarrier()
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  template <bool kHandleNull>
  void SweepImpl(art::IsMarkedVisitor* visitor)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(!allow_disallow_lock_);

  enum TableUpdateNullTarget {
    kIgnoreNull,
    kRemoveNull,
    kCallHandleNull
  };

  template <typename Updater, TableUpdateNullTarget kTargetNull>
  void UpdateTableWith(Updater& updater)
      REQUIRES_SHARED(art::Locks::mutator_lock_)
      REQUIRES(allow_disallow_lock_);

  template <typename Storage, class Allocator = JvmtiAllocator<T>>
  struct ReleasableContainer;

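  // Hash and equality read the GcRoot without a read barrier, so entries are keyed by whatever
  // address was last stored. Under a concurrent copying collector, a lookup with a to-space
  // pointer can therefore miss; GetTagSlowPath and UpdateTableWithReadBarrier compensate by
  // rewriting all keys to to-space pointers.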
  struct HashGcRoot {
    size_t operator()(const art::GcRoot<art::mirror::Object>& r) const
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      return reinterpret_cast<uintptr_t>(r.Read<art::kWithoutReadBarrier>());
    }
  };

  struct EqGcRoot {
    bool operator()(const art::GcRoot<art::mirror::Object>& r1,
                    const art::GcRoot<art::mirror::Object>& r2) const
        REQUIRES_SHARED(art::Locks::mutator_lock_) {
      return r1.Read<art::kWithoutReadBarrier>() == r2.Read<art::kWithoutReadBarrier>();
    }
  };

  using TagAllocator = JvmtiAllocator<std::pair<const art::GcRoot<art::mirror::Object>, T>>;
  std::unordered_map<art::GcRoot<art::mirror::Object>,
                     T,
                     HashGcRoot,
                     EqGcRoot,
                     TagAllocator> tagged_objects_
      GUARDED_BY(allow_disallow_lock_)
      GUARDED_BY(art::Locks::mutator_lock_);
  // To avoid repeatedly scanning the whole table, remember whether we have done so since the
  // last sweep.
  bool update_since_last_sweep_;
};
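
// Note: a table participates in sweeping only once it has been registered with the runtime as a
// system-weak holder (for instance via art::Runtime::Current()->AddSystemWeakHolder(&table),
// assuming that registration path); until then, dead entries are not cleared.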

}  // namespace openjdkjvmti

#endif  // ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_