blob: ea0d023728b7663831b20e27992219e890eb062d [file] [log] [blame]
/* Copyright (C) 2017 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */
31
#ifndef ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_
#define ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_

#include <unordered_map>

#include "base/globals.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc/system_weak.h"
#include "gc_root-inl.h"
#include "jvmti.h"
#include "jvmti_allocator.h"
#include "mirror/object.h"
#include "thread-current-inl.h"

namespace openjdkjvmti {

class EventHandler;

51// A system-weak container mapping objects to elements of the template type. This corresponds
52// to a weak hash map. For historical reasons the stored value is called "tag."
53template <typename T>
54class JvmtiWeakTable : public art::gc::SystemWeakHolder {
55 public:
56 JvmtiWeakTable()
Mathieu Chartierf169e272017-03-28 12:59:38 -070057 : art::gc::SystemWeakHolder(art::kTaggingLockLevel),
Andreas Gampef0140212017-03-03 13:28:58 -080058 update_since_last_sweep_(false) {
59 }
60
61 // Remove the mapping for the given object, returning whether such a mapping existed (and the old
62 // value).
Alex Lightd9025582019-02-13 16:33:14 -080063 ALWAYS_INLINE bool Remove(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
Andreas Gampef0140212017-03-03 13:28:58 -080064 REQUIRES_SHARED(art::Locks::mutator_lock_)
65 REQUIRES(!allow_disallow_lock_);
Alex Lightd9025582019-02-13 16:33:14 -080066 ALWAYS_INLINE bool RemoveLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
Andreas Gampef0140212017-03-03 13:28:58 -080067 REQUIRES_SHARED(art::Locks::mutator_lock_)
68 REQUIRES(allow_disallow_lock_);
69
70 // Set the mapping for the given object. Returns true if this overwrites an already existing
71 // mapping.
Alex Lightd9025582019-02-13 16:33:14 -080072 ALWAYS_INLINE virtual bool Set(art::ObjPtr<art::mirror::Object> obj, T tag)
Andreas Gampef0140212017-03-03 13:28:58 -080073 REQUIRES_SHARED(art::Locks::mutator_lock_)
74 REQUIRES(!allow_disallow_lock_);
Alex Lightd9025582019-02-13 16:33:14 -080075 ALWAYS_INLINE virtual bool SetLocked(art::ObjPtr<art::mirror::Object> obj, T tag)
Andreas Gampef0140212017-03-03 13:28:58 -080076 REQUIRES_SHARED(art::Locks::mutator_lock_)
77 REQUIRES(allow_disallow_lock_);
78
79 // Return the value associated with the given object. Returns true if the mapping exists, false
80 // otherwise.
Alex Lightd9025582019-02-13 16:33:14 -080081 bool GetTag(art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
Andreas Gampef0140212017-03-03 13:28:58 -080082 REQUIRES_SHARED(art::Locks::mutator_lock_)
83 REQUIRES(!allow_disallow_lock_) {
84 art::Thread* self = art::Thread::Current();
85 art::MutexLock mu(self, allow_disallow_lock_);
86 Wait(self);
87
88 return GetTagLocked(self, obj, result);
89 }
Alex Lightd9025582019-02-13 16:33:14 -080090 bool GetTagLocked(art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
Andreas Gampef0140212017-03-03 13:28:58 -080091 REQUIRES_SHARED(art::Locks::mutator_lock_)
92 REQUIRES(allow_disallow_lock_) {
93 art::Thread* self = art::Thread::Current();
94 allow_disallow_lock_.AssertHeld(self);
95 Wait(self);
96
97 return GetTagLocked(self, obj, result);
98 }
99
100 // Sweep the container. DO NOT CALL MANUALLY.
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700101 ALWAYS_INLINE void Sweep(art::IsMarkedVisitor* visitor)
Andreas Gampef0140212017-03-03 13:28:58 -0800102 REQUIRES_SHARED(art::Locks::mutator_lock_)
103 REQUIRES(!allow_disallow_lock_);
104
105 // Return all objects that have a value mapping in tags.
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700106 ALWAYS_INLINE
Andreas Gampef0140212017-03-03 13:28:58 -0800107 jvmtiError GetTaggedObjects(jvmtiEnv* jvmti_env,
108 jint tag_count,
109 const T* tags,
110 /* out */ jint* count_ptr,
111 /* out */ jobject** object_result_ptr,
112 /* out */ T** tag_result_ptr)
113 REQUIRES_SHARED(art::Locks::mutator_lock_)
114 REQUIRES(!allow_disallow_lock_);
115
116 // Locking functions, to allow coarse-grained locking and amortization.
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700117 ALWAYS_INLINE void Lock() ACQUIRE(allow_disallow_lock_);
118 ALWAYS_INLINE void Unlock() RELEASE(allow_disallow_lock_);
119 ALWAYS_INLINE void AssertLocked() ASSERT_CAPABILITY(allow_disallow_lock_);
Andreas Gampef0140212017-03-03 13:28:58 -0800120
Alex Lightd9025582019-02-13 16:33:14 -0800121 ALWAYS_INLINE art::ObjPtr<art::mirror::Object> Find(T tag)
Andreas Gamped73aba42017-05-03 21:40:26 -0700122 REQUIRES_SHARED(art::Locks::mutator_lock_)
123 REQUIRES(!allow_disallow_lock_);
124
Andreas Gampef0140212017-03-03 13:28:58 -0800125 protected:
126 // Should HandleNullSweep be called when Sweep detects the release of an object?
127 virtual bool DoesHandleNullOnSweep() {
128 return false;
129 }
130 // If DoesHandleNullOnSweep returns true, this function will be called.
131 virtual void HandleNullSweep(T tag ATTRIBUTE_UNUSED) {}
132
133 private:
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700134 ALWAYS_INLINE
Alex Lightd9025582019-02-13 16:33:14 -0800135 bool SetLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, T tag)
Andreas Gampef0140212017-03-03 13:28:58 -0800136 REQUIRES_SHARED(art::Locks::mutator_lock_)
137 REQUIRES(allow_disallow_lock_);
138
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700139 ALWAYS_INLINE
Alex Lightd9025582019-02-13 16:33:14 -0800140 bool RemoveLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* tag)
Andreas Gampef0140212017-03-03 13:28:58 -0800141 REQUIRES_SHARED(art::Locks::mutator_lock_)
142 REQUIRES(allow_disallow_lock_);
143
Alex Lightd9025582019-02-13 16:33:14 -0800144 bool GetTagLocked(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
Andreas Gampef0140212017-03-03 13:28:58 -0800145 REQUIRES_SHARED(art::Locks::mutator_lock_)
146 REQUIRES(allow_disallow_lock_) {
147 auto it = tagged_objects_.find(art::GcRoot<art::mirror::Object>(obj));
148 if (it != tagged_objects_.end()) {
149 *result = it->second;
150 return true;
151 }
152
153 // Performance optimization: To avoid multiple table updates, ensure that during GC we
154 // only update once. See the comment on the implementation of GetTagSlowPath.
155 if (art::kUseReadBarrier &&
156 self != nullptr &&
157 self->GetIsGcMarking() &&
158 !update_since_last_sweep_) {
159 return GetTagSlowPath(self, obj, result);
160 }
161
162 return false;
163 }
164
165 // Slow-path for GetTag. We didn't find the object, but we might be storing from-pointers and
166 // are asked to retrieve with a to-pointer.
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700167 ALWAYS_INLINE
Alex Lightd9025582019-02-13 16:33:14 -0800168 bool GetTagSlowPath(art::Thread* self, art::ObjPtr<art::mirror::Object> obj, /* out */ T* result)
Andreas Gampef0140212017-03-03 13:28:58 -0800169 REQUIRES_SHARED(art::Locks::mutator_lock_)
170 REQUIRES(allow_disallow_lock_);
171
172 // Update the table by doing read barriers on each element, ensuring that to-space pointers
173 // are stored.
Andreas Gampe8ea4eec2017-05-30 13:53:03 -0700174 ALWAYS_INLINE
Andreas Gampef0140212017-03-03 13:28:58 -0800175 void UpdateTableWithReadBarrier()
176 REQUIRES_SHARED(art::Locks::mutator_lock_)
177 REQUIRES(allow_disallow_lock_);
178
179 template <bool kHandleNull>
180 void SweepImpl(art::IsMarkedVisitor* visitor)
181 REQUIRES_SHARED(art::Locks::mutator_lock_)
182 REQUIRES(!allow_disallow_lock_);
183
184 enum TableUpdateNullTarget {
185 kIgnoreNull,
186 kRemoveNull,
187 kCallHandleNull
188 };
189
190 template <typename Updater, TableUpdateNullTarget kTargetNull>
191 void UpdateTableWith(Updater& updater)
192 REQUIRES_SHARED(art::Locks::mutator_lock_)
193 REQUIRES(allow_disallow_lock_);
194
Alex Lightc19cd2f2017-07-06 14:12:13 -0700195 template <typename Storage, class Allocator = JvmtiAllocator<T>>
Andreas Gampef0140212017-03-03 13:28:58 -0800196 struct ReleasableContainer;
197
198 struct HashGcRoot {
199 size_t operator()(const art::GcRoot<art::mirror::Object>& r) const
200 REQUIRES_SHARED(art::Locks::mutator_lock_) {
201 return reinterpret_cast<uintptr_t>(r.Read<art::kWithoutReadBarrier>());
202 }
203 };
204
205 struct EqGcRoot {
206 bool operator()(const art::GcRoot<art::mirror::Object>& r1,
207 const art::GcRoot<art::mirror::Object>& r2) const
208 REQUIRES_SHARED(art::Locks::mutator_lock_) {
209 return r1.Read<art::kWithoutReadBarrier>() == r2.Read<art::kWithoutReadBarrier>();
210 }
211 };
212
Alex Lightc19cd2f2017-07-06 14:12:13 -0700213 using TagAllocator = JvmtiAllocator<std::pair<const art::GcRoot<art::mirror::Object>, T>>;
Andreas Gampef0140212017-03-03 13:28:58 -0800214 std::unordered_map<art::GcRoot<art::mirror::Object>,
215 T,
216 HashGcRoot,
Alex Lightc19cd2f2017-07-06 14:12:13 -0700217 EqGcRoot,
218 TagAllocator> tagged_objects_
Andreas Gampef0140212017-03-03 13:28:58 -0800219 GUARDED_BY(allow_disallow_lock_)
220 GUARDED_BY(art::Locks::mutator_lock_);
221 // To avoid repeatedly scanning the whole table, remember if we did that since the last sweep.
222 bool update_since_last_sweep_;
223};
224
}  // namespace openjdkjvmti

#endif  // ART_OPENJDKJVMTI_JVMTI_WEAK_TABLE_H_