/* Copyright (C) 2016 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "events-inl.h"

#include <array>
#include <sys/time.h>

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_jvmti.h"
#include "art_method-inl.h"
#include "deopt_manager.h"
#include "dex/dex_file_types.h"
#include "gc/allocation_listener.h"
#include "gc/gc_pause_listener.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "jni/jni_env_ext-inl.h"
#include "jni/jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "monitor.h"
#include "nativehelper/scoped_local_ref.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_phase.h"
#include "well_known_classes.h"

namespace openjdkjvmti {

void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
  if (art::kIsDebugBuild) {
    ArtJvmtiEventCallbacks clean;
    DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
        << "CopyExtensionsFrom called with initialized eventsCallbacks!";
  }
  if (cb != nullptr) {
    memcpy(this, cb, sizeof(*this));
  } else {
    memset(this, 0, sizeof(*this));
  }
}

jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
  switch (index) {
    case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
      DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
      return OK;
    default:
      return ERR(ILLEGAL_ARGUMENT);
  }
}

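// Extension events are ART-specific additions to the standard JVMTI event set; they are delivered
// through jvmtiExtensionEvent callbacks installed via ArtJvmtiEventCallbacks::Set above.
// kDdmPublishChunk is currently the only such event.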
bool IsExtensionEvent(jint e) {
  return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
      e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
      IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
}

bool IsExtensionEvent(ArtJvmtiEvent e) {
  switch (e) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      return true;
    default:
      return false;
  }
}

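// Event masks are kept at two levels: a global mask for events enabled on all threads, and a list
// of per-(thread, tid) masks. unioned_thread_event_mask caches the union of the per-thread masks
// so that IsEnabledAnywhere stays a couple of bit tests.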
bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
}

EventMask& EventMasks::GetEventMask(art::Thread* thread) {
  if (thread == nullptr) {
    return global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return pair.second;
    }
  }

  // TODO: Remove old UniqueThread with the same pointer, if exists.

  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
  return thread_event_masks.back().second;
}

EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
  if (thread == nullptr) {
    return &global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return &pair.second;
    }
  }

  return nullptr;
}


void EventMasks::EnableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}

void EventMasks::DisableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event, false);
  if (thread != nullptr) {
    // Regenerate union for the event.
    bool union_value = false;
    for (auto& pair : thread_event_masks) {
      union_value |= pair.second.Test(event);
      if (union_value) {
        break;
      }
    }
    unioned_thread_event_mask.Set(event, union_value);
  }
}

void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
  if (UNLIKELY(caps.can_retransform_classes == 1)) {
    // If we are giving this env the retransform classes cap we need to switch all events of
    // NonTransformable to Transformable and vice versa.
    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    if (global_event_mask.Test(to_remove)) {
      CHECK(!global_event_mask.Test(to_add));
      global_event_mask.Set(to_remove, false);
      global_event_mask.Set(to_add, true);
    }

    if (unioned_thread_event_mask.Test(to_remove)) {
      CHECK(!unioned_thread_event_mask.Test(to_add));
      unioned_thread_event_mask.Set(to_remove, false);
      unioned_thread_event_mask.Set(to_add, true);
    }
    for (auto thread_mask : thread_event_masks) {
      if (thread_mask.second.Test(to_remove)) {
        CHECK(!thread_mask.second.Test(to_add));
        thread_mask.second.Set(to_remove, false);
        thread_mask.second.Set(to_add, true);
      }
    }
  }
}

void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  envs.push_back(env);
}

void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  // Since we might be currently iterating over the envs list we cannot actually erase elements.
  // Instead we will simply replace them with 'nullptr' and skip them manually.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    envs.erase(it);
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    }
  }
}

static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}

template<typename Type>
static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
}

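// Helper that forwards to EventHandler::DispatchEvent, prepending the JNIEnv and a local
// reference to the calling thread's Java peer (the jthread argument every JVMTI callback takes).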
template<ArtJvmtiEvent kEvent, typename ...Args>
static void RunEventCallback(EventHandler* handler,
                             art::Thread* self,
                             art::JNIEnvExt* jnienv,
                             Args... args)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
  handler->DispatchEvent<kEvent>(self,
                                 static_cast<JNIEnv*>(jnienv),
                                 thread_jni.get(),
                                 args...);
}

static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
  }
}

class JvmtiDdmChunkListener : public art::DdmCallback {
 public:
  explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}

  void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
      art::Thread* self = art::Thread::Current();
      handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
          self,
          static_cast<JNIEnv*>(self->GetJniEnv()),
          static_cast<jint>(type),
          static_cast<jint>(data.size()),
          reinterpret_cast<const jbyte*>(data.data()));
    }
  }

 private:
  EventHandler* handler_;

  DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
};

class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
                                                      self,
                                                      jni_env,
                                                      object.get(),
                                                      klass.get(),
                                                      static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
  if (enable) {
    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
  } else {
    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
  }
}

class JvmtiMonitorListener : public art::MonitorCallback {
 public:
  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}

  void MonitorContendedLocking(art::Monitor* m)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void MonitorContendedLocked(art::Monitor* m)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jlong>(timeout));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED event will be sent
  // immediately after a thread has woken up from a sleep caused by a call to Object#wait. If the
  // thread will never go to sleep (due to not having the lock, having bad arguments, or having an
  // exception propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: when there was an
  // exception in the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case
  // the RI would send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void MonitorWaitFinished(art::Monitor* m, bool timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};

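// Note: LockSupport park/unpark has no JVMTI event of its own, so this listener reports parks as
// MonitorWait/MonitorWaited events, using the thread's park blocker (or, if none is set, the
// thread object itself) as the "monitor" and converting the park timeout to milliseconds.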
class JvmtiParkListener : public art::ParkCallback {
 public:
  explicit JvmtiParkListener(EventHandler* handler) : handler_(handler) {}

  void ThreadParkStart(bool is_absolute, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      int64_t timeout_ms;
      if (!is_absolute) {
        if (timeout == 0) {
          timeout_ms = 0;
        } else {
          timeout_ms = timeout / 1000000;
          if (timeout_ms == 0) {
            // If we were instructed to park for a nonzero number of nanoseconds, but not enough
            // to be a full millisecond, round up to 1 ms. A nonzero park() call will return
            // soon, but a 0 wait or park call will wait indefinitely.
            timeout_ms = 1;
          }
        }
      } else {
        struct timeval tv;
        gettimeofday(&tv, (struct timezone *) nullptr);
        int64_t now = tv.tv_sec * 1000LL + tv.tv_usec / 1000;
        if (now < timeout) {
          timeout_ms = timeout - now;
        } else {
          // Waiting for 0 ms is an indefinite wait; parking until a time in
          // the past or the current time will return immediately, so emulate
          // the shortest possible wait event.
          timeout_ms = 1;
        }
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jlong>(timeout_ms));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED event will be sent
  // immediately after a thread has woken up from a sleep caused by a call to Object#wait. If the
  // thread will never go to sleep (due to not having the lock, having bad arguments, or having an
  // exception propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: when there was an
  // exception in the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case
  // the RI would send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void ThreadParkFinished(bool timeout) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupMonitorListener(art::MonitorCallback* monitor_listener, art::ParkCallback* park_listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(monitor_listener);
    art::Runtime::Current()->GetRuntimeCallbacks()->AddParkCallback(park_listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(monitor_listener);
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveParkCallback(park_listener);
  }
}

// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  void StartPause() override {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
  }

  void EndPause() override {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
  }

  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;
  bool start_enabled_;
  bool finish_enabled_;
};

static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
  bool old_state = listener->IsEnabled();

  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    listener->SetStartEnabled(enable);
  } else {
    listener->SetFinishEnabled(enable);
  }

  bool new_state = listener->IsEnabled();

  if (old_state != new_state) {
    if (new_state) {
      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    } else {
      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    }
  }
}

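// Translates ART instrumentation callbacks (method entry/exit/unwind, dex-pc moves, field reads
// and writes, frame pops, exceptions) into the corresponding JVMTI events. It is registered with
// the instrumentation framework on demand by SetupTraceListener below.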
class JvmtiMethodTraceListener final : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Callback for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_EQ(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    // methods. This could interact with obsolete methods if we ever let interface redefinition
    // happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The Dispatch code will filter the individual events to the
    // environments that actually requested them.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
    }
  }

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),  // NB static field modifications are given
                                                         // the class as this_object for some
                                                         // reason.
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }

  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jboolean is_exception_pending = self->IsExceptionPending();
    RunEventCallback<ArtJvmtiEvent::kFramePop>(
        event_handler_,
        self,
        jnienv,
        art::jni::EncodeArtMethod(frame.GetMethod()),
        is_exception_pending,
        &frame);
  }

  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest Java frame with a
    // compatible catch statement.
    class CatchLocationFinder final : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
          : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
            exception_class_(exception_class),
            catch_method_ptr_(out_catch_method),
            catch_dex_pc_ptr_(out_catch_pc) {}

      bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions= */ false);
  }

  // Call-back when an exception is thrown.
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation events get rid of this for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended= */ true,
                                                      /* abort_on_error= */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }

  // Call-back when an exception is handled.
  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    // Since the exception has already been handled there shouldn't be one pending.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      uint32_t dex_pc;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended= */ true,
                                                      /* abort_on_error= */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get());
    }
    return;
  }

  // Call-back for when we execute a branch.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    return;
  }

 private:
  EventHandler* const event_handler_;
};

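// Maps a JVMTI event onto the mask of ART instrumentation events that must be registered to
// observe it. Several JVMTI events can share one instrumentation event (e.g. breakpoint and
// single-step both rely on kDexPcMoved), which SetupTraceListener has to account for.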
static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kMethodEntry:
      return art::instrumentation::Instrumentation::kMethodEntered;
    case ArtJvmtiEvent::kMethodExit:
      return art::instrumentation::Instrumentation::kMethodExited |
             art::instrumentation::Instrumentation::kMethodUnwind;
    case ArtJvmtiEvent::kFieldModification:
      return art::instrumentation::Instrumentation::kFieldWritten;
    case ArtJvmtiEvent::kFieldAccess:
      return art::instrumentation::Instrumentation::kFieldRead;
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return art::instrumentation::Instrumentation::kDexPcMoved;
    case ArtJvmtiEvent::kFramePop:
      return art::instrumentation::Instrumentation::kWatchedFramePop;
    case ArtJvmtiEvent::kException:
      return art::instrumentation::Instrumentation::kExceptionThrown;
    case ArtJvmtiEvent::kExceptionCatch:
      return art::instrumentation::Instrumentation::kExceptionHandled;
    default:
      LOG(FATAL) << "Unknown event ";
      UNREACHABLE();
  }
}

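// Decides how much deoptimization an event requires. Breakpoint and exception-throw events do not
// force a full deoptimization of all methods; the remaining trace-style events currently do (see
// the TODO below about making them discriminate by thread).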
static bool EventNeedsFullDeopt(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kException:
      return false;
    // TODO We should support more of these or at least do something to make them discriminate by
    // thread.
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kSingleStep:
    case ArtJvmtiEvent::kFramePop:
      return true;
    default:
      LOG(FATAL) << "Unexpected event type!";
      UNREACHABLE();
  }
}

void EventHandler::SetupTraceListener(JvmtiMethodTraceListener* listener,
                                      ArtJvmtiEvent event,
                                      bool enable) {
  bool needs_full_deopt = EventNeedsFullDeopt(event);
  // Make sure we can deopt.
  {
    art::ScopedObjectAccess soa(art::Thread::Current());
    DeoptManager* deopt_manager = DeoptManager::Get();
    if (enable) {
      deopt_manager->AddDeoptimizationRequester();
      if (needs_full_deopt) {
        deopt_manager->AddDeoptimizeAllMethods();
      }
    } else {
      if (needs_full_deopt) {
        deopt_manager->RemoveDeoptimizeAllMethods();
      }
      deopt_manager->RemoveDeoptimizationRequester();
    }
  }

  // Add the actual listeners.
  uint32_t new_events = GetInstrumentationEventsFor(event);
  if (new_events == art::instrumentation::Instrumentation::kDexPcMoved) {
    // Need to skip adding the listeners if the event is breakpoint/single-step since those events
    // share the same art-instrumentation underlying event. We need to give them their own deopt
    // request though so the test waits until here.
    DCHECK(event == ArtJvmtiEvent::kBreakpoint || event == ArtJvmtiEvent::kSingleStep);
    ArtJvmtiEvent other = event == ArtJvmtiEvent::kBreakpoint ? ArtJvmtiEvent::kSingleStep
                                                              : ArtJvmtiEvent::kBreakpoint;
    if (IsEventEnabledAnywhere(other)) {
      // The event needs to be kept around/is already enabled by the other jvmti event that uses
      // the same instrumentation event.
      return;
    }
  }
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}

// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        override REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and
        // initialized their methods. Furthermore, since the jvmti-plugin must have been loaded by
        // this point, these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}

bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
  std::array<ArtJvmtiEvent, 4> events {
    {
      ArtJvmtiEvent::kMonitorContendedEnter,
      ArtJvmtiEvent::kMonitorContendedEntered,
      ArtJvmtiEvent::kMonitorWait,
      ArtJvmtiEvent::kMonitorWaited
    }
  };
  for (ArtJvmtiEvent e : events) {
    if (e != event && IsEventEnabledAnywhere(e)) {
      return true;
    }
  }
  return false;
}

void EventHandler::SetupFramePopTraceListener(bool enable) {
  if (enable) {
    frame_pop_enabled = true;
    SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
  } else {
    // Remove the listener only if we have no outstanding frames.
    {
      art::ReaderMutexLock mu(art::Thread::Current(), envs_lock_);
      for (ArtJvmTiEnv* env : envs) {
        art::ReaderMutexLock event_mu(art::Thread::Current(), env->event_info_mutex_);
        if (!env->notify_frames.empty()) {
          // Leaving FramePop listener since there are unsent FramePop events.
          return;
        }
      }
      frame_pop_enabled = false;
    }
    SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
  }
}

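// Each event type is backed by one of several mechanisms: the allocation listener, the GC pause
// listener, the DDM callback, the monitor/park callbacks, or the instrumentation-based trace
// listener. HandleEventType switches the matching backend on or off when the first environment
// enables an event or the last one disables it.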
// Handle special work for the given event type, if necessary.
void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      SetupDdmTracking(ddm_listener_.get(), enable);
      return;
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return;
    // The FramePop listener is never fully removed while an environment still has outstanding
    // pop-events, since removing it then would either leave dangling pointers or cause missed
    // events; see SetupFramePopTraceListener.
    case ArtJvmtiEvent::kFramePop:
      if (enable && frame_pop_enabled) {
        // The frame-pop event was held on by pending events so we don't need to do anything.
        break;
      } else {
        SetupFramePopTraceListener(enable);
        break;
      }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      SetupTraceListener(method_trace_listener_.get(), event, enable);
      return;
    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      if (!OtherMonitorEventsEnabledAnywhere(event)) {
        SetupMonitorListener(monitor_listener_.get(), park_listener_.get(), enable);
      }
      return;
    default:
      break;
  }
}

// Checks to see if the env has the capabilities associated with the given event.
static bool HasAssociatedCapability(ArtJvmTiEnv* env,
                                    ArtJvmtiEvent event) {
  jvmtiCapabilities caps = env->capabilities;
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
      return caps.can_generate_breakpoint_events == 1;

    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
      return caps.can_generate_compiled_method_load_events == 1;

    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      return caps.can_generate_exception_events == 1;

    case ArtJvmtiEvent::kFieldAccess:
      return caps.can_generate_field_access_events == 1;

    case ArtJvmtiEvent::kFieldModification:
      return caps.can_generate_field_modification_events == 1;

    case ArtJvmtiEvent::kFramePop:
      return caps.can_generate_frame_pop_events == 1;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      return caps.can_generate_garbage_collection_events == 1;

    case ArtJvmtiEvent::kMethodEntry:
      return caps.can_generate_method_entry_events == 1;

    case ArtJvmtiEvent::kMethodExit:
      return caps.can_generate_method_exit_events == 1;

    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      return caps.can_generate_monitor_events == 1;

    case ArtJvmtiEvent::kNativeMethodBind:
      return caps.can_generate_native_method_bind_events == 1;

    case ArtJvmtiEvent::kObjectFree:
      return caps.can_generate_object_free_events == 1;

    case ArtJvmtiEvent::kSingleStep:
      return caps.can_generate_single_step_events == 1;

    case ArtJvmtiEvent::kVmObjectAlloc:
      return caps.can_generate_vm_object_alloc_events == 1;

    default:
      return true;
  }
}

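// Validates an event-notification request (thread state, mode, event range, capabilities),
// updates the per-env and global event masks under the appropriate locks, and only reconfigures
// the underlying runtime listeners when the globally-enabled state of the event actually changes.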
jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  bool old_state;
  bool new_state;

  {
    // Change the event masks atomically.
    art::Thread* self = art::Thread::Current();
    art::WriterMutexLock mu(self, envs_lock_);
    art::WriterMutexLock mu_env_info(self, env->event_info_mutex_);
    old_state = global_mask.Test(event);
    if (mode == JVMTI_ENABLE) {
      env->event_masks.EnableEvent(env, thread, event);
      global_mask.Set(event);
      new_state = true;
    } else {
      DCHECK_EQ(mode, JVMTI_DISABLE);

      env->event_masks.DisableEvent(env, thread, event);
      RecalculateGlobalEventMaskLocked(event);
      new_state = global_mask.Test(event);
    }
  }

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}

void EventHandler::HandleBreakpointEventsChanged(bool added) {
  if (added) {
    DeoptManager::Get()->AddDeoptimizationRequester();
  } else {
    DeoptManager::Get()->RemoveDeoptimizationRequester();
  }
}

void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}

EventHandler::EventHandler()
  : envs_lock_("JVMTI Environment List Lock", art::LockLevel::kTopLockLevel),
    frame_pop_enabled(false) {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  ddm_listener_.reset(new JvmtiDdmChunkListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
  park_listener_.reset(new JvmtiParkListener(this));
}

EventHandler::~EventHandler() {
}

}  // namespace openjdkjvmti