/* Copyright (C) 2016 The Android Open Source Project
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This file implements interfaces from the file jvmti.h. This implementation
 * is licensed under the same terms as the file jvmti.h. The
 * copyright and license information for the file jvmti.h follows.
 *
 * Copyright (c) 2003, 2011, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation. Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

#include "events-inl.h"

#include <array>
#include <sys/time.h>

#include "art_field-inl.h"
#include "art_jvmti.h"
#include "art_method-inl.h"
#include "deopt_manager.h"
#include "dex/dex_file_types.h"
#include "gc/allocation_listener.h"
#include "gc/gc_pause_listener.h"
#include "gc/heap.h"
#include "gc/scoped_gc_critical_section.h"
#include "handle_scope-inl.h"
#include "instrumentation.h"
#include "jni/jni_env_ext-inl.h"
#include "jni/jni_internal.h"
#include "mirror/class.h"
#include "mirror/object-inl.h"
#include "monitor.h"
#include "nativehelper/scoped_local_ref.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "thread_list.h"
#include "ti_phase.h"
#include "well_known_classes.h"

namespace openjdkjvmti {

void ArtJvmtiEventCallbacks::CopyExtensionsFrom(const ArtJvmtiEventCallbacks* cb) {
  if (art::kIsDebugBuild) {
    ArtJvmtiEventCallbacks clean;
    DCHECK_EQ(memcmp(&clean, this, sizeof(clean)), 0)
        << "CopyExtensionsFrom called with initialized eventsCallbacks!";
  }
  if (cb != nullptr) {
    memcpy(this, cb, sizeof(*this));
  } else {
    memset(this, 0, sizeof(*this));
  }
}

jvmtiError ArtJvmtiEventCallbacks::Set(jint index, jvmtiExtensionEvent cb) {
  switch (index) {
    case static_cast<jint>(ArtJvmtiEvent::kDdmPublishChunk):
      DdmPublishChunk = reinterpret_cast<ArtJvmtiEventDdmPublishChunk>(cb);
      return OK;
    default:
      return ERR(ILLEGAL_ARGUMENT);
  }
}


bool IsExtensionEvent(jint e) {
  return e >= static_cast<jint>(ArtJvmtiEvent::kMinEventTypeVal) &&
      e <= static_cast<jint>(ArtJvmtiEvent::kMaxEventTypeVal) &&
      IsExtensionEvent(static_cast<ArtJvmtiEvent>(e));
}

bool IsExtensionEvent(ArtJvmtiEvent e) {
  switch (e) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      return true;
    default:
      return false;
  }
}

bool EventMasks::IsEnabledAnywhere(ArtJvmtiEvent event) {
  return global_event_mask.Test(event) || unioned_thread_event_mask.Test(event);
}

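// Returns the event mask for the given thread, or the global mask when thread is nullptr. A
// per-thread mask is created lazily the first time one is requested for that thread/tid pair.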
EventMask& EventMasks::GetEventMask(art::Thread* thread) {
  if (thread == nullptr) {
    return global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return pair.second;
    }
  }

  // TODO: Remove old UniqueThread with the same pointer, if exists.

  thread_event_masks.emplace_back(UniqueThread(thread, thread->GetTid()), EventMask());
  return thread_event_masks.back().second;
}

EventMask* EventMasks::GetEventMaskOrNull(art::Thread* thread) {
  if (thread == nullptr) {
    return &global_event_mask;
  }

  for (auto& pair : thread_event_masks) {
    const UniqueThread& unique_thread = pair.first;
    if (unique_thread.first == thread &&
        unique_thread.second == static_cast<uint32_t>(thread->GetTid())) {
      return &pair.second;
    }
  }

  return nullptr;
}


void EventMasks::EnableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event);
  if (thread != nullptr) {
    unioned_thread_event_mask.Set(event, true);
  }
}

void EventMasks::DisableEvent(ArtJvmTiEnv* env, art::Thread* thread, ArtJvmtiEvent event) {
  DCHECK_EQ(&env->event_masks, this);
  env->event_info_mutex_.AssertExclusiveHeld(art::Thread::Current());
  DCHECK(EventMask::EventIsInRange(event));
  GetEventMask(thread).Set(event, false);
  if (thread != nullptr) {
    // Regenerate union for the event.
    bool union_value = false;
    for (auto& pair : thread_event_masks) {
      union_value |= pair.second.Test(event);
      if (union_value) {
        break;
      }
    }
    unioned_thread_event_mask.Set(event, union_value);
  }
}

void EventMasks::HandleChangedCapabilities(const jvmtiCapabilities& caps, bool caps_added) {
  if (UNLIKELY(caps.can_retransform_classes == 1)) {
    // If we are giving this env the retransform classes cap we need to switch all events of
    // NonRetransformable to Retransformable and vice versa.
    ArtJvmtiEvent to_remove = caps_added ? ArtJvmtiEvent::kClassFileLoadHookNonRetransformable
                                         : ArtJvmtiEvent::kClassFileLoadHookRetransformable;
    ArtJvmtiEvent to_add = caps_added ? ArtJvmtiEvent::kClassFileLoadHookRetransformable
                                      : ArtJvmtiEvent::kClassFileLoadHookNonRetransformable;
    if (global_event_mask.Test(to_remove)) {
      CHECK(!global_event_mask.Test(to_add));
      global_event_mask.Set(to_remove, false);
      global_event_mask.Set(to_add, true);
    }

    if (unioned_thread_event_mask.Test(to_remove)) {
      CHECK(!unioned_thread_event_mask.Test(to_add));
      unioned_thread_event_mask.Set(to_remove, false);
      unioned_thread_event_mask.Set(to_add, true);
    }
    for (auto& thread_mask : thread_event_masks) {
      if (thread_mask.second.Test(to_remove)) {
        CHECK(!thread_mask.second.Test(to_add));
        thread_mask.second.Set(to_remove, false);
        thread_mask.second.Set(to_add, true);
      }
    }
  }
}

void EventHandler::RegisterArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  envs.push_back(env);
}

void EventHandler::RemoveArtJvmTiEnv(ArtJvmTiEnv* env) {
  art::WriterMutexLock mu(art::Thread::Current(), envs_lock_);
  // Remove the env from the list and recalculate the global event masks, since this env may have
  // been the only one with a given event enabled.
  auto it = std::find(envs.begin(), envs.end(), env);
  if (it != envs.end()) {
    envs.erase(it);
    for (size_t i = static_cast<size_t>(ArtJvmtiEvent::kMinEventTypeVal);
         i <= static_cast<size_t>(ArtJvmtiEvent::kMaxEventTypeVal);
         ++i) {
      RecalculateGlobalEventMaskLocked(static_cast<ArtJvmtiEvent>(i));
    }
  }
}

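// Returns false for the events that the JVMTI spec only allows to be enabled globally; SetEvent
// rejects a non-null thread argument for these.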
static bool IsThreadControllable(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kVmInit:
    case ArtJvmtiEvent::kVmStart:
    case ArtJvmtiEvent::kVmDeath:
    case ArtJvmtiEvent::kThreadStart:
    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
    case ArtJvmtiEvent::kDynamicCodeGenerated:
    case ArtJvmtiEvent::kDataDumpRequest:
      return false;

    default:
      return true;
  }
}

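// Creates a local reference to obj in the given JNIEnv, mapping a null object to nullptr so the
// result can be handed straight to a ScopedLocalRef.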
template<typename Type>
static Type AddLocalRef(art::JNIEnvExt* e, art::mirror::Object* obj)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  return (obj == nullptr) ? nullptr : e->AddLocalReference<Type>(obj);
}

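// Attaches the current thread's Java peer as the jthread argument and forwards the remaining
// arguments to EventHandler::DispatchEvent for the given event.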
template<ArtJvmtiEvent kEvent, typename ...Args>
static void RunEventCallback(EventHandler* handler,
                             art::Thread* self,
                             art::JNIEnvExt* jnienv,
                             Args... args)
    REQUIRES_SHARED(art::Locks::mutator_lock_) {
  ScopedLocalRef<jthread> thread_jni(jnienv, AddLocalRef<jthread>(jnienv, self->GetPeer()));
  handler->DispatchEvent<kEvent>(self,
                                 static_cast<JNIEnv*>(jnienv),
                                 thread_jni.get(),
                                 args...);
}

static void SetupDdmTracking(art::DdmCallback* listener, bool enable) {
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddDdmCallback(listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveDdmCallback(listener);
  }
}

class JvmtiDdmChunkListener : public art::DdmCallback {
 public:
  explicit JvmtiDdmChunkListener(EventHandler* handler) : handler_(handler) {}

  void DdmPublishChunk(uint32_t type, const art::ArrayRef<const uint8_t>& data)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kDdmPublishChunk)) {
      art::Thread* self = art::Thread::Current();
      handler_->DispatchEvent<ArtJvmtiEvent::kDdmPublishChunk>(
          self,
          static_cast<JNIEnv*>(self->GetJniEnv()),
          static_cast<jint>(type),
          static_cast<jint>(data.size()),
          reinterpret_cast<const jbyte*>(data.data()));
    }
  }

 private:
  EventHandler* handler_;

  DISALLOW_COPY_AND_ASSIGN(JvmtiDdmChunkListener);
};

class JvmtiAllocationListener : public art::gc::AllocationListener {
 public:
  explicit JvmtiAllocationListener(EventHandler* handler) : handler_(handler) {}

  void ObjectAllocated(art::Thread* self, art::ObjPtr<art::mirror::Object>* obj, size_t byte_count)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    DCHECK_EQ(self, art::Thread::Current());

    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kVmObjectAlloc)) {
      art::StackHandleScope<1> hs(self);
      auto h = hs.NewHandleWrapper(obj);
      // jvmtiEventVMObjectAlloc parameters:
      //      jvmtiEnv *jvmti_env,
      //      JNIEnv* jni_env,
      //      jthread thread,
      //      jobject object,
      //      jclass object_klass,
      //      jlong size
      art::JNIEnvExt* jni_env = self->GetJniEnv();
      ScopedLocalRef<jobject> object(
          jni_env, jni_env->AddLocalReference<jobject>(*obj));
      ScopedLocalRef<jclass> klass(
          jni_env, jni_env->AddLocalReference<jclass>(obj->Ptr()->GetClass()));

      RunEventCallback<ArtJvmtiEvent::kVmObjectAlloc>(handler_,
                                                      self,
                                                      jni_env,
                                                      object.get(),
                                                      klass.get(),
                                                      static_cast<jlong>(byte_count));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupObjectAllocationTracking(art::gc::AllocationListener* listener, bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  art::ScopedThreadSuspension sts(soa.Self(), art::ThreadState::kSuspended);
  if (enable) {
    art::Runtime::Current()->GetHeap()->SetAllocationListener(listener);
  } else {
    art::Runtime::Current()->GetHeap()->RemoveAllocationListener();
  }
}

class JvmtiMonitorListener : public art::MonitorCallback {
 public:
  explicit JvmtiMonitorListener(EventHandler* handler) : handler_(handler) {}

  void MonitorContendedLocking(art::Monitor* m)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEnter)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEnter>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void MonitorContendedLocked(art::Monitor* m)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorContendedEntered)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorContendedEntered>(
          handler_,
          self,
          jnienv,
          mon.get());
    }
  }

  void ObjectWaitStart(art::Handle<art::mirror::Object> obj, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, obj.Get()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jlong>(timeout));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: there was an exception
  // in the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI
  // would send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void MonitorWaitFinished(art::Monitor* m, bool timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> mon(jnienv, AddLocalRef<jobject>(jnienv, m->GetObject()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          mon.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};

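// Reports thread parking (e.g. through LockSupport.park) as JVMTI monitor-wait/waited events,
// using the thread's park blocker (or, if none is set, the thread's own peer object) as the
// reported monitor object.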
class JvmtiParkListener : public art::ParkCallback {
 public:
  explicit JvmtiParkListener(EventHandler* handler) : handler_(handler) {}

  void ThreadParkStart(bool is_absolute, int64_t timeout)
      override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWait)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      int64_t timeout_ms;
      if (!is_absolute) {
        if (timeout == 0) {
          timeout_ms = 0;
        } else {
          timeout_ms = timeout / 1000000;
          if (timeout_ms == 0) {
            // If we were instructed to park for a nonzero number of nanoseconds, but not enough
            // to be a full millisecond, round up to 1 ms. A nonzero park() call will return
            // soon, but a 0 wait or park call will wait indefinitely.
            timeout_ms = 1;
          }
        }
      } else {
        struct timeval tv;
        gettimeofday(&tv, (struct timezone *) nullptr);
        int64_t now = tv.tv_sec * 1000LL + tv.tv_usec / 1000;
        if (now < timeout) {
          timeout_ms = timeout - now;
        } else {
          // Waiting for 0 ms is an indefinite wait; parking until a time in
          // the past or the current time will return immediately, so emulate
          // the shortest possible wait event.
          timeout_ms = 1;
        }
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWait>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jlong>(timeout_ms));
    }
  }


  // Our interpretation of the spec is that the JVMTI_EVENT_MONITOR_WAITED will be sent immediately
  // after a thread has woken up from a sleep caused by a call to Object#wait. If the thread will
  // never go to sleep (due to not having the lock, having bad arguments, or having an exception
  // propagated from JVMTI_EVENT_MONITOR_WAIT) we will not send this event.
  //
  // This does not fully match the RI semantics. Specifically, we will not send the
  // JVMTI_EVENT_MONITOR_WAITED event in one situation where the RI would: there was an exception
  // in the JVMTI_EVENT_MONITOR_WAIT event but otherwise the call was fine. In that case the RI
  // would send this event and return without going to sleep.
  //
  // See b/65558434 for more discussion.
  void ThreadParkFinished(bool timeout) override REQUIRES_SHARED(art::Locks::mutator_lock_) {
    if (handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMonitorWaited)) {
      art::Thread* self = art::Thread::Current();
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtField* parkBlockerField = art::jni::DecodeArtField(
          art::WellKnownClasses::java_lang_Thread_parkBlocker);
      art::ObjPtr<art::mirror::Object> blocker_obj = parkBlockerField->GetObj(self->GetPeer());
      if (blocker_obj.IsNull()) {
        blocker_obj = self->GetPeer();
      }
      ScopedLocalRef<jobject> blocker(jnienv, AddLocalRef<jobject>(jnienv, blocker_obj.Ptr()));
      RunEventCallback<ArtJvmtiEvent::kMonitorWaited>(
          handler_,
          self,
          jnienv,
          blocker.get(),
          static_cast<jboolean>(timeout));
    }
  }

 private:
  EventHandler* handler_;
};

static void SetupMonitorListener(art::MonitorCallback* monitor_listener,
                                 art::ParkCallback* park_listener,
                                 bool enable) {
  // We must not hold the mutator lock here, but if we're in FastJNI, for example, we might. For
  // now, do a workaround: (possibly) acquire and release.
  art::ScopedObjectAccess soa(art::Thread::Current());
  if (enable) {
    art::Runtime::Current()->GetRuntimeCallbacks()->AddMonitorCallback(monitor_listener);
    art::Runtime::Current()->GetRuntimeCallbacks()->AddParkCallback(park_listener);
  } else {
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveMonitorCallback(monitor_listener);
    art::Runtime::Current()->GetRuntimeCallbacks()->RemoveParkCallback(park_listener);
  }
}

// Report GC pauses (see spec) as GARBAGE_COLLECTION_START and GARBAGE_COLLECTION_END.
class JvmtiGcPauseListener : public art::gc::GcPauseListener {
 public:
  explicit JvmtiGcPauseListener(EventHandler* handler)
      : handler_(handler),
        start_enabled_(false),
        finish_enabled_(false) {}

  void StartPause() override {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionStart>(art::Thread::Current());
  }

  void EndPause() override {
    handler_->DispatchEvent<ArtJvmtiEvent::kGarbageCollectionFinish>(art::Thread::Current());
  }

  bool IsEnabled() {
    return start_enabled_ || finish_enabled_;
  }

  void SetStartEnabled(bool e) {
    start_enabled_ = e;
  }

  void SetFinishEnabled(bool e) {
    finish_enabled_ = e;
  }

 private:
  EventHandler* handler_;
  bool start_enabled_;
  bool finish_enabled_;
};

static void SetupGcPauseTracking(JvmtiGcPauseListener* listener, ArtJvmtiEvent event, bool enable) {
  bool old_state = listener->IsEnabled();

  if (event == ArtJvmtiEvent::kGarbageCollectionStart) {
    listener->SetStartEnabled(enable);
  } else {
    listener->SetFinishEnabled(enable);
  }

  bool new_state = listener->IsEnabled();

  if (old_state != new_state) {
    if (new_state) {
      art::Runtime::Current()->GetHeap()->SetGcPauseListener(listener);
    } else {
      art::Runtime::Current()->GetHeap()->RemoveGcPauseListener();
    }
  }
}

class JvmtiMethodTraceListener final : public art::instrumentation::InstrumentationListener {
 public:
  explicit JvmtiMethodTraceListener(EventHandler* handler) : event_handler_(handler) {}

  // Call-back for when a method is entered.
  void MethodEntered(art::Thread* self,
                     art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                     art::ArtMethod* method,
                     uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodEntry)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      RunEventCallback<ArtJvmtiEvent::kMethodEntry>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method));
    }
  }

  // Call-back for when a method is exited with a reference return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    art::Handle<art::mirror::Object> return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_EQ(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      ScopedLocalRef<jobject> return_jobj(jnienv, AddLocalRef<jobject>(jnienv, return_value.Get()));
      val.l = return_jobj.get();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is exited with a primitive (or void) return value.
  void MethodExited(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED,
                    const art::JValue& return_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      DCHECK_NE(
          method->GetInterfaceMethodIfProxy(art::kRuntimePointerSize)->GetReturnTypePrimitive(),
          art::Primitive::kPrimNot) << method->PrettyMethod();
      DCHECK(!self->IsExceptionPending());
      jvalue val;
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = return_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_FALSE),
          val);
    }
  }

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  void MethodUnwind(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                    art::ArtMethod* method,
                    uint32_t dex_pc ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (!method->IsRuntimeMethod() &&
        event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kMethodExit)) {
      jvalue val;
      // Just set this to 0xffffffffffffffff so it's not uninitialized.
      val.j = static_cast<jlong>(-1);
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::StackHandleScope<1> hs(self);
      art::Handle<art::mirror::Throwable> old_exception(hs.NewHandle(self->GetException()));
      CHECK(!old_exception.IsNull());
      self->ClearException();
      RunEventCallback<ArtJvmtiEvent::kMethodExit>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          /*was_popped_by_exception=*/ static_cast<jboolean>(JNI_TRUE),
          val);
      // Match RI behavior of just throwing away original exception if a new one is thrown.
      if (LIKELY(!self->IsExceptionPending())) {
        self->SetException(old_exception.Get());
      }
    }
  }

  // Call-back for when the dex pc moves in a method.
  void DexPcMoved(art::Thread* self,
                  art::Handle<art::mirror::Object> this_object ATTRIBUTE_UNUSED,
                  art::ArtMethod* method,
                  uint32_t new_dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    DCHECK(!method->IsRuntimeMethod());
    // Default methods might be copied to multiple classes. We need to get the canonical version of
    // this method so that we can check for breakpoints correctly.
    // TODO We should maybe do this on other events to ensure that we are consistent WRT default
    //      methods. This could interact with obsolete methods if we ever let interface redefinition
    //      happen though.
    method = method->GetCanonicalMethod();
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jmethodID jmethod = art::jni::EncodeArtMethod(method);
    jlocation location = static_cast<jlocation>(new_dex_pc);
    // Step event is reported first according to the spec.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kSingleStep)) {
      RunEventCallback<ArtJvmtiEvent::kSingleStep>(event_handler_, self, jnienv, jmethod, location);
    }
    // Next we do the Breakpoint events. The Dispatch code will filter out the environments that
    // do not have a breakpoint set at this particular location.
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kBreakpoint)) {
      RunEventCallback<ArtJvmtiEvent::kBreakpoint>(event_handler_, self, jnienv, jmethod, location);
    }
  }

  // Call-back for when we read from a field.
  void FieldRead(art::Thread* self,
                 art::Handle<art::mirror::Object> this_object,
                 art::ArtMethod* method,
                 uint32_t dex_pc,
                 art::ArtField* field)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldAccess)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      RunEventCallback<ArtJvmtiEvent::kFieldAccess>(event_handler_,
                                                    self,
                                                    jnienv,
                                                    art::jni::EncodeArtMethod(method),
                                                    static_cast<jlocation>(dex_pc),
                                                    static_cast<jclass>(fklass.get()),
                                                    this_ref.get(),
                                                    art::jni::EncodeArtField(field));
    }
  }

  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    art::Handle<art::mirror::Object> new_val)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      // DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      ScopedLocalRef<jobject> fval(jnienv, AddLocalRef<jobject>(jnienv, new_val.Get()));
      jvalue val;
      val.l = fval.get();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),
          art::jni::EncodeArtField(field),
          'L',  // type_char
          val);
    }
  }

  // Call-back for when we write into a field.
  void FieldWritten(art::Thread* self,
                    art::Handle<art::mirror::Object> this_object,
                    art::ArtMethod* method,
                    uint32_t dex_pc,
                    art::ArtField* field,
                    const art::JValue& field_value)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kFieldModification)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      DCHECK(!self->IsExceptionPending());
      ScopedLocalRef<jobject> this_ref(jnienv, AddLocalRef<jobject>(jnienv, this_object.Get()));
      ScopedLocalRef<jobject> fklass(jnienv,
                                     AddLocalRef<jobject>(jnienv,
                                                          field->GetDeclaringClass().Ptr()));
      char type_char = art::Primitive::Descriptor(field->GetTypeAsPrimitiveType())[0];
      jvalue val;
      // 64bit integer is the largest value in the union so we should be fine simply copying it into
      // the union.
      val.j = field_value.GetJ();
      RunEventCallback<ArtJvmtiEvent::kFieldModification>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          static_cast<jclass>(fklass.get()),
          field->IsStatic() ? nullptr : this_ref.get(),  // nb static field modifications get given
                                                         // the class as this_object for some
                                                         // reason.
          art::jni::EncodeArtField(field),
          type_char,
          val);
    }
  }

  void WatchedFramePop(art::Thread* self, const art::ShadowFrame& frame)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    art::JNIEnvExt* jnienv = self->GetJniEnv();
    jboolean is_exception_pending = self->IsExceptionPending();
    RunEventCallback<ArtJvmtiEvent::kFramePop>(
        event_handler_,
        self,
        jnienv,
        art::jni::EncodeArtMethod(frame.GetMethod()),
        is_exception_pending,
        &frame);
  }

  static void FindCatchMethodsFromThrow(art::Thread* self,
                                        art::Handle<art::mirror::Throwable> exception,
                                        /*out*/ art::ArtMethod** out_method,
                                        /*out*/ uint32_t* dex_pc)
      REQUIRES_SHARED(art::Locks::mutator_lock_) {
    // Finds the location where this exception will most likely be caught. We ignore intervening
    // native frames (which could catch the exception) and return the closest java frame with a
    // compatible catch statement.
    class CatchLocationFinder final : public art::StackVisitor {
     public:
      CatchLocationFinder(art::Thread* target,
                          art::Handle<art::mirror::Class> exception_class,
                          art::Context* context,
                          /*out*/ art::ArtMethod** out_catch_method,
                          /*out*/ uint32_t* out_catch_pc)
          REQUIRES_SHARED(art::Locks::mutator_lock_)
        : StackVisitor(target, context, art::StackVisitor::StackWalkKind::kIncludeInlinedFrames),
          exception_class_(exception_class),
          catch_method_ptr_(out_catch_method),
          catch_dex_pc_ptr_(out_catch_pc) {}

      bool VisitFrame() override REQUIRES_SHARED(art::Locks::mutator_lock_) {
        art::ArtMethod* method = GetMethod();
        DCHECK(method != nullptr);
        if (method->IsRuntimeMethod()) {
          return true;
        }

        if (!method->IsNative()) {
          uint32_t cur_dex_pc = GetDexPc();
          if (cur_dex_pc == art::dex::kDexNoIndex) {
            // This frame looks opaque. Just keep on going.
            return true;
          }
          bool has_no_move_exception = false;
          uint32_t found_dex_pc = method->FindCatchBlock(
              exception_class_, cur_dex_pc, &has_no_move_exception);
          if (found_dex_pc != art::dex::kDexNoIndex) {
            // We found the catch. Store the result and return.
            *catch_method_ptr_ = method;
            *catch_dex_pc_ptr_ = found_dex_pc;
            return false;
          }
        }
        return true;
      }

     private:
      art::Handle<art::mirror::Class> exception_class_;
      art::ArtMethod** catch_method_ptr_;
      uint32_t* catch_dex_pc_ptr_;

      DISALLOW_COPY_AND_ASSIGN(CatchLocationFinder);
    };

    art::StackHandleScope<1> hs(self);
    *out_method = nullptr;
    *dex_pc = 0;
    std::unique_ptr<art::Context> context(art::Context::Create());

    CatchLocationFinder clf(self,
                            hs.NewHandle(exception->GetClass()),
                            context.get(),
                            /*out*/ out_method,
                            /*out*/ dex_pc);
    clf.WalkStack(/* include_transitions= */ false);
  }

  // Call-back when an exception is thrown.
  void ExceptionThrown(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    DCHECK(self->IsExceptionThrownByCurrentMethod(exception_object.Get()));
    // The instrumentation events get rid of this for us.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kException)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      art::ArtMethod* catch_method;
      uint32_t catch_pc;
      FindCatchMethodsFromThrow(self, exception_object, &catch_method, &catch_pc);
      uint32_t dex_pc = 0;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended= */ true,
                                                      /* abort_on_error= */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kException>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get(),
          art::jni::EncodeArtMethod(catch_method),
          static_cast<jlocation>(catch_pc));
    }
    return;
  }

  // Call-back when an exception is handled.
  void ExceptionHandled(art::Thread* self, art::Handle<art::mirror::Throwable> exception_object)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    // Since the exception has already been handled there shouldn't be one pending.
    DCHECK(!self->IsExceptionPending());
    if (event_handler_->IsEventEnabledAnywhere(ArtJvmtiEvent::kExceptionCatch)) {
      art::JNIEnvExt* jnienv = self->GetJniEnv();
      uint32_t dex_pc;
      art::ArtMethod* method = self->GetCurrentMethod(&dex_pc,
                                                      /* check_suspended= */ true,
                                                      /* abort_on_error= */ art::kIsDebugBuild);
      ScopedLocalRef<jobject> exception(jnienv,
                                        AddLocalRef<jobject>(jnienv, exception_object.Get()));
      RunEventCallback<ArtJvmtiEvent::kExceptionCatch>(
          event_handler_,
          self,
          jnienv,
          art::jni::EncodeArtMethod(method),
          static_cast<jlocation>(dex_pc),
          exception.get());
    }
    return;
  }

  // Call-back for when we execute a branch.
  void Branch(art::Thread* self ATTRIBUTE_UNUSED,
              art::ArtMethod* method ATTRIBUTE_UNUSED,
              uint32_t dex_pc ATTRIBUTE_UNUSED,
              int32_t dex_pc_offset ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(art::Locks::mutator_lock_) override {
    return;
  }

 private:
  EventHandler* const event_handler_;
};

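// Maps a JVMTI event onto the mask of art::instrumentation events the trace listener has to be
// registered for in order to report it.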
static uint32_t GetInstrumentationEventsFor(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kMethodEntry:
      return art::instrumentation::Instrumentation::kMethodEntered;
    case ArtJvmtiEvent::kMethodExit:
      return art::instrumentation::Instrumentation::kMethodExited |
             art::instrumentation::Instrumentation::kMethodUnwind;
    case ArtJvmtiEvent::kFieldModification:
      return art::instrumentation::Instrumentation::kFieldWritten;
    case ArtJvmtiEvent::kFieldAccess:
      return art::instrumentation::Instrumentation::kFieldRead;
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      return art::instrumentation::Instrumentation::kDexPcMoved;
    case ArtJvmtiEvent::kFramePop:
      return art::instrumentation::Instrumentation::kWatchedFramePop;
    case ArtJvmtiEvent::kException:
      return art::instrumentation::Instrumentation::kExceptionThrown;
    case ArtJvmtiEvent::kExceptionCatch:
      return art::instrumentation::Instrumentation::kExceptionHandled;
    default:
      LOG(FATAL) << "Unknown event ";
      UNREACHABLE();
  }
}

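// Returns true if enabling the event requires deoptimizing all methods, rather than relying on
// the more targeted deoptimization requests that, for example, individual breakpoints make
// elsewhere.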
static bool EventNeedsFullDeopt(ArtJvmtiEvent event) {
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kException:
      return false;
    // TODO We should support more of these or at least do something to make them discriminate by
    //      thread.
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kSingleStep:
    case ArtJvmtiEvent::kFramePop:
      return true;
    default:
      LOG(FATAL) << "Unexpected event type!";
      UNREACHABLE();
  }
}

void EventHandler::SetupTraceListener(JvmtiMethodTraceListener* listener,
                                      ArtJvmtiEvent event,
                                      bool enable) {
  bool needs_full_deopt = EventNeedsFullDeopt(event);
  // Make sure we can deopt.
  {
    art::ScopedObjectAccess soa(art::Thread::Current());
    DeoptManager* deopt_manager = DeoptManager::Get();
    if (enable) {
      deopt_manager->AddDeoptimizationRequester();
      if (needs_full_deopt) {
        deopt_manager->AddDeoptimizeAllMethods();
      }
    } else {
      if (needs_full_deopt) {
        deopt_manager->RemoveDeoptimizeAllMethods();
      }
      deopt_manager->RemoveDeoptimizationRequester();
    }
  }

  // Add the actual listeners.
  uint32_t new_events = GetInstrumentationEventsFor(event);
  if (new_events == art::instrumentation::Instrumentation::kDexPcMoved) {
    // Need to skip adding the listeners if the event is breakpoint/single-step since those events
    // share the same art-instrumentation underlying event. We need to give them their own deopt
    // request though so the test waits until here.
    DCHECK(event == ArtJvmtiEvent::kBreakpoint || event == ArtJvmtiEvent::kSingleStep);
    ArtJvmtiEvent other = event == ArtJvmtiEvent::kBreakpoint ? ArtJvmtiEvent::kSingleStep
                                                              : ArtJvmtiEvent::kBreakpoint;
    if (IsEventEnabledAnywhere(other)) {
      // The event needs to be kept around/is already enabled by the other jvmti event that uses the
      // same instrumentation event.
      return;
    }
  }
  art::ScopedThreadStateChange stsc(art::Thread::Current(), art::ThreadState::kNative);
  art::instrumentation::Instrumentation* instr = art::Runtime::Current()->GetInstrumentation();
  art::ScopedSuspendAll ssa("jvmti method tracing installation");
  if (enable) {
    instr->AddListener(listener, new_events);
  } else {
    instr->RemoveListener(listener, new_events);
  }
}

// Makes sure that all compiled methods are AsyncDeoptimizable so we can deoptimize (and force to
// the switch interpreter) when we try to get or set a local variable.
void EventHandler::HandleLocalAccessCapabilityAdded() {
  class UpdateEntryPointsClassVisitor : public art::ClassVisitor {
   public:
    explicit UpdateEntryPointsClassVisitor(art::Runtime* runtime)
        : runtime_(runtime) {}

    bool operator()(art::ObjPtr<art::mirror::Class> klass)
        override REQUIRES(art::Locks::mutator_lock_) {
      if (!klass->IsLoaded()) {
        // Skip classes that aren't loaded since they might not have fully allocated and initialized
        // their methods. Furthermore since the jvmti-plugin must have been loaded by this point
        // these methods will definitely be using debuggable code.
        return true;
      }
      for (auto& m : klass->GetMethods(art::kRuntimePointerSize)) {
        const void* code = m.GetEntryPointFromQuickCompiledCode();
        if (m.IsNative() || m.IsProxyMethod()) {
          continue;
        } else if (!runtime_->GetClassLinker()->IsQuickToInterpreterBridge(code) &&
                   !runtime_->IsAsyncDeoptimizeable(reinterpret_cast<uintptr_t>(code))) {
          runtime_->GetInstrumentation()->UpdateMethodsCodeToInterpreterEntryPoint(&m);
        }
      }
      return true;
    }

   private:
    art::Runtime* runtime_;
  };
  art::ScopedObjectAccess soa(art::Thread::Current());
  UpdateEntryPointsClassVisitor visitor(art::Runtime::Current());
  art::Runtime::Current()->GetClassLinker()->VisitClasses(&visitor);
}

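// Returns true if any monitor-related event other than the one passed in is still enabled in
// some env, in which case the shared monitor/park listeners must stay installed.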
bool EventHandler::OtherMonitorEventsEnabledAnywhere(ArtJvmtiEvent event) {
  std::array<ArtJvmtiEvent, 4> events {
    {
      ArtJvmtiEvent::kMonitorContendedEnter,
      ArtJvmtiEvent::kMonitorContendedEntered,
      ArtJvmtiEvent::kMonitorWait,
      ArtJvmtiEvent::kMonitorWaited
    }
  };
  for (ArtJvmtiEvent e : events) {
    if (e != event && IsEventEnabledAnywhere(e)) {
      return true;
    }
  }
  return false;
}

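// FramePop needs special handling: the instrumentation listener may only be removed once no
// environment still has outstanding NotifyFramePop requests, otherwise the frames recorded in
// notify_frames would dangle or their events would be lost.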
void EventHandler::SetupFramePopTraceListener(bool enable) {
  if (enable) {
    frame_pop_enabled = true;
    SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
  } else {
    // remove the listener if we have no outstanding frames.
    {
      art::ReaderMutexLock mu(art::Thread::Current(), envs_lock_);
      for (ArtJvmTiEnv* env : envs) {
        art::ReaderMutexLock event_mu(art::Thread::Current(), env->event_info_mutex_);
        if (!env->notify_frames.empty()) {
          // Leaving FramePop listener since there are unsent FramePop events.
          return;
        }
      }
      frame_pop_enabled = false;
    }
    SetupTraceListener(method_trace_listener_.get(), ArtJvmtiEvent::kFramePop, enable);
  }
}

// Handle special work for the given event type, if necessary.
void EventHandler::HandleEventType(ArtJvmtiEvent event, bool enable) {
  switch (event) {
    case ArtJvmtiEvent::kDdmPublishChunk:
      SetupDdmTracking(ddm_listener_.get(), enable);
      return;
    case ArtJvmtiEvent::kVmObjectAlloc:
      SetupObjectAllocationTracking(alloc_listener_.get(), enable);
      return;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      SetupGcPauseTracking(gc_pause_listener_.get(), event, enable);
      return;
    // The FramePop listener can never be removed once it has been added if the event was disabled
    // while pop-events were still outstanding, since we would otherwise either have to deal with
    // dangling pointers or miss events.
    case ArtJvmtiEvent::kFramePop:
      if (enable && frame_pop_enabled) {
        // The frame-pop event was held on by pending events so we don't need to do anything.
        break;
      } else {
        SetupFramePopTraceListener(enable);
        break;
      }
    case ArtJvmtiEvent::kMethodEntry:
    case ArtJvmtiEvent::kMethodExit:
    case ArtJvmtiEvent::kFieldAccess:
    case ArtJvmtiEvent::kFieldModification:
    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
    case ArtJvmtiEvent::kBreakpoint:
    case ArtJvmtiEvent::kSingleStep:
      SetupTraceListener(method_trace_listener_.get(), event, enable);
      return;
    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      if (!OtherMonitorEventsEnabledAnywhere(event)) {
        SetupMonitorListener(monitor_listener_.get(), park_listener_.get(), enable);
      }
      return;
    default:
      break;
  }
}

// Checks to see if the env has the capabilities associated with the given event.
static bool HasAssociatedCapability(ArtJvmTiEnv* env,
                                    ArtJvmtiEvent event) {
  jvmtiCapabilities caps = env->capabilities;
  switch (event) {
    case ArtJvmtiEvent::kBreakpoint:
      return caps.can_generate_breakpoint_events == 1;

    case ArtJvmtiEvent::kCompiledMethodLoad:
    case ArtJvmtiEvent::kCompiledMethodUnload:
      return caps.can_generate_compiled_method_load_events == 1;

    case ArtJvmtiEvent::kException:
    case ArtJvmtiEvent::kExceptionCatch:
      return caps.can_generate_exception_events == 1;

    case ArtJvmtiEvent::kFieldAccess:
      return caps.can_generate_field_access_events == 1;

    case ArtJvmtiEvent::kFieldModification:
      return caps.can_generate_field_modification_events == 1;

    case ArtJvmtiEvent::kFramePop:
      return caps.can_generate_frame_pop_events == 1;

    case ArtJvmtiEvent::kGarbageCollectionStart:
    case ArtJvmtiEvent::kGarbageCollectionFinish:
      return caps.can_generate_garbage_collection_events == 1;

    case ArtJvmtiEvent::kMethodEntry:
      return caps.can_generate_method_entry_events == 1;

    case ArtJvmtiEvent::kMethodExit:
      return caps.can_generate_method_exit_events == 1;

    case ArtJvmtiEvent::kMonitorContendedEnter:
    case ArtJvmtiEvent::kMonitorContendedEntered:
    case ArtJvmtiEvent::kMonitorWait:
    case ArtJvmtiEvent::kMonitorWaited:
      return caps.can_generate_monitor_events == 1;

    case ArtJvmtiEvent::kNativeMethodBind:
      return caps.can_generate_native_method_bind_events == 1;

    case ArtJvmtiEvent::kObjectFree:
      return caps.can_generate_object_free_events == 1;

    case ArtJvmtiEvent::kSingleStep:
      return caps.can_generate_single_step_events == 1;

    case ArtJvmtiEvent::kVmObjectAlloc:
      return caps.can_generate_vm_object_alloc_events == 1;

    default:
      return true;
  }
}

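// Backing implementation of the JVMTI SetEventNotificationMode call. For example, an agent
// enabling global method-entry events with
//   jvmti_env->SetEventNotificationMode(JVMTI_ENABLE, JVMTI_EVENT_METHOD_ENTRY, nullptr);
// ends up here with event == ArtJvmtiEvent::kMethodEntry and thread == nullptr.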
jvmtiError EventHandler::SetEvent(ArtJvmTiEnv* env,
                                  art::Thread* thread,
                                  ArtJvmtiEvent event,
                                  jvmtiEventMode mode) {
  if (thread != nullptr) {
    art::ThreadState state = thread->GetState();
    if (state == art::ThreadState::kStarting ||
        state == art::ThreadState::kTerminated ||
        thread->IsStillStarting()) {
      return ERR(THREAD_NOT_ALIVE);
    }
    if (!IsThreadControllable(event)) {
      return ERR(ILLEGAL_ARGUMENT);
    }
  }

  if (mode != JVMTI_ENABLE && mode != JVMTI_DISABLE) {
    return ERR(ILLEGAL_ARGUMENT);
  }

  if (!EventMask::EventIsInRange(event)) {
    return ERR(INVALID_EVENT_TYPE);
  }

  if (!HasAssociatedCapability(env, event)) {
    return ERR(MUST_POSSESS_CAPABILITY);
  }

  bool old_state;
  bool new_state;

  {
    // Change the event masks atomically.
    art::Thread* self = art::Thread::Current();
    art::WriterMutexLock mu(self, envs_lock_);
    art::WriterMutexLock mu_env_info(self, env->event_info_mutex_);
    old_state = global_mask.Test(event);
    if (mode == JVMTI_ENABLE) {
      env->event_masks.EnableEvent(env, thread, event);
      global_mask.Set(event);
      new_state = true;
    } else {
      DCHECK_EQ(mode, JVMTI_DISABLE);

      env->event_masks.DisableEvent(env, thread, event);
      RecalculateGlobalEventMaskLocked(event);
      new_state = global_mask.Test(event);
    }
  }

  // Handle any special work required for the event type.
  if (new_state != old_state) {
    HandleEventType(event, mode == JVMTI_ENABLE);
  }

  return ERR(NONE);
}

void EventHandler::HandleBreakpointEventsChanged(bool added) {
  if (added) {
    DeoptManager::Get()->AddDeoptimizationRequester();
  } else {
    DeoptManager::Get()->RemoveDeoptimizationRequester();
  }
}

void EventHandler::Shutdown() {
  // Need to remove the method_trace_listener_ if it's there.
  art::Thread* self = art::Thread::Current();
  art::gc::ScopedGCCriticalSection gcs(self,
                                       art::gc::kGcCauseInstrumentation,
                                       art::gc::kCollectorTypeInstrumentation);
  art::ScopedSuspendAll ssa("jvmti method tracing uninstallation");
  // Just remove every possible event.
  art::Runtime::Current()->GetInstrumentation()->RemoveListener(method_trace_listener_.get(), ~0);
}

EventHandler::EventHandler()
  : envs_lock_("JVMTI Environment List Lock", art::LockLevel::kTopLockLevel),
    frame_pop_enabled(false) {
  alloc_listener_.reset(new JvmtiAllocationListener(this));
  ddm_listener_.reset(new JvmtiDdmChunkListener(this));
  gc_pause_listener_.reset(new JvmtiGcPauseListener(this));
  method_trace_listener_.reset(new JvmtiMethodTraceListener(this));
  monitor_listener_.reset(new JvmtiMonitorListener(this));
  park_listener_.reset(new JvmtiParkListener(this));
}

EventHandler::~EventHandler() {
}

}  // namespace openjdkjvmti