/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <functional>
#include <optional>
#include <sstream>

#include <android-base/logging.h>

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/atomic.h"
#include "base/callee_save_type.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "jvalue.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nth_caller_visitor.h"
#include "oat_quick_method_header.h"
#include "runtime-inl.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

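// Wraps a reference return value in a JValue and forwards to the JValue-based MethodExited
// overload. Listeners are not expected to change the return value here.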
void InstrumentationListener::MethodExited(
    Thread* thread,
    Handle<mirror::Object> this_object,
    ArtMethod* method,
    uint32_t dex_pc,
    OptionalFrame frame,
    MutableHandle<mirror::Object>& return_value) {
  DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
            Primitive::kPrimNot);
  const void* original_ret = return_value.Get();
  JValue v;
  v.SetL(return_value.Get());
  MethodExited(thread, this_object, method, dex_pc, frame, v);
  DCHECK(original_ret == v.GetL()) << "Return value changed";
}

void InstrumentationListener::FieldWritten(Thread* thread,
                                           Handle<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc,
                                           ArtField* field,
                                           Handle<mirror::Object> field_value) {
  DCHECK(!field->IsPrimitiveType());
  JValue v;
  v.SetL(field_value.Get());
  FieldWritten(thread, this_object, method, dex_pc, field, v);
}

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};

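// RAII helper: when destroyed, erases all instrumentation stack frames whose key is at or below
// pop_until_.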
InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
    : self_(self),
      instrumentation_(Runtime::Current()->GetInstrumentation()),
      pop_until_(0u) {}

InstrumentationStackPopper::~InstrumentationStackPopper() {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until_;) {
    i = stack->erase(i);
  }
}

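// Pops instrumentation frames up to `stack_pointer`, reporting MethodUnwind events on the way.
// Returns false if a listener threw a new exception; in that case `exception` is updated to the
// new exception and only the frames visited so far are marked for popping.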
bool InstrumentationStackPopper::PopFramesTo(uintptr_t stack_pointer,
                                             MutableHandle<mirror::Throwable>& exception) {
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      self_->GetInstrumentationStack();
  DCHECK(!self_->IsExceptionPending());
  if (!instrumentation_->HasMethodUnwindListeners()) {
    pop_until_ = stack_pointer;
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self_->SetException(exception.Get());
  bool new_exception_thrown = false;
  auto i = stack->upper_bound(pop_until_);

  // Now pop all frames until reaching stack_pointer, or a new exception is
  // thrown. Note that `stack_pointer` doesn't need to be a return PC address
  // (in fact the exception handling code passes the start of the frame where
  // the catch handler is).
  for (; i != stack->end() && i->first <= stack_pointer; i++) {
    const InstrumentationStackFrame& frame = i->second;
    ArtMethod* method = frame.method_;
    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }
    if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
      instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
      new_exception_thrown = self_->GetException() != exception.Get();
      if (new_exception_thrown) {
        pop_until_ = i->first;
        break;
      }
    }
  }
  if (!new_exception_thrown) {
    pop_until_ = stack_pointer;
  }
  exception.Assign(self_->GetException());
  self_->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Did partial pop of frames due to new exception";
  }
  return !new_exception_thrown;
}

Instrumentation::Instrumentation()
    : current_force_deopt_id_(0),
      instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
                                                      kGenericBottomLock)),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false),
      can_use_instrumentation_trampolines_(true) {
}

void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
  if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regards to class inheritance.
  } else if (klass->IsErroneousResolved()) {
    // We can't execute code in an erroneous class: do nothing.
  } else {
    for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
      InstallStubsForMethod(&method);
    }
  }
}

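// Installs `quick_code` as the method's quick entrypoint. In debug builds, checks that
// JIT-compiled code on arm32 always carries the Thumb bit.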
static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kIsDebugBuild) {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
      // Ensure we always have the thumb entrypoint for JIT on arm32.
      if (kRuntimeISA == InstructionSet::kArm) {
        CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
      }
    }
  }
  method->SetEntryPointFromQuickCompiledCode(quick_code);
}

bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  art::Runtime* runtime = Runtime::Current();
  // If anything says we need the debug version or we are debuggable we will need the debug version
  // of the method.
  return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
          runtime->IsJavaDebuggable()) &&
         !method->IsNative() &&
         !method->IsProxyMethod();
}

void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO We should remove the need for this since it means we cannot always correctly detect calls
  //      to Proxy.<init>
  // Annoyingly this can be called before we have actually initialized WellKnownClasses so therefore
  // we also need to check this based on the declaring-class descriptor. The check is valid because
  // Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
      WellKnownClasses::java_lang_reflect_Proxy_init);
  if ((LIKELY(well_known_proxy_init != nullptr) && UNLIKELY(method == well_known_proxy_init)) ||
      UNLIKELY(method->IsConstructor() &&
               method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;"))) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = GetCodeForInvoke(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          // This needs to be checked first since the instrumentation entrypoint will be able to
          // find the actual JIT compiled code that corresponds to this method.
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else if (NeedDebugVersionFor(method)) {
          // It would be great to search the JIT for its implementation here but we cannot due to
          // the locks we hold. Instead just set to the interpreter bridge and that code will search
          // the JIT when it gets called and replace the entrypoint then.
          new_quick_code = GetQuickToInterpreterBridge();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
void InstrumentationInstallStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        uintptr_t instrumentation_exit_pc,
                        uint64_t force_deopt_id)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false),
          last_return_pc_(0),
          force_deopt_id_(force_deopt_id) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject().Ptr(),
                                                        m,
                                                        /*return_pc=*/ 0,
                                                        GetFrameId(),
                                                        interpreter_frame,
                                                        force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        auto it = instrumentation_stack_->find(GetReturnPcAddr());
        CHECK(it != instrumentation_stack_->end());
        const InstrumentationStackFrame& frame = it->second;
        if (m->IsRuntimeMethod()) {
          if (frame.interpreter_entry_) {
            // This instrumentation frame is for an interpreter bridge and is
            // pushed when executing the instrumented interpreter bridge. So method
            // enter event must have been reported. However we need to push a DEX pc
            // into the dex_pcs_ list to match size of instrumentation stack.
            uint32_t dex_pc = dex::kDexNoIndex;
            dex_pcs_.push_back(dex_pc);
            last_return_pc_ = frame.return_pc_;
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation or be interpreter on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
            << "Expected " << ArtMethod::PrettyMethod(m)
            << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          uint32_t dex_pc = dex::kDexNoIndex;
          if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
            dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(
                GetCurrentQuickFrame(), last_return_pc_);
          }
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc
                     << " dex pc: " << dex_pc;
          UNREACHABLE();
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
            m,
            return_pc,
            GetFrameId(),    // A runtime method still gets a frame id.
            false,
            force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        instrumentation_stack_->insert({GetReturnPcAddr(), instrumentation_frame});
        SetReturnPc(instrumentation_exit_pc_);
      }
      uint32_t dex_pc = dex::kDexNoIndex;
      if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
        dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(GetCurrentQuickFrame(), last_return_pc_);
      }
      dex_pcs_.push_back(dex_pc);
      last_return_pc_ = return_pc;
      return true;  // Continue.
    }
    std::map<uintptr_t, InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    uintptr_t last_return_pc_;
    uint64_t force_deopt_id_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(
      thread, context.get(), instrumentation_exit_pc, instrumentation->current_force_deopt_id_);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // TODO: This is the only place we make use of frame_id_. We should create a
    // std::vector instead and populate it as we walk the stack.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < isi->second.frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->second.interpreter_entry_ && !isi->second.method_->IsRuntimeMethod()) {
        instrumentation->MethodEnterEvent(
            thread, isi->second.this_object_, isi->second.method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

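// Marks instrumentation stubs as installed and installs instrumentation exit stubs on every
// quick frame of the given thread's stack.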
void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct RestoreStackVisitor final : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << ArtMethod::PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      auto it = instrumentation_stack_->find(GetReturnPcAddr());
      if (it != instrumentation_stack_->end()) {
        const InstrumentationStackFrame& instrumentation_frame = it->second;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
        }
        if (instrumentation_frame.interpreter_entry_) {
          CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
        } else {
          CHECK_EQ(m->GetNonObsoleteMethod(),
                   instrumentation_frame.method_->GetNonObsoleteMethod())
              << ArtMethod::PrettyMethod(m)
              << " and " << instrumentation_frame.method_->GetNonObsoleteMethod()->PrettyMethod();
        }
        SetReturnPc(instrumentation_frame.return_pc_);
        if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
            !m->IsRuntimeMethod()) {
          // Create the method exit events. As the methods didn't really exit the result is 0.
          // We only do this if no debugger is attached to prevent from posting events twice.
          JValue val;
          instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                            GetDexPc(), OptionalFrame{}, val);
        }
        frames_removed_++;
      } else {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
      thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    stack->clear();
  }
}

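// Instruments every thread's stack, then bumps current_force_deopt_id_ so the frames just pushed
// are attributed to a new forced-deoptimization request.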
void Instrumentation::DeoptimizeAllThreadFrames() {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  ThreadList* tl = Runtime::Current()->GetThreadList();
  tl->ForEach([&](Thread* t) {
    Locks::mutator_lock_->AssertExclusiveHeld(self);
    InstrumentThreadStack(t);
  });
  current_force_deopt_id_++;
}

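// Returns true if the expected event bit is set in the event mask.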
static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  // If there is a free slot in the list, we insert the listener in that slot.
  // Otherwise we add it to the end of the list.
  auto it = std::find(list.begin(), list.end(), nullptr);
  if (it != list.end()) {
    *it = listener;
  } else {
    list.push_back(listener);
  }
  Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionThrown,
                           events,
                           exception_thrown_listeners_,
                           listener,
                           &have_exception_thrown_listeners_);
  PotentiallyAddListenerTo(kWatchedFramePop,
                           events,
                           watched_frame_pop_listeners_,
                           listener,
                           &have_watched_frame_pop_listeners_);
  PotentiallyAddListenerTo(kExceptionHandled,
                           events,
                           exception_handled_listeners_,
                           listener,
                           &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}

static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  auto it = std::find(list.begin(), list.end(), listener);
  if (it != list.end()) {
    // Just update the entry, do not remove from the list. Removing entries in the list
    // is unsafe when mutators are iterating over it.
    *it = nullptr;
  }

  // Check if the list contains any non-null listener, and update 'has_listener'.
  for (InstrumentationListener* l : list) {
    if (l != nullptr) {
      Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
      return;
    }
  }
  Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = false; });
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionThrown,
                                events,
                                exception_thrown_listeners_,
                                listener,
                                &have_exception_thrown_listeners_);
  PotentiallyRemoveListenerFrom(kWatchedFramePop,
                                events,
                                watched_frame_pop_listeners_,
                                listener,
                                &have_watched_frame_pop_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionHandled,
                                events,
                                exception_handled_listeners_,
                                listener,
                                &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}

Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  if (interpreter_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInterpreter;
  } else if (entry_exit_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  } else {
    return InstrumentationLevel::kInstrumentNothing;
  }
}

bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
  // We need to reinstall instrumentation if we go to a different level.
  return GetCurrentInstrumentationLevel() != new_level;
}

void Instrumentation::UpdateInstrumentationLevels(InstrumentationLevel level) {
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    can_use_instrumentation_trampolines_ = false;
  }
  if (UNLIKELY(!can_use_instrumentation_trampolines_)) {
    for (auto& p : requested_instrumentation_levels_) {
      if (p.second == InstrumentationLevel::kInstrumentWithInstrumentationStubs) {
        p.second = InstrumentationLevel::kInstrumentWithInterpreter;
      }
    }
  }
}

void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  UpdateInstrumentationLevels(desired_level);
  UpdateStubs();
}

void Instrumentation::EnableSingleThreadDeopt() {
  // Single-thread deopt only uses interpreter.
  can_use_instrumentation_trampolines_ = false;
  UpdateInstrumentationLevels(InstrumentationLevel::kInstrumentWithInterpreter);
  UpdateStubs();
}

void Instrumentation::UpdateStubs() {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  DCHECK(can_use_instrumentation_trampolines_ ||
         requested_level != InstrumentationLevel::kInstrumentWithInstrumentationStubs)
      << "Use trampolines: " << can_use_instrumentation_trampolines_ << " level "
      << requested_level;

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      bool no_remaining_deopts = true;
      // Check that there are no other forced deoptimizations. Do it here so we only need to lock
      // thread_list_lock once.
      // The compiler gets confused on the thread annotations, so use
      // NO_THREAD_SAFETY_ANALYSIS. Note that we hold the mutator lock
      // exclusively at this point.
      Locks::mutator_lock_->AssertExclusiveHeld(self);
      runtime->GetThreadList()->ForEach([&](Thread* t) NO_THREAD_SAFETY_ANALYSIS {
        no_remaining_deopts =
            no_remaining_deopts && !t->IsForceInterpreter() &&
            std::all_of(t->GetInstrumentationStack()->cbegin(),
                        t->GetInstrumentationStack()->cend(),
                        [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
                          return frame.second.force_deopt_id_ == current_force_deopt_id_;
                        });
      });
      if (no_remaining_deopts) {
        Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
        // Only do this after restoring, as walking the stack when restoring will see
        // the instrumentation exit pc.
        instrumentation_stubs_installed_ = false;
      }
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread(kUseReadBarrier && thread->GetIsGcMarking());
}

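// Switches the allocation entrypoints between their instrumented and uninstrumented versions,
// suspending all threads when the runtime has already started.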
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    //       update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    //       constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

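// Picks and installs the entrypoint a method should use given the current instrumentation state:
// the interpreter bridge, the instrumentation entry stub, or the provided quick code.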
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_ &&
                 // We need to make sure not to replace anything that InstallStubsForMethod
                 // wouldn't. Specifically we cannot stub out Proxy.<init> since subtypes copy the
                 // implementation directly and this will confuse the instrumentation trampolines.
                 // TODO We should remove the need for this since it makes it impossible to profile
                 // Proxy.<init> correctly in all cases.
                 method != jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init)) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        if (!method->IsNative() && Runtime::Current()->GetJit() != nullptr) {
          // Native methods use trampoline entrypoints during interpreter tracing.
          DCHECK(!Runtime::Current()->GetJit()->GetCodeCache()->GetGarbageCollectCodeUnsafe());
          ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
          // Tracing will look at the saved entry point in the profiling info to know the actual
          // entrypoint, so we store it here.
          if (profiling_info != nullptr) {
            profiling_info->SetSavedEntryPoint(quick_code);
          }
        }
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code) {
  // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
  // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
  // the ArtMethod is still in memory.
  const void* new_quick_code = quick_code;
  if (UNLIKELY(instrumentation_stubs_installed_) && entry_exit_stubs_installed_) {
    new_quick_code = GetQuickInstrumentationEntryPoint();
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}

void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
  UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
}

void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                          const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}

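// Records the method in the deoptimized-methods set. Returns false if it was already present.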
Mathieu Chartiere401d142015-04-22 13:56:20 -0700978bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
979 if (IsDeoptimizedMethod(method)) {
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700980    // Already present in the set; nothing to do.
981 return false;
982 }
983 // Not found. Add it.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700984 deoptimized_methods_.insert(method);
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700985 return true;
986}
987
Mathieu Chartiere401d142015-04-22 13:56:20 -0700988bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
989 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700990}
991
Mathieu Chartiere401d142015-04-22 13:56:20 -0700992ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
993 if (deoptimized_methods_.empty()) {
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700994 // Empty.
995 return nullptr;
996 }
Mathieu Chartiere401d142015-04-22 13:56:20 -0700997 return *deoptimized_methods_.begin();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700998}
999
Mathieu Chartiere401d142015-04-22 13:56:20 -07001000bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
1001 auto it = deoptimized_methods_.find(method);
1002 if (it == deoptimized_methods_.end()) {
1003 return false;
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001004 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001005 deoptimized_methods_.erase(it);
1006 return true;
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001007}
1008
1009bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
1010 return deoptimized_methods_.empty();
1011}
1012
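// Forces `method` to run under instrumentation: the method is recorded in the deoptimized set
// and, unless interpreter stubs are already installed everywhere, its entrypoint is pointed at
// the instrumentation entry stub and instrumentation frames are installed on every thread.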
Mathieu Chartiere401d142015-04-22 13:56:20 -07001013void Instrumentation::Deoptimize(ArtMethod* method) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001014 CHECK(!method->IsNative());
1015 CHECK(!method->IsProxyMethod());
Alex Light9139e002015-10-09 15:59:48 -07001016 CHECK(method->IsInvokable());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001017
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001018 Thread* self = Thread::Current();
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001019 {
Andreas Gampe7e56a072018-11-29 10:40:06 -08001020 WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001021 bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
David Sehr709b0702016-10-13 09:12:37 -07001022 CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
Daniel Mihalyica1d06c2014-08-18 18:45:31 +02001023 << " is already deoptimized";
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001024 }
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001025 if (!interpreter_stubs_installed_) {
Elliott Hughes956af0f2014-12-11 14:34:28 -08001026 UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001027
1028    // Install the instrumentation exit stub and instrumentation frames. We may already have
1029    // installed these previously, in which case this only covers newly created frames.
1030 instrumentation_stubs_installed_ = true;
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001031 MutexLock mu(self, *Locks::thread_list_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001032 Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
1033 }
1034}
1035
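// Reverses a previous Deoptimize() call: removes `method` from the deoptimized set and, if full
// interpreter instrumentation is not installed, restores its entrypoint to the resolution stub,
// the interpreter bridge or its compiled code as appropriate. Once the set is empty and no
// entry/exit stubs are required, the instrumentation stack frames are removed as well.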
Mathieu Chartiere401d142015-04-22 13:56:20 -07001036void Instrumentation::Undeoptimize(ArtMethod* method) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001037 CHECK(!method->IsNative());
1038 CHECK(!method->IsProxyMethod());
Alex Light9139e002015-10-09 15:59:48 -07001039 CHECK(method->IsInvokable());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001040
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001041 Thread* self = Thread::Current();
1042 bool empty;
1043 {
Andreas Gampe7e56a072018-11-29 10:40:06 -08001044 WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001045 bool found_and_erased = RemoveDeoptimizedMethod(method);
David Sehr709b0702016-10-13 09:12:37 -07001046 CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001047 << " is not deoptimized";
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001048 empty = IsDeoptimizedMethodsEmpty();
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001049 }
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001050
1051 // Restore code and possibly stack only if we did not deoptimize everything.
1052 if (!interpreter_stubs_installed_) {
1053 // Restore its code or resolution trampoline.
1054 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Ian Rogersef7d42f2014-01-06 12:55:46 -08001055 if (method->IsStatic() && !method->IsConstructor() &&
1056 !method->GetDeclaringClass()->IsInitialized()) {
Elliott Hughes956af0f2014-12-11 14:34:28 -08001057 UpdateEntrypoints(method, GetQuickResolutionStub());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001058 } else {
Nicolas Geoffraya0619e22016-12-20 13:57:43 +00001059 const void* quick_code = NeedDebugVersionFor(method)
1060 ? GetQuickToInterpreterBridge()
Alex Lightfc49fec2018-01-16 22:28:36 +00001061 : class_linker->GetQuickOatCodeFor(method);
Elliott Hughes956af0f2014-12-11 14:34:28 -08001062 UpdateEntrypoints(method, quick_code);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001063 }
1064
1065 // If there is no deoptimized method left, we can restore the stack of each thread.
Alex Lightf244a572018-06-08 13:56:51 -07001066 if (empty && !entry_exit_stubs_installed_) {
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001067 MutexLock mu(self, *Locks::thread_list_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001068 Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
1069 instrumentation_stubs_installed_ = false;
1070 }
1071 }
1072}
1073
Mathieu Chartiere401d142015-04-22 13:56:20 -07001074bool Instrumentation::IsDeoptimized(ArtMethod* method) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001075 DCHECK(method != nullptr);
Andreas Gampe7e56a072018-11-29 10:40:06 -08001076 ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
Mathieu Chartiere401d142015-04-22 13:56:20 -07001077 return IsDeoptimizedMethod(method);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001078}
1079
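// EnableDeoptimization() marks deoptimization support as active (the deoptimized set must be
// empty at that point); DisableDeoptimization() below first undoes full deoptimization if it was
// requested, then undeoptimizes any remaining individually deoptimized methods.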
1080void Instrumentation::EnableDeoptimization() {
Andreas Gampe7e56a072018-11-29 10:40:06 -08001081 ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001082 CHECK(IsDeoptimizedMethodsEmpty());
Sebastien Hertz11d40c22014-02-19 18:00:17 +01001083 CHECK_EQ(deoptimization_enabled_, false);
1084 deoptimization_enabled_ = true;
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001085}
1086
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001087void Instrumentation::DisableDeoptimization(const char* key) {
Sebastien Hertz11d40c22014-02-19 18:00:17 +01001088 CHECK_EQ(deoptimization_enabled_, true);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001089 // If we deoptimized everything, undo it.
Alex Lightdba61482016-12-21 08:20:29 -08001090 InstrumentationLevel level = GetCurrentInstrumentationLevel();
1091 if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001092 UndeoptimizeEverything(key);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001093 }
1094  // Undeoptimize selected methods.
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001095 while (true) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001096 ArtMethod* method;
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001097 {
Andreas Gampe7e56a072018-11-29 10:40:06 -08001098 ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001099 if (IsDeoptimizedMethodsEmpty()) {
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001100 break;
1101 }
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001102 method = BeginDeoptimizedMethod();
1103 CHECK(method != nullptr);
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001104 }
1105 Undeoptimize(method);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001106 }
Sebastien Hertz11d40c22014-02-19 18:00:17 +01001107 deoptimization_enabled_ = false;
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001108}
1109
Sebastien Hertz11d40c22014-02-19 18:00:17 +01001110// Indicates whether instrumentation should notify listeners of method enter/exit events.
1111bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001112 if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
1113 return false;
1114 }
Sebastien Hertz7ec2f1c2014-03-27 20:06:47 +01001115 return !deoptimization_enabled_ && !interpreter_stubs_installed_;
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001116}
1117
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001118void Instrumentation::DeoptimizeEverything(const char* key) {
1119 CHECK(deoptimization_enabled_);
1120 ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001121}
1122
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001123void Instrumentation::UndeoptimizeEverything(const char* key) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001124 CHECK(interpreter_stubs_installed_);
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001125 CHECK(deoptimization_enabled_);
1126 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001127}
1128
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001129void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
1130 InstrumentationLevel level;
1131 if (needs_interpreter) {
1132 level = InstrumentationLevel::kInstrumentWithInterpreter;
1133 } else {
1134 level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
1135 }
1136 ConfigureStubs(key, level);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001137}
1138
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001139void Instrumentation::DisableMethodTracing(const char* key) {
1140 ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
jeffhao725a9572012-11-13 18:20:12 -08001141}
1142
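// Returns the code the instrumentation entrypoint should invoke for `method`: the method's own
// entrypoint when no instrumentation is installed, otherwise code from the oat file or the JIT,
// falling back to the interpreter bridge when required.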
Alex Light2d441b12018-06-08 15:33:21 -07001143const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) const {
1144  // Only called from the instrumentation entrypoint, which should never see proxy methods.
1145 DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
1146 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
1147 if (LIKELY(!instrumentation_stubs_installed_ && !interpreter_stubs_installed_)) {
1148 // In general we just return whatever the method thinks its entrypoint is here. The only
1149 // exception is if it still has the instrumentation entrypoint. That means we are racing another
1150    // thread getting rid of instrumentation, which is unexpected but possible. In that case we
1151    // fall through and try to get the code from the oat file or the JIT.
1152 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
1153 DCHECK(code != nullptr);
1154 if (code != GetQuickInstrumentationEntryPoint()) {
1155 return code;
1156 } else if (method->IsNative()) {
1157 return class_linker->GetQuickOatCodeFor(method);
1158 }
1159      // We don't know what it is. Fall through to try to find the code in the JIT or the oat file.
1160 } else if (method->IsNative()) {
1161 // TODO We could have JIT compiled native entrypoints. It might be worth it to find these.
1162 return class_linker->GetQuickOatCodeFor(method);
1163 } else if (UNLIKELY(interpreter_stubs_installed_)) {
1164 return GetQuickToInterpreterBridge();
1165 }
1166  // Since the method cannot be native (the checks above handle that case), we can always fall
1167  // back to the interpreter bridge.
1168 const void* result = GetQuickToInterpreterBridge();
1169 if (!NeedDebugVersionFor(method)) {
1170 // If we don't need a debug version we should see what the oat file/class linker has to say.
1171 result = class_linker->GetQuickOatCodeFor(method);
1172 }
1173  // If both of those fail, try the JIT.
1174 if (result == GetQuickToInterpreterBridge()) {
1175 jit::Jit* jit = Runtime::Current()->GetJit();
1176 if (jit != nullptr) {
1177 const void* res = jit->GetCodeCache()->FindCompiledCodeForInstrumentation(method);
1178 if (res != nullptr) {
1179 result = res;
1180 }
1181 }
1182 }
1183 return result;
1184}
1185
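// Returns quick code suitable for executing `method`: its own entrypoint when that is real code
// and no instrumentation stubs are installed, otherwise whatever the class linker reports.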
Andreas Gampe542451c2016-07-26 09:02:02 -07001186const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
Vladimir Marko97d7e1c2016-10-04 14:44:28 +01001187 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Ian Rogers62d6c772013-02-27 08:32:07 -08001188 if (LIKELY(!instrumentation_stubs_installed_)) {
Mathieu Chartiera7dd0382014-11-20 17:08:58 -08001189 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
Vladimir Marko8a630572014-04-09 18:45:35 +01001190 DCHECK(code != nullptr);
Ian Rogers6f3dbba2014-10-14 17:41:57 -07001191    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
1192               !class_linker->IsQuickToInterpreterBridge(code))) {
Ian Rogers62d6c772013-02-27 08:32:07 -08001195 return code;
1196 }
1197 }
Alex Lightfc49fec2018-01-16 22:28:36 +00001198 return class_linker->GetQuickOatCodeFor(method);
jeffhao725a9572012-11-13 18:20:12 -08001199}
1200
Alex Lightd7661582017-05-01 13:48:16 -07001201void Instrumentation::MethodEnterEventImpl(Thread* thread,
1202 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001203 ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -08001204 uint32_t dex_pc) const {
Mingyao Yang2ee17902017-08-30 11:37:08 -07001205 DCHECK(!method->IsRuntimeMethod());
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001206 if (HasMethodEntryListeners()) {
Alex Lightd7661582017-05-01 13:48:16 -07001207 Thread* self = Thread::Current();
1208 StackHandleScope<1> hs(self);
1209 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001210 for (InstrumentationListener* listener : method_entry_listeners_) {
1211 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001212 listener->MethodEntered(thread, thiz, method, dex_pc);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001213 }
1214 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001215 }
1216}
1217
Alex Lightb7c640d2019-03-20 15:52:13 -07001218template <>
Alex Lightd7661582017-05-01 13:48:16 -07001219void Instrumentation::MethodExitEventImpl(Thread* thread,
1220 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001221 ArtMethod* method,
Alex Lightd7661582017-05-01 13:48:16 -07001222 uint32_t dex_pc,
Alex Lightb7c640d2019-03-20 15:52:13 -07001223 OptionalFrame frame,
1224 MutableHandle<mirror::Object>& return_value) const {
1225 if (HasMethodExitListeners()) {
1226 Thread* self = Thread::Current();
1227 StackHandleScope<1> hs(self);
1228 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1229 for (InstrumentationListener* listener : method_exit_listeners_) {
1230 if (listener != nullptr) {
1231 listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
1232 }
1233 }
1234 }
1235}
1236
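// JValue overload: non-reference return values are forwarded to the listeners directly, while
// reference return values are wrapped in a handle (so they survive suspension in a listener)
// and dispatched through the handle-based overload above.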
1237template<> void Instrumentation::MethodExitEventImpl(Thread* thread,
1238 ObjPtr<mirror::Object> this_object,
1239 ArtMethod* method,
1240 uint32_t dex_pc,
1241 OptionalFrame frame,
1242 JValue& return_value) const {
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001243 if (HasMethodExitListeners()) {
Alex Lightd7661582017-05-01 13:48:16 -07001244 Thread* self = Thread::Current();
1245 StackHandleScope<2> hs(self);
1246 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Alex Lightb7c640d2019-03-20 15:52:13 -07001247 if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
1248 Primitive::kPrimNot) {
Alex Lightd7661582017-05-01 13:48:16 -07001249 for (InstrumentationListener* listener : method_exit_listeners_) {
1250 if (listener != nullptr) {
Alex Lightb7c640d2019-03-20 15:52:13 -07001251 listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
Alex Lightd7661582017-05-01 13:48:16 -07001252 }
1253 }
1254 } else {
Alex Lightb7c640d2019-03-20 15:52:13 -07001255 MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
1256 MethodExitEventImpl(thread, thiz.Get(), method, dex_pc, frame, ret);
1257 return_value.SetL(ret.Get());
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001258 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001259 }
1260}
1261
Alex Lightd7661582017-05-01 13:48:16 -07001262void Instrumentation::MethodUnwindEvent(Thread* thread,
Vladimir Marko19711d42019-04-12 14:05:34 +01001263 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001264 ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -08001265 uint32_t dex_pc) const {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +02001266 if (HasMethodUnwindListeners()) {
Alex Lightd7661582017-05-01 13:48:16 -07001267 Thread* self = Thread::Current();
1268 StackHandleScope<1> hs(self);
1269 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Mathieu Chartier02e25112013-08-14 16:14:24 -07001270 for (InstrumentationListener* listener : method_unwind_listeners_) {
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001271 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001272 listener->MethodUnwind(thread, thiz, method, dex_pc);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001273 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001274 }
1275 }
1276}
1277
Alex Lightd7661582017-05-01 13:48:16 -07001278void Instrumentation::DexPcMovedEventImpl(Thread* thread,
1279 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001280 ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -08001281 uint32_t dex_pc) const {
Alex Lightd7661582017-05-01 13:48:16 -07001282 Thread* self = Thread::Current();
1283 StackHandleScope<1> hs(self);
1284 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001285 for (InstrumentationListener* listener : dex_pc_listeners_) {
1286 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001287 listener->DexPcMoved(thread, thiz, method, dex_pc);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001288 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001289 }
1290}
1291
Nicolas Geoffray81f0f952016-01-20 16:25:19 +00001292void Instrumentation::BranchImpl(Thread* thread,
1293 ArtMethod* method,
1294 uint32_t dex_pc,
1295 int32_t offset) const {
1296 for (InstrumentationListener* listener : branch_listeners_) {
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001297 if (listener != nullptr) {
Nicolas Geoffray81f0f952016-01-20 16:25:19 +00001298 listener->Branch(thread, method, dex_pc, offset);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001299 }
Mathieu Chartiere5f13e52015-02-24 09:37:21 -08001300 }
1301}
1302
Alex Lighte814f9d2017-07-31 16:14:39 -07001303void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
1304 for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
1305 if (listener != nullptr) {
1306 listener->WatchedFramePop(thread, frame);
1307 }
1308 }
1309}
1310
Alex Lightd7661582017-05-01 13:48:16 -07001311void Instrumentation::FieldReadEventImpl(Thread* thread,
1312 ObjPtr<mirror::Object> this_object,
1313 ArtMethod* method,
1314 uint32_t dex_pc,
Mathieu Chartierc7853442015-03-27 14:35:38 -07001315 ArtField* field) const {
Alex Lightd7661582017-05-01 13:48:16 -07001316 Thread* self = Thread::Current();
1317 StackHandleScope<1> hs(self);
1318 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001319 for (InstrumentationListener* listener : field_read_listeners_) {
1320 if (listener != nullptr) {
Alex Lightd7661582017-05-01 13:48:16 -07001321 listener->FieldRead(thread, thiz, method, dex_pc, field);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001322 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +02001323 }
1324}
1325
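// Field writes are reported together with the new value: primitive values are passed through
// as-is, reference values are wrapped in a handle first so listeners may safely suspend.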
Alex Lightd7661582017-05-01 13:48:16 -07001326void Instrumentation::FieldWriteEventImpl(Thread* thread,
1327 ObjPtr<mirror::Object> this_object,
1328 ArtMethod* method,
1329 uint32_t dex_pc,
1330 ArtField* field,
1331 const JValue& field_value) const {
1332 Thread* self = Thread::Current();
1333 StackHandleScope<2> hs(self);
1334 Handle<mirror::Object> thiz(hs.NewHandle(this_object));
1335 if (field->IsPrimitiveType()) {
1336 for (InstrumentationListener* listener : field_write_listeners_) {
1337 if (listener != nullptr) {
1338 listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
1339 }
1340 }
1341 } else {
1342 Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
1343 for (InstrumentationListener* listener : field_write_listeners_) {
1344 if (listener != nullptr) {
1345 listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
1346 }
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001347 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +02001348 }
1349}
1350
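// The pending exception is cleared while the listeners run, so they can execute managed code,
// and is restored afterwards; listeners must not leave a new exception pending.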
Alex Light6e1607e2017-08-23 10:06:18 -07001351void Instrumentation::ExceptionThrownEvent(Thread* thread,
Vladimir Marko19711d42019-04-12 14:05:34 +01001352 ObjPtr<mirror::Throwable> exception_object) const {
Hiroshi Yamauchi3481f7a2017-02-10 12:07:36 -08001353 Thread* self = Thread::Current();
1354 StackHandleScope<1> hs(self);
1355 Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
Alex Light6e1607e2017-08-23 10:06:18 -07001356 if (HasExceptionThrownListeners()) {
Hiroshi Yamauchi3481f7a2017-02-10 12:07:36 -08001357 DCHECK_EQ(thread->GetException(), h_exception.Get());
Jeff Haoc0bd4da2013-04-11 15:52:28 -07001358 thread->ClearException();
Alex Light6e1607e2017-08-23 10:06:18 -07001359 for (InstrumentationListener* listener : exception_thrown_listeners_) {
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001360 if (listener != nullptr) {
Alex Light6e1607e2017-08-23 10:06:18 -07001361 listener->ExceptionThrown(thread, h_exception);
Nicolas Geoffray514a6162015-11-03 11:44:24 +00001362 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001363 }
Alex Light9fb1ab12017-09-05 09:32:49 -07001364 // See b/65049545 for discussion about this behavior.
1365 thread->AssertNoPendingException();
Hiroshi Yamauchi3481f7a2017-02-10 12:07:36 -08001366 thread->SetException(h_exception.Get());
Ian Rogers62d6c772013-02-27 08:32:07 -08001367 }
1368}
1369
Alex Light9fb1ab12017-09-05 09:32:49 -07001370void Instrumentation::ExceptionHandledEvent(Thread* thread,
Vladimir Marko19711d42019-04-12 14:05:34 +01001371 ObjPtr<mirror::Throwable> exception_object) const {
Alex Light9fb1ab12017-09-05 09:32:49 -07001372 Thread* self = Thread::Current();
1373 StackHandleScope<1> hs(self);
1374 Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
1375 if (HasExceptionHandledListeners()) {
1376 // We should have cleared the exception so that callers can detect a new one.
1377 DCHECK(thread->GetException() == nullptr);
1378 for (InstrumentationListener* listener : exception_handled_listeners_) {
1379 if (listener != nullptr) {
1380 listener->ExceptionHandled(thread, h_exception);
1381 }
1382 }
1383 }
1384}
1385
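// Records an instrumentation frame for `method`, keyed by the stack slot holding the return pc,
// after delivering the method-enter event. If that event throws, no frame is pushed and a
// method-unwind event is sent instead.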
Vladimir Marko19711d42019-04-12 14:05:34 +01001386void Instrumentation::PushInstrumentationStackFrame(Thread* self,
1387 ObjPtr<mirror::Object> this_object,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001388 ArtMethod* method,
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001389 uintptr_t stack_ptr,
Vladimir Marko19711d42019-04-12 14:05:34 +01001390 uintptr_t lr,
1391 bool interpreter_entry) {
Alex Lightb7edcda2017-04-27 13:20:31 -07001392 DCHECK(!self->IsExceptionPending());
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001393 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
1394 self->GetInstrumentationStack();
Ian Rogers62d6c772013-02-27 08:32:07 -08001395 if (kVerboseInstrumentation) {
David Sehr709b0702016-10-13 09:12:37 -07001396 LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
1397 << reinterpret_cast<void*>(lr);
Ian Rogers62d6c772013-02-27 08:32:07 -08001398 }
Alex Lightb7edcda2017-04-27 13:20:31 -07001399
1400 // We send the enter event before pushing the instrumentation frame to make cleanup easier. If the
1401 // event causes an exception we can simply send the unwind event and return.
1402 StackHandleScope<1> hs(self);
1403 Handle<mirror::Object> h_this(hs.NewHandle(this_object));
1404 if (!interpreter_entry) {
1405 MethodEnterEvent(self, h_this.Get(), method, 0);
1406 if (self->IsExceptionPending()) {
1407 MethodUnwindEvent(self, h_this.Get(), method, 0);
1408 return;
1409 }
1410 }
1411
1412  // We have a callee-save frame, so the frame id computed below is guaranteed to never be 0.
1413 DCHECK(!self->IsExceptionPending());
1414 size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);
1415
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00001416 instrumentation::InstrumentationStackFrame instrumentation_frame(
1417 h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001418 stack->insert({stack_ptr, instrumentation_frame});
Ian Rogers62d6c772013-02-27 08:32:07 -08001419}
1420
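// Determines how a deoptimization at this method should treat the current dex pc: only the
// clinit and suspend-check "save everything" runtime methods need kKeepDexPc; everything else
// uses the default behavior.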
Mingyao Yang2ee17902017-08-30 11:37:08 -07001421DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
1422 if (method->IsRuntimeMethod()) {
1423 // Certain methods have strict requirement on whether the dex instruction
1424 // should be re-executed upon deoptimization.
1425 if (method == Runtime::Current()->GetCalleeSaveMethod(
1426 CalleeSaveType::kSaveEverythingForClinit)) {
1427 return DeoptimizationMethodType::kKeepDexPc;
1428 }
1429 if (method == Runtime::Current()->GetCalleeSaveMethod(
1430 CalleeSaveType::kSaveEverythingForSuspendCheck)) {
1431 return DeoptimizationMethodType::kKeepDexPc;
1432 }
1433 }
1434 return DeoptimizationMethodType::kDefault;
1435}
1436
1437// Try to get the shorty of a runtime method if it's an invocation stub.
Andreas Gampec7d878d2018-11-19 18:42:06 +00001438static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
1439 char shorty = 'V';
1440 StackVisitor::WalkStack(
1441 [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
1442 ArtMethod* m = stack_visitor->GetMethod();
1443 if (m == nullptr || m->IsRuntimeMethod()) {
1444 return true;
Andreas Gampe3d477f32018-11-16 16:40:45 +00001445 }
Andreas Gampec7d878d2018-11-19 18:42:06 +00001446 // The first Java method.
1447 if (m->IsNative()) {
1448 // Use JNI method's shorty for the jni stub.
1449 shorty = m->GetShorty()[0];
1450 } else if (m->IsProxyMethod()) {
1451 // Proxy method just invokes its proxied method via
1452 // art_quick_proxy_invoke_handler.
1453 shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
1454 } else {
1455 const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
1456 if (instr.IsInvoke()) {
1457 auto get_method_index_fn = [](ArtMethod* caller,
1458 const Instruction& inst,
1459 uint32_t dex_pc)
1460 REQUIRES_SHARED(Locks::mutator_lock_) {
1461 switch (inst.Opcode()) {
1462 case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
1463 case Instruction::INVOKE_VIRTUAL_QUICK: {
1464 uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
1465 CHECK_NE(method_idx, DexFile::kDexNoIndex16);
1466 return method_idx;
1467 }
1468 default: {
1469 return static_cast<uint16_t>(inst.VRegB());
1470 }
1471 }
1472 };
Nicolas Geoffrayec43a012018-11-17 13:10:40 +00001473
Andreas Gampec7d878d2018-11-19 18:42:06 +00001474 uint16_t method_index = get_method_index_fn(m, instr, stack_visitor->GetDexPc());
1475 const DexFile* dex_file = m->GetDexFile();
1476 if (interpreter::IsStringInit(dex_file, method_index)) {
1477 // Invoking string init constructor is turned into invoking
1478 // StringFactory.newStringFromChars() which returns a string.
1479 shorty = 'L';
1480 } else {
1481 shorty = dex_file->GetMethodShorty(method_index)[0];
1482 }
1483
1484 } else {
1485 // It could be that a non-invoke opcode invokes a stub, which in turn
1486 // invokes Java code. In such cases, we should never expect a return
1487 // value from the stub.
1488 }
1489 }
1490 // Stop stack walking since we've seen a Java frame.
1491 return false;
1492 },
1493 thread,
1494 /* context= */ nullptr,
1495 art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
1496 return shorty;
1497}
Mingyao Yang2ee17902017-08-30 11:37:08 -07001498
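// Called when execution returns through the instrumentation exit stub: pops the frame keyed by
// `return_pc_addr`, delivers the method-exit event with the return value taken from the GPR/FPR
// results, and either resumes at the caller's original return pc or redirects to the
// deoptimization entrypoint when the caller must continue in the interpreter.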
Alex Lightb7edcda2017-04-27 13:20:31 -07001499TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001500 uintptr_t* return_pc_addr,
Alex Lightb7edcda2017-04-27 13:20:31 -07001501 uint64_t* gpr_result,
1502 uint64_t* fpr_result) {
1503 DCHECK(gpr_result != nullptr);
1504 DCHECK(fpr_result != nullptr);
Ian Rogers62d6c772013-02-27 08:32:07 -08001505 // Do the pop.
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001506 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
1507 self->GetInstrumentationStack();
Ian Rogers62d6c772013-02-27 08:32:07 -08001508 CHECK_GT(stack->size(), 0U);
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001509 auto it = stack->find(reinterpret_cast<uintptr_t>(return_pc_addr));
1510 CHECK(it != stack->end());
1511 InstrumentationStackFrame instrumentation_frame = it->second;
1512 stack->erase(it);
Ian Rogers62d6c772013-02-27 08:32:07 -08001513
1514 // Set return PC and check the sanity of the stack.
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001515 // We don't cache the return pc value in a local as it may change after
1516 // sending a method exit event.
1517 *return_pc_addr = instrumentation_frame.return_pc_;
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07001518 self->VerifyStack();
Ian Rogers62d6c772013-02-27 08:32:07 -08001519
Mathieu Chartiere401d142015-04-22 13:56:20 -07001520 ArtMethod* method = instrumentation_frame.method_;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001521 uint32_t length;
Andreas Gampe542451c2016-07-26 09:02:02 -07001522 const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
Mingyao Yang2ee17902017-08-30 11:37:08 -07001523 char return_shorty;
1524
1525  // A runtime method does not call into MethodExitEvent(), so there should not be a
1526  // suspension point below.
1527 ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
1528 if (method->IsRuntimeMethod()) {
1529 if (method != Runtime::Current()->GetCalleeSaveMethod(
1530 CalleeSaveType::kSaveEverythingForClinit)) {
1531 // If the caller is at an invocation point and the runtime method is not
1532 // for clinit, we need to pass return results to the caller.
1533 // We need the correct shorty to decide whether we need to pass the return
1534 // result for deoptimization below.
Andreas Gampec7d878d2018-11-19 18:42:06 +00001535 return_shorty = GetRuntimeMethodShorty(self);
Mingyao Yang2ee17902017-08-30 11:37:08 -07001536 } else {
1537      // Some runtime methods, such as allocations, unresolved field getters, etc.,
1538      // have return values. We don't need to set return_value since MethodExitEvent()
1539 // below isn't called for runtime methods. Deoptimization doesn't need the
1540 // value either since the dex instruction will be re-executed by the
1541 // interpreter, except these two cases:
1542 // (1) For an invoke, which is handled above to get the correct shorty.
1543 // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
1544 // idempotent. However there is no return value for it anyway.
1545 return_shorty = 'V';
1546 }
1547 } else {
1548 return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
1549 }
1550
Alex Lightb7edcda2017-04-27 13:20:31 -07001551 bool is_ref = return_shorty == '[' || return_shorty == 'L';
1552 StackHandleScope<1> hs(self);
1553 MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
Ian Rogers62d6c772013-02-27 08:32:07 -08001554 JValue return_value;
1555 if (return_shorty == 'V') {
1556 return_value.SetJ(0);
1557 } else if (return_shorty == 'F' || return_shorty == 'D') {
Alex Lightb7edcda2017-04-27 13:20:31 -07001558 return_value.SetJ(*fpr_result);
Ian Rogers62d6c772013-02-27 08:32:07 -08001559 } else {
Alex Lightb7edcda2017-04-27 13:20:31 -07001560 return_value.SetJ(*gpr_result);
1561 }
1562 if (is_ref) {
1563 // Take a handle to the return value so we won't lose it if we suspend.
1564 res.Assign(return_value.GetL());
Ian Rogers62d6c772013-02-27 08:32:07 -08001565 }
1566 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1567 // return_pc.
Andreas Gampee2abbc62017-09-15 11:59:26 -07001568 uint32_t dex_pc = dex::kDexNoIndex;
Mingyao Yang2ee17902017-08-30 11:37:08 -07001569 if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
Vladimir Marko19711d42019-04-12 14:05:34 +01001570 ObjPtr<mirror::Object> this_object = instrumentation_frame.this_object_;
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001571 // Note that sending the event may change the contents of *return_pc_addr.
Alex Lightb7c640d2019-03-20 15:52:13 -07001572 MethodExitEvent(
1573 self, this_object, instrumentation_frame.method_, dex_pc, OptionalFrame{}, return_value);
Sebastien Hertz320deb22014-06-11 19:45:05 +02001574 }
jeffhao725a9572012-11-13 18:20:12 -08001575
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001576 // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
1577 // back to an upcall.
1578 NthCallerVisitor visitor(self, 1, true);
1579 visitor.WalkStack(true);
Sebastien Hertz270a0e12015-01-16 19:49:09 +01001580 bool deoptimize = (visitor.caller != nullptr) &&
Daniel Mihalyieb076692014-08-22 17:33:31 +02001581 (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
Alex Light3dacdd62019-03-12 15:45:47 +00001582 self->IsForceInterpreter() ||
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00001583      // NB: Since structurally obsolete compiled methods might have the offsets of
1584      // methods/fields compiled in, we need to go back to the interpreter whenever we hit
1585      // them.
1586 visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
1587 // Check if we forced all threads to deoptimize in the time between this frame
1588 // being created and now.
1589 instrumentation_frame.force_deopt_id_ != current_force_deopt_id_ ||
Daniel Mihalyieb076692014-08-22 17:33:31 +02001590 Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
Alex Lightb7edcda2017-04-27 13:20:31 -07001591 if (is_ref) {
1592 // Restore the return value if it's a reference since it might have moved.
1593 *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
1594 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001595 if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
Ian Rogers62d6c772013-02-27 08:32:07 -08001596 if (kVerboseInstrumentation) {
Andreas Gampe46ee31b2016-12-14 10:11:49 -08001597 LOG(INFO) << "Deoptimizing "
1598 << visitor.caller->PrettyMethod()
1599 << " by returning from "
1600 << method->PrettyMethod()
1601 << " with result "
1602 << std::hex << return_value.GetJ() << std::dec
1603 << " in "
1604 << *self;
Ian Rogers62d6c772013-02-27 08:32:07 -08001605 }
Mingyao Yang2ee17902017-08-30 11:37:08 -07001606 DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
Nicolas Geoffray73be1e82015-09-17 15:22:56 +01001607 self->PushDeoptimizationContext(return_value,
Mingyao Yang2ee17902017-08-30 11:37:08 -07001608 return_shorty == 'L' || return_shorty == '[',
Andreas Gampe98ea9d92018-10-19 14:06:15 -07001609                                    /* exception= */ nullptr,
1610 /* from_code= */ false,
Mingyao Yang2ee17902017-08-30 11:37:08 -07001611 deopt_method_type);
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001612 return GetTwoWordSuccessValue(*return_pc_addr,
Andreas Gamped58342c2014-06-05 14:18:08 -07001613 reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
Ian Rogers62d6c772013-02-27 08:32:07 -08001614 } else {
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001615 if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc_addr)) {
Alex Lightd8eb6732018-01-29 15:16:02 -08001616 VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001617 << " at PC " << reinterpret_cast<void*>(*return_pc_addr);
Nicolas Geoffraya0619e22016-12-20 13:57:43 +00001618 }
Ian Rogers62d6c772013-02-27 08:32:07 -08001619 if (kVerboseInstrumentation) {
David Sehr709b0702016-10-13 09:12:37 -07001620 LOG(INFO) << "Returning from " << method->PrettyMethod()
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001621 << " to PC " << reinterpret_cast<void*>(*return_pc_addr);
Ian Rogers62d6c772013-02-27 08:32:07 -08001622 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001623 return GetTwoWordSuccessValue(0, *return_pc_addr);
Ian Rogers62d6c772013-02-27 08:32:07 -08001624 }
jeffhao725a9572012-11-13 18:20:12 -08001625}
1626
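// Removes every instrumentation frame whose key (the address of the stack slot holding the
// return pc) is at most `pop_until` and returns the return pc of the last frame removed; used
// when those frames are about to be deoptimized.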
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001627uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, uintptr_t pop_until) const {
1628 std::map<uintptr_t, instrumentation::InstrumentationStackFrame>* stack =
1629 self->GetInstrumentationStack();
1630 // Pop all instrumentation frames below `pop_until`.
1631 uintptr_t return_pc = 0u;
1632 for (auto i = stack->begin(); i != stack->end() && i->first <= pop_until;) {
1633 auto e = i;
1634 ++i;
1635 if (kVerboseInstrumentation) {
1636 LOG(INFO) << "Popping for deoptimization " << e->second.method_->PrettyMethod();
Mingyao Yang2ee17902017-08-30 11:37:08 -07001637 }
Nicolas Geoffraye91e7952020-01-23 10:15:56 +00001638 return_pc = e->second.return_pc_;
1639 stack->erase(e);
Ian Rogers62d6c772013-02-27 08:32:07 -08001640 }
Alex Light2c8206f2018-06-08 14:51:09 -07001641 return return_pc;
Ian Rogers62d6c772013-02-27 08:32:07 -08001642}
1643
1644std::string InstrumentationStackFrame::Dump() const {
1645 std::ostringstream os;
David Sehr709b0702016-10-13 09:12:37 -07001646 os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
Nicolas Geoffray4ac0e152019-09-18 06:14:50 +00001647 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
1648 << " force_deopt_id=" << force_deopt_id_;
Ian Rogers62d6c772013-02-27 08:32:07 -08001649 return os.str();
1650}
1651
1652} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -08001653} // namespace art