/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <functional>
#include <optional>
#include <sstream>

#include <android-base/logging.h>

#include "arch/context.h"
#include "art_field-inl.h"
#include "art_method-inl.h"
#include "base/atomic.h"
#include "base/callee_save_type.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "interpreter/interpreter_common.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "jvalue-inl.h"
#include "jvalue.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "nth_caller_visitor.h"
#include "oat_quick_method_header.h"
#include "runtime-inl.h"
#include "thread.h"
#include "thread_list.h"

namespace art {
namespace instrumentation {

constexpr bool kVerboseInstrumentation = false;

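// Default implementation for listeners observing a method that returns a reference: box the
// returned object into a JValue and forward to the JValue-based MethodExited overload, then
// check that the listener did not replace the returned object.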
void InstrumentationListener::MethodExited(
    Thread* thread,
    Handle<mirror::Object> this_object,
    ArtMethod* method,
    uint32_t dex_pc,
    OptionalFrame frame,
    MutableHandle<mirror::Object>& return_value) {
  DCHECK_EQ(method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive(),
            Primitive::kPrimNot);
  const void* original_ret = return_value.Get();
  JValue v;
  v.SetL(return_value.Get());
  MethodExited(thread, this_object, method, dex_pc, frame, v);
  DCHECK(original_ret == v.GetL()) << "Return value changed";
}

void InstrumentationListener::FieldWritten(Thread* thread,
                                           Handle<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc,
                                           ArtField* field,
                                           Handle<mirror::Object> field_value) {
  DCHECK(!field->IsPrimitiveType());
  JValue v;
  v.SetL(field_value.Get());
  FieldWritten(thread, this_object, method, dex_pc, field, v);
}

// Instrumentation works on non-inlined frames by updating returned PCs
// of compiled frames.
static constexpr StackVisitor::StackWalkKind kInstrumentationStackWalk =
    StackVisitor::StackWalkKind::kSkipInlinedFrames;

class InstallStubsClassVisitor : public ClassVisitor {
 public:
  explicit InstallStubsClassVisitor(Instrumentation* instrumentation)
      : instrumentation_(instrumentation) {}

  bool operator()(ObjPtr<mirror::Class> klass) override REQUIRES(Locks::mutator_lock_) {
    instrumentation_->InstallStubsForClass(klass.Ptr());
    return true;  // we visit all classes.
  }

 private:
  Instrumentation* const instrumentation_;
};

InstrumentationStackPopper::InstrumentationStackPopper(Thread* self)
      : self_(self),
        instrumentation_(Runtime::Current()->GetInstrumentation()),
        frames_to_remove_(0) {}

InstrumentationStackPopper::~InstrumentationStackPopper() {
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self_->GetInstrumentationStack();
  for (size_t i = 0; i < frames_to_remove_; i++) {
    stack->pop_front();
  }
}

bool InstrumentationStackPopper::PopFramesTo(uint32_t desired_pops,
                                             MutableHandle<mirror::Throwable>& exception) {
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self_->GetInstrumentationStack();
  DCHECK_LE(frames_to_remove_, desired_pops);
  DCHECK_GE(stack->size(), desired_pops);
  DCHECK(!self_->IsExceptionPending());
  if (!instrumentation_->HasMethodUnwindListeners()) {
    frames_to_remove_ = desired_pops;
    return true;
  }
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Popping frames for exception " << exception->Dump();
  }
  // The instrumentation events expect the exception to be set.
  self_->SetException(exception.Get());
  bool new_exception_thrown = false;
  for (; frames_to_remove_ < desired_pops && !new_exception_thrown; frames_to_remove_++) {
    InstrumentationStackFrame frame = stack->at(frames_to_remove_);
    ArtMethod* method = frame.method_;
    // Notify listeners of method unwind.
    // TODO: improve the dex_pc information here.
    uint32_t dex_pc = dex::kDexNoIndex;
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << method->PrettyMethod();
    }
    if (!method->IsRuntimeMethod() && !frame.interpreter_entry_) {
      instrumentation_->MethodUnwindEvent(self_, frame.this_object_, method, dex_pc);
      new_exception_thrown = self_->GetException() != exception.Get();
    }
  }
  exception.Assign(self_->GetException());
  self_->ClearException();
  if (kVerboseInstrumentation && new_exception_thrown) {
    LOG(INFO) << "Failed to pop " << (desired_pops - frames_to_remove_)
              << " frames due to new exception";
  }
  return !new_exception_thrown;
}

Instrumentation::Instrumentation()
    : current_force_deopt_id_(0),
      instrumentation_stubs_installed_(false),
      entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false),
      forced_interpret_only_(false),
      have_method_entry_listeners_(false),
      have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false),
      have_dex_pc_listeners_(false),
      have_field_read_listeners_(false),
      have_field_write_listeners_(false),
      have_exception_thrown_listeners_(false),
      have_watched_frame_pop_listeners_(false),
      have_branch_listeners_(false),
      have_exception_handled_listeners_(false),
      deoptimized_methods_lock_(new ReaderWriterMutex("deoptimized methods lock",
                                                      kGenericBottomLock)),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0),
      alloc_entrypoints_instrumented_(false),
      can_use_instrumentation_trampolines_(true) {
}

void Instrumentation::InstallStubsForClass(ObjPtr<mirror::Class> klass) {
  if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regards to class inheritance.
  } else if (klass->IsErroneousResolved()) {
    // We can't execute code in an erroneous class: do nothing.
  } else {
    for (ArtMethod& method : klass->GetMethods(kRuntimePointerSize)) {
      InstallStubsForMethod(&method);
    }
  }
}

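// Installs `quick_code` as the method's quick-compiled-code entrypoint. In debug builds, also
// checks that JIT code on arm32 keeps the thumb bit set in the entrypoint address.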
static void UpdateEntrypoints(ArtMethod* method, const void* quick_code)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (kIsDebugBuild) {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr && jit->GetCodeCache()->ContainsPc(quick_code)) {
      // Ensure we always have the thumb entrypoint for JIT on arm32.
      if (kRuntimeISA == InstructionSet::kArm) {
        CHECK_EQ(reinterpret_cast<uintptr_t>(quick_code) & 1, 1u);
      }
    }
  }
  method->SetEntryPointFromQuickCompiledCode(quick_code);
}

bool Instrumentation::NeedDebugVersionFor(ArtMethod* method) const
    REQUIRES_SHARED(Locks::mutator_lock_) {
  art::Runtime* runtime = Runtime::Current();
  // If anything says we need the debug version or we are debuggable we will need the debug version
  // of the method.
  return (runtime->GetRuntimeCallbacks()->MethodNeedsDebugVersion(method) ||
          runtime->IsJavaDebuggable()) &&
         !method->IsNative() &&
         !method->IsProxyMethod();
}

void Instrumentation::InstallStubsForMethod(ArtMethod* method) {
  if (!method->IsInvokable() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  // TODO: We should remove the need for this since it means we cannot always correctly detect
  // calls to Proxy.<init>.
  // Annoyingly this can be called before we have actually initialized WellKnownClasses, so we
  // also need to check this based on the declaring-class descriptor. The check is valid because
  // Proxy only has a single constructor.
  ArtMethod* well_known_proxy_init = jni::DecodeArtMethod(
      WellKnownClasses::java_lang_reflect_Proxy_init);
  if ((LIKELY(well_known_proxy_init != nullptr) && UNLIKELY(method == well_known_proxy_init)) ||
      UNLIKELY(method->IsConstructor() &&
               method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;"))) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = GetCodeForInvoke(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          // This needs to be checked first since the instrumentation entrypoint will be able to
          // find the actual JIT compiled code that corresponds to this method.
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else if (NeedDebugVersionFor(method)) {
          // It would be great to search the JIT for its implementation here but we cannot due to
          // the locks we hold. Instead just set to the interpreter bridge and that code will search
          // the JIT when it gets called and replace the entrypoint then.
          new_quick_code = GetQuickToInterpreterBridge();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before existing instrumentation frames.
void InstrumentationInstallStack(Thread* thread, void* arg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  struct InstallStackVisitor final : public StackVisitor {
    InstallStackVisitor(Thread* thread_in,
                        Context* context,
                        uintptr_t instrumentation_exit_pc,
                        uint64_t force_deopt_id)
        : StackVisitor(thread_in, context, kInstrumentationStackWalk),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false),
          instrumentation_stack_depth_(0),
          last_return_pc_(0),
          force_deopt_id_(force_deopt_id) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject().Ptr(),
                                                        m,
                                                        /*return_pc=*/ 0,
                                                        GetFrameId(),
                                                        interpreter_frame,
                                                        force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());

        if (m->IsRuntimeMethod()) {
          const InstrumentationStackFrame& frame =
              (*instrumentation_stack_)[instrumentation_stack_depth_];
          if (frame.interpreter_entry_) {
            // This instrumentation frame is for an interpreter bridge and is
            // pushed when executing the instrumented interpreter bridge. So the method
            // enter event must have been reported. However we need to push a DEX pc
            // into the dex_pcs_ list to match the size of the instrumentation stack.
            uint32_t dex_pc = dex::kDexNoIndex;
            dex_pcs_.push_back(dex_pc);
            last_return_pc_ = frame.return_pc_;
            ++instrumentation_stack_depth_;
            return true;
          }
        }

        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames, or they should be
        // interpreter frames.
        reached_existing_instrumentation_frames_ = true;

        const InstrumentationStackFrame& frame =
            (*instrumentation_stack_)[instrumentation_stack_depth_];
        CHECK_EQ(m->GetNonObsoleteMethod(), frame.method_->GetNonObsoleteMethod())
            << "Expected " << ArtMethod::PrettyMethod(m)
            << ", Found " << ArtMethod::PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        if (UNLIKELY(reached_existing_instrumentation_frames_ && !m->IsRuntimeMethod())) {
          // We already saw an existing instrumentation frame so this should be a runtime-method
          // inserted by the interpreter or runtime.
          std::string thread_name;
          GetThread()->GetThreadName(thread_name);
          uint32_t dex_pc = dex::kDexNoIndex;
          if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
            dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(
                GetCurrentQuickFrame(), last_return_pc_);
          }
          LOG(FATAL) << "While walking " << thread_name << " found unexpected non-runtime method"
                     << " without instrumentation exit return or interpreter frame."
                     << " method is " << GetMethod()->PrettyMethod()
                     << " return_pc is " << std::hex << return_pc
                     << " dex pc: " << dex_pc;
          UNREACHABLE();
        }
        InstrumentationStackFrame instrumentation_frame(
            m->IsRuntimeMethod() ? nullptr : GetThisObject().Ptr(),
            m,
            return_pc,
            GetFrameId(),  // A runtime method still gets a frame id.
            false,
            force_deopt_id_);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      uint32_t dex_pc = dex::kDexNoIndex;
      if (last_return_pc_ != 0 && GetCurrentOatQuickMethodHeader() != nullptr) {
        dex_pc = GetCurrentOatQuickMethodHeader()->ToDexPc(GetCurrentQuickFrame(), last_return_pc_);
      }
      dex_pcs_.push_back(dex_pc);
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
    uint64_t force_deopt_id_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(
      thread, context.get(), instrumentation_exit_pc, instrumentation->current_force_deopt_id_);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_ && !isi->method_->IsRuntimeMethod()) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

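// Instruments the given thread's stack by installing the instrumentation exit stub on its quick
// frames (see InstrumentationInstallStack above) and records that instrumentation stubs are in
// use.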
void Instrumentation::InstrumentThreadStack(Thread* thread) {
  instrumentation_stubs_installed_ = true;
  InstrumentationInstallStack(thread, this);
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    REQUIRES(Locks::mutator_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  struct RestoreStackVisitor final : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr, kInstrumentationStackWalk),
          thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << ArtMethod::PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
          } else {
            CHECK_EQ(m->GetNonObsoleteMethod(),
                     instrumentation_frame.method_->GetNonObsoleteMethod())
                << ArtMethod::PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents() &&
              !m->IsRuntimeMethod()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            JValue val;
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), OptionalFrame{}, val);
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

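// Forces every thread's stack into a deoptimizable state: each stack is instrumented under the
// thread list lock, then the force-deopt id is bumped so these frames are still treated as
// requiring deoptimization when UpdateStubs() later considers restoring stacks.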
void Instrumentation::DeoptimizeAllThreadFrames() {
  Thread* self = Thread::Current();
  MutexLock mu(self, *Locks::thread_list_lock_);
  ThreadList* tl = Runtime::Current()->GetThreadList();
  tl->ForEach([&](Thread* t) {
    Locks::mutator_lock_->AssertExclusiveHeld(self);
    InstrumentThreadStack(t);
  });
  current_force_deopt_id_++;
}

static bool HasEvent(Instrumentation::InstrumentationEvent expected, uint32_t events) {
  return (events & expected) != 0;
}

static void PotentiallyAddListenerTo(Instrumentation::InstrumentationEvent event,
                                     uint32_t events,
                                     std::list<InstrumentationListener*>& list,
                                     InstrumentationListener* listener,
                                     bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  // If there is a free slot in the list, we insert the listener in that slot.
  // Otherwise we add it to the end of the list.
  auto it = std::find(list.begin(), list.end(), nullptr);
  if (it != list.end()) {
    *it = listener;
  } else {
    list.push_back(listener);
  }
  Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyAddListenerTo(kMethodEntered,
                           events,
                           method_entry_listeners_,
                           listener,
                           &have_method_entry_listeners_);
  PotentiallyAddListenerTo(kMethodExited,
                           events,
                           method_exit_listeners_,
                           listener,
                           &have_method_exit_listeners_);
  PotentiallyAddListenerTo(kMethodUnwind,
                           events,
                           method_unwind_listeners_,
                           listener,
                           &have_method_unwind_listeners_);
  PotentiallyAddListenerTo(kBranch,
                           events,
                           branch_listeners_,
                           listener,
                           &have_branch_listeners_);
  PotentiallyAddListenerTo(kDexPcMoved,
                           events,
                           dex_pc_listeners_,
                           listener,
                           &have_dex_pc_listeners_);
  PotentiallyAddListenerTo(kFieldRead,
                           events,
                           field_read_listeners_,
                           listener,
                           &have_field_read_listeners_);
  PotentiallyAddListenerTo(kFieldWritten,
                           events,
                           field_write_listeners_,
                           listener,
                           &have_field_write_listeners_);
  PotentiallyAddListenerTo(kExceptionThrown,
                           events,
                           exception_thrown_listeners_,
                           listener,
                           &have_exception_thrown_listeners_);
  PotentiallyAddListenerTo(kWatchedFramePop,
                           events,
                           watched_frame_pop_listeners_,
                           listener,
                           &have_watched_frame_pop_listeners_);
  PotentiallyAddListenerTo(kExceptionHandled,
                           events,
                           exception_handled_listeners_,
                           listener,
                           &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}

static void PotentiallyRemoveListenerFrom(Instrumentation::InstrumentationEvent event,
                                          uint32_t events,
                                          std::list<InstrumentationListener*>& list,
                                          InstrumentationListener* listener,
                                          bool* has_listener)
    REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if (!HasEvent(event, events)) {
    return;
  }
  auto it = std::find(list.begin(), list.end(), listener);
  if (it != list.end()) {
    // Just update the entry, do not remove from the list. Removing entries in the list
    // is unsafe when mutators are iterating over it.
    *it = nullptr;
  }

  // Check if the list contains any non-null listener, and update 'has_listener'.
  for (InstrumentationListener* l : list) {
    if (l != nullptr) {
      Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = true; });
      return;
    }
  }
  Runtime::DoAndMaybeSwitchInterpreter([=](){ *has_listener = false; });
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  PotentiallyRemoveListenerFrom(kMethodEntered,
                                events,
                                method_entry_listeners_,
                                listener,
                                &have_method_entry_listeners_);
  PotentiallyRemoveListenerFrom(kMethodExited,
                                events,
                                method_exit_listeners_,
                                listener,
                                &have_method_exit_listeners_);
  PotentiallyRemoveListenerFrom(kMethodUnwind,
                                events,
                                method_unwind_listeners_,
                                listener,
                                &have_method_unwind_listeners_);
  PotentiallyRemoveListenerFrom(kBranch,
                                events,
                                branch_listeners_,
                                listener,
                                &have_branch_listeners_);
  PotentiallyRemoveListenerFrom(kDexPcMoved,
                                events,
                                dex_pc_listeners_,
                                listener,
                                &have_dex_pc_listeners_);
  PotentiallyRemoveListenerFrom(kFieldRead,
                                events,
                                field_read_listeners_,
                                listener,
                                &have_field_read_listeners_);
  PotentiallyRemoveListenerFrom(kFieldWritten,
                                events,
                                field_write_listeners_,
                                listener,
                                &have_field_write_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionThrown,
                                events,
                                exception_thrown_listeners_,
                                listener,
                                &have_exception_thrown_listeners_);
  PotentiallyRemoveListenerFrom(kWatchedFramePop,
                                events,
                                watched_frame_pop_listeners_,
                                listener,
                                &have_watched_frame_pop_listeners_);
  PotentiallyRemoveListenerFrom(kExceptionHandled,
                                events,
                                exception_handled_listeners_,
                                listener,
                                &have_exception_handled_listeners_);
  UpdateInterpreterHandlerTable();
}

Instrumentation::InstrumentationLevel Instrumentation::GetCurrentInstrumentationLevel() const {
  if (interpreter_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInterpreter;
  } else if (entry_exit_stubs_installed_) {
    return InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  } else {
    return InstrumentationLevel::kInstrumentNothing;
  }
}

bool Instrumentation::RequiresInstrumentationInstallation(InstrumentationLevel new_level) const {
  // We need to reinstall instrumentation if we go to a different level.
  return GetCurrentInstrumentationLevel() != new_level;
}

void Instrumentation::UpdateInstrumentationLevels(InstrumentationLevel level) {
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    can_use_instrumentation_trampolines_ = false;
  }
  if (UNLIKELY(!can_use_instrumentation_trampolines_)) {
    for (auto& p : requested_instrumentation_levels_) {
      if (p.second == InstrumentationLevel::kInstrumentWithInstrumentationStubs) {
        p.second = InstrumentationLevel::kInstrumentWithInterpreter;
      }
    }
  }
}

void Instrumentation::ConfigureStubs(const char* key, InstrumentationLevel desired_level) {
  // Store the instrumentation level for this key or remove it.
  if (desired_level == InstrumentationLevel::kInstrumentNothing) {
    // The client no longer needs instrumentation.
    requested_instrumentation_levels_.erase(key);
  } else {
    // The client needs instrumentation.
    requested_instrumentation_levels_.Overwrite(key, desired_level);
  }

  UpdateInstrumentationLevels(desired_level);
  UpdateStubs();
}

void Instrumentation::EnableSingleThreadDeopt() {
  // Single-thread deopt only uses interpreter.
  can_use_instrumentation_trampolines_ = false;
  UpdateInstrumentationLevels(InstrumentationLevel::kInstrumentWithInterpreter);
  UpdateStubs();
}

void Instrumentation::UpdateStubs() {
  // Look for the highest required instrumentation level.
  InstrumentationLevel requested_level = InstrumentationLevel::kInstrumentNothing;
  for (const auto& v : requested_instrumentation_levels_) {
    requested_level = std::max(requested_level, v.second);
  }

  DCHECK(can_use_instrumentation_trampolines_ ||
         requested_level != InstrumentationLevel::kInstrumentWithInstrumentationStubs)
      << "Use trampolines: " << can_use_instrumentation_trampolines_ << " level "
      << requested_level;

  interpret_only_ = (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) ||
                    forced_interpret_only_;

  if (!RequiresInstrumentationInstallation(requested_level)) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (requested_level > InstrumentationLevel::kInstrumentNothing) {
    if (requested_level == InstrumentationLevel::kInstrumentWithInterpreter) {
      interpreter_stubs_installed_ = true;
      entry_exit_stubs_installed_ = true;
    } else {
      CHECK_EQ(requested_level, InstrumentationLevel::kInstrumentWithInstrumentationStubs);
      entry_exit_stubs_installed_ = true;
      interpreter_stubs_installed_ = false;
    }
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    InstallStubsClassVisitor visitor(this);
    runtime->GetClassLinker()->VisitClasses(&visitor);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, *GetDeoptimizedMethodsLock());
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      bool no_remaining_deopts = true;
      // Check that there are no other forced deoptimizations. Do it here so we only need to lock
      // thread_list_lock once.
      runtime->GetThreadList()->ForEach([&](Thread* t) {
        no_remaining_deopts =
            no_remaining_deopts && !t->IsForceInterpreter() &&
            std::all_of(t->GetInstrumentationStack()->cbegin(),
                        t->GetInstrumentationStack()->cend(),
                        [&](const auto& frame) REQUIRES_SHARED(Locks::mutator_lock_) {
                          return frame.force_deopt_id_ == current_force_deopt_id_;
                        });
      });
      if (no_remaining_deopts) {
        Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
        // Only do this after restoring, as walking the stack when restoring will see
        // the instrumentation exit pc.
        instrumentation_stubs_installed_ = false;
      }
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg ATTRIBUTE_UNUSED) {
  thread->ResetQuickAllocEntryPointsForThread(kUseReadBarrier && thread->GetIsGcMarking());
}

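// Switches the quick allocation entrypoints between their instrumented and uninstrumented
// versions. Once the runtime has started, this suspends all threads so the entrypoints can be
// swapped while no Java code is running.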
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    ScopedSuspendAll ssa(__FUNCTION__);
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
    alloc_entrypoints_instrumented_ = instrumented;
  } else {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);

    // Note: ResetQuickAllocEntryPoints only works when the runtime is started. Manually run the
    // update for just this thread.
    // Note: self may be null. One of those paths is setting instrumentation in the Heap
    // constructor for gcstress mode.
    if (self != nullptr) {
      ResetQuickAllocEntryPointsForThread(self, nullptr);
    }

    alloc_entrypoints_instrumented_ = instrumented;
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

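// Chooses the entrypoint actually installed when a caller asks for `quick_code`: deoptimized or
// interpreter-only methods get the interpreter bridge, while methods covered by entry/exit stubs
// get the instrumentation entrypoint instead of their compiled code.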
void Instrumentation::UpdateMethodsCodeImpl(ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_ &&
                 // We need to make sure not to replace anything that InstallStubsForMethod
                 // wouldn't. Specifically we cannot stub out Proxy.<init> since subtypes copy the
                 // implementation directly and this will confuse the instrumentation trampolines.
                 // TODO: We should remove the need for this since it makes it impossible to profile
                 // Proxy.<init> correctly in all cases.
                 method != jni::DecodeArtMethod(WellKnownClasses::java_lang_reflect_Proxy_init)) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        if (!method->IsNative() && Runtime::Current()->GetJit() != nullptr) {
          // Native methods use trampoline entrypoints during interpreter tracing.
          DCHECK(!Runtime::Current()->GetJit()->GetCodeCache()->GetGarbageCollectCodeUnsafe());
          ProfilingInfo* profiling_info = method->GetProfilingInfo(kRuntimePointerSize);
          // Tracing will look at the saved entry point in the profiling info to know the actual
          // entrypoint, so we store it here.
          if (profiling_info != nullptr) {
            profiling_info->SetSavedEntryPoint(quick_code);
          }
        }
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* quick_code) {
  // We don't do any read barrier on `method`'s declaring class in this code, as the JIT might
  // enter here on a soon-to-be deleted ArtMethod. Updating the entrypoint is OK though, as
  // the ArtMethod is still in memory.
  const void* new_quick_code = quick_code;
  if (UNLIKELY(instrumentation_stubs_installed_) && entry_exit_stubs_installed_) {
    new_quick_code = GetQuickInstrumentationEntryPoint();
  }
  UpdateEntrypoints(method, new_quick_code);
}

void Instrumentation::UpdateMethodsCode(ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  UpdateMethodsCodeImpl(method, quick_code);
}

void Instrumentation::UpdateMethodsCodeToInterpreterEntryPoint(ArtMethod* method) {
  UpdateMethodsCodeImpl(method, GetQuickToInterpreterBridge());
}

void Instrumentation::UpdateMethodsCodeForJavaDebuggable(ArtMethod* method,
                                                         const void* quick_code) {
  // When the runtime is set to Java debuggable, we may update the entry points of
  // all methods of a class to the interpreter bridge. A method's declaring class
  // might not be in resolved state yet in that case, so we bypass the DCHECK in
  // UpdateMethodsCode.
  UpdateMethodsCodeImpl(method, quick_code);
}

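// Bookkeeping for the set of individually deoptimized methods; callers synchronize through
// GetDeoptimizedMethodsLock() (see Deoptimize() and Undeoptimize() below).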
Mathieu Chartiere401d142015-04-22 13:56:20 -0700980bool Instrumentation::AddDeoptimizedMethod(ArtMethod* method) {
981 if (IsDeoptimizedMethod(method)) {
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700982 // Already in the map. Return.
983 return false;
984 }
985 // Not found. Add it.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700986 deoptimized_methods_.insert(method);
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700987 return true;
988}
989
Mathieu Chartiere401d142015-04-22 13:56:20 -0700990bool Instrumentation::IsDeoptimizedMethod(ArtMethod* method) {
991 return deoptimized_methods_.find(method) != deoptimized_methods_.end();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700992}
993
Mathieu Chartiere401d142015-04-22 13:56:20 -0700994ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
995 if (deoptimized_methods_.empty()) {
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700996 // Empty.
997 return nullptr;
998 }
Mathieu Chartiere401d142015-04-22 13:56:20 -0700999 return *deoptimized_methods_.begin();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001000}
1001
Mathieu Chartiere401d142015-04-22 13:56:20 -07001002bool Instrumentation::RemoveDeoptimizedMethod(ArtMethod* method) {
1003 auto it = deoptimized_methods_.find(method);
1004 if (it == deoptimized_methods_.end()) {
1005 return false;
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001006 }
Mathieu Chartiere401d142015-04-22 13:56:20 -07001007 deoptimized_methods_.erase(it);
1008 return true;
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001009}
1010
1011bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
1012 return deoptimized_methods_.empty();
1013}
1014
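// Illustrative usage sketch (not exercised in this file): a debugger-style client is expected to
// drive the selective-deoptimization API above roughly as follows. `target_method` and the
// "debugger" key are placeholders.
//
//   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
//   instr->EnableDeoptimization();             // requires an empty deoptimized-methods set
//   instr->Deoptimize(target_method);          // force target_method through the interpreter
//   ...                                        // breakpoints, single-stepping, etc.
//   instr->Undeoptimize(target_method);        // restore compiled code where possible
//   instr->DisableDeoptimization("debugger");  // undo full deoptimization and any leftovers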
void Instrumentation::Deoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << ArtMethod::PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(method->IsInvokable());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, *GetDeoptimizedMethodsLock());
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << ArtMethod::PrettyMethod(method)
                            << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = NeedDebugVersionFor(method)
          ? GetQuickToInterpreterBridge()
          : class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty && !entry_exit_stubs_installed_) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  return IsDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization(const char* key) {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  InstrumentationLevel level = GetCurrentInstrumentationLevel();
  if (level == InstrumentationLevel::kInstrumentWithInterpreter) {
    UndeoptimizeEverything(key);
  }
  // Undeoptimize selected methods.
  while (true) {
    ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), *GetDeoptimizedMethodsLock());
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  if (!HasMethodEntryListeners() && !HasMethodExitListeners()) {
    return false;
  }
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything(const char* key) {
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentWithInterpreter);
}

void Instrumentation::UndeoptimizeEverything(const char* key) {
  CHECK(interpreter_stubs_installed_);
  CHECK(deoptimization_enabled_);
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

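// Illustrative usage sketch (not exercised in this file): a tracer pairs these calls under its
// own key so that ConfigureStubs() can reference-count the requested instrumentation level per
// client. `listener` and `kTracerKey` are placeholders.
//
//   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
//   instr->AddListener(listener,
//                      Instrumentation::kMethodEntered | Instrumentation::kMethodExited);
//   instr->EnableMethodTracing(kTracerKey, /* needs_interpreter= */ false);
//   ...
//   instr->DisableMethodTracing(kTracerKey);
//   instr->RemoveListener(listener,
//                         Instrumentation::kMethodEntered | Instrumentation::kMethodExited);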
void Instrumentation::EnableMethodTracing(const char* key, bool needs_interpreter) {
  InstrumentationLevel level;
  if (needs_interpreter) {
    level = InstrumentationLevel::kInstrumentWithInterpreter;
  } else {
    level = InstrumentationLevel::kInstrumentWithInstrumentationStubs;
  }
  ConfigureStubs(key, level);
}

void Instrumentation::DisableMethodTracing(const char* key) {
  ConfigureStubs(key, InstrumentationLevel::kInstrumentNothing);
}

const void* Instrumentation::GetCodeForInvoke(ArtMethod* method) const {
  // This is called by instrumentation entry only and that should never be getting proxy methods.
  DCHECK(!method->IsProxyMethod()) << method->PrettyMethod();
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!instrumentation_stubs_installed_ && !interpreter_stubs_installed_)) {
    // In general we just return whatever the method thinks its entrypoint is here. The only
    // exception is if it still has the instrumentation entrypoint. That means we are racing
    // another thread getting rid of instrumentation, which is unexpected but possible. In that
    // case we want to wait and try to get it from the oat file or jit.
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(kRuntimePointerSize);
    DCHECK(code != nullptr);
    if (code != GetQuickInstrumentationEntryPoint()) {
      return code;
    } else if (method->IsNative()) {
      return class_linker->GetQuickOatCodeFor(method);
    }
    // We don't know what it is. Fall through to try to find the code from the JIT or oat file.
  } else if (method->IsNative()) {
    // TODO We could have JIT compiled native entrypoints. It might be worth it to find these.
    return class_linker->GetQuickOatCodeFor(method);
  } else if (UNLIKELY(interpreter_stubs_installed_)) {
    return GetQuickToInterpreterBridge();
  }
  // Since the method cannot be native due to the checks above, we can always fall back to the
  // interpreter bridge.
  const void* result = GetQuickToInterpreterBridge();
  if (!NeedDebugVersionFor(method)) {
    // If we don't need a debug version we should see what the oat file/class linker has to say.
    result = class_linker->GetQuickOatCodeFor(method);
  }
  // If both of those fail, try the JIT.
  if (result == GetQuickToInterpreterBridge()) {
    jit::Jit* jit = Runtime::Current()->GetJit();
    if (jit != nullptr) {
      const void* res = jit->GetCodeCache()->FindCompiledCodeForInstrumentation(method);
      if (res != nullptr) {
        result = res;
      }
    }
  }
  return result;
}

const void* Instrumentation::GetQuickCodeFor(ArtMethod* method, PointerSize pointer_size) const {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
               !class_linker->IsQuickToInterpreterBridge(code))) {
      return code;
    }
  }
  return class_linker->GetQuickOatCodeFor(method);
}

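// Sketch of a minimal event consumer (hypothetical, for illustration only): listeners subclass
// InstrumentationListener and override just the callbacks they need. The dispatch loops in the
// event implementations below skip empty slots, which is why each loop checks
// `listener != nullptr`.
//
//   class CountingListener final : public InstrumentationListener {
//    public:
//     void MethodEntered(Thread* thread,
//                        Handle<mirror::Object> this_object,
//                        ArtMethod* method,
//                        uint32_t dex_pc) override REQUIRES_SHARED(Locks::mutator_lock_) {
//       count_.fetch_add(1, std::memory_order_relaxed);
//     }
//
//    private:
//     std::atomic<size_t> count_{0};
//   };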
void Instrumentation::MethodEnterEventImpl(Thread* thread,
                                           ObjPtr<mirror::Object> this_object,
                                           ArtMethod* method,
                                           uint32_t dex_pc) const {
  DCHECK(!method->IsRuntimeMethod());
  if (HasMethodEntryListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    for (InstrumentationListener* listener : method_entry_listeners_) {
      if (listener != nullptr) {
        listener->MethodEntered(thread, thiz, method, dex_pc);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          OptionalFrame frame,
                                          MutableHandle<mirror::Object>& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    for (InstrumentationListener* listener : method_exit_listeners_) {
      if (listener != nullptr) {
        listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
      }
    }
  }
}

template <>
void Instrumentation::MethodExitEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          OptionalFrame frame,
                                          JValue& return_value) const {
  if (HasMethodExitListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<2> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    if (method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetReturnTypePrimitive() !=
        Primitive::kPrimNot) {
      for (InstrumentationListener* listener : method_exit_listeners_) {
        if (listener != nullptr) {
          listener->MethodExited(thread, thiz, method, dex_pc, frame, return_value);
        }
      }
    } else {
      MutableHandle<mirror::Object> ret(hs.NewHandle(return_value.GetL()));
      MethodExitEventImpl(thread, thiz.Get(), method, dex_pc, frame, ret);
      return_value.SetL(ret.Get());
    }
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread,
                                        ObjPtr<mirror::Object> this_object,
                                        ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (HasMethodUnwindListeners()) {
    Thread* self = Thread::Current();
    StackHandleScope<1> hs(self);
    Handle<mirror::Object> thiz(hs.NewHandle(this_object));
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      if (listener != nullptr) {
        listener->MethodUnwind(thread, thiz, method, dex_pc);
      }
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : dex_pc_listeners_) {
    if (listener != nullptr) {
      listener->DexPcMoved(thread, thiz, method, dex_pc);
    }
  }
}

void Instrumentation::BranchImpl(Thread* thread,
                                 ArtMethod* method,
                                 uint32_t dex_pc,
                                 int32_t offset) const {
  for (InstrumentationListener* listener : branch_listeners_) {
    if (listener != nullptr) {
      listener->Branch(thread, method, dex_pc, offset);
    }
  }
}

void Instrumentation::WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const {
  for (InstrumentationListener* listener : watched_frame_pop_listeners_) {
    if (listener != nullptr) {
      listener->WatchedFramePop(thread, frame);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread,
                                         ObjPtr<mirror::Object> this_object,
                                         ArtMethod* method,
                                         uint32_t dex_pc,
                                         ArtField* field) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  for (InstrumentationListener* listener : field_read_listeners_) {
    if (listener != nullptr) {
      listener->FieldRead(thread, thiz, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread,
                                          ObjPtr<mirror::Object> this_object,
                                          ArtMethod* method,
                                          uint32_t dex_pc,
                                          ArtField* field,
                                          const JValue& field_value) const {
  Thread* self = Thread::Current();
  StackHandleScope<2> hs(self);
  Handle<mirror::Object> thiz(hs.NewHandle(this_object));
  if (field->IsPrimitiveType()) {
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, field_value);
      }
    }
  } else {
    Handle<mirror::Object> val(hs.NewHandle(field_value.GetL()));
    for (InstrumentationListener* listener : field_write_listeners_) {
      if (listener != nullptr) {
        listener->FieldWritten(thread, thiz, method, dex_pc, field, val);
      }
    }
  }
}

void Instrumentation::ExceptionThrownEvent(Thread* thread,
                                           ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionThrownListeners()) {
    DCHECK_EQ(thread->GetException(), h_exception.Get());
    thread->ClearException();
    for (InstrumentationListener* listener : exception_thrown_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionThrown(thread, h_exception);
      }
    }
    // See b/65049545 for discussion about this behavior.
    thread->AssertNoPendingException();
    thread->SetException(h_exception.Get());
  }
}

void Instrumentation::ExceptionHandledEvent(Thread* thread,
                                            ObjPtr<mirror::Throwable> exception_object) const {
  Thread* self = Thread::Current();
  StackHandleScope<1> hs(self);
  Handle<mirror::Throwable> h_exception(hs.NewHandle(exception_object));
  if (HasExceptionHandledListeners()) {
    // We should have cleared the exception so that callers can detect a new one.
    DCHECK(thread->GetException() == nullptr);
    for (InstrumentationListener* listener : exception_handled_listeners_) {
      if (listener != nullptr) {
        listener->ExceptionHandled(thread, h_exception);
      }
    }
  }
}

// Computes a frame ID by ignoring inlined frames.
size_t Instrumentation::ComputeFrameId(Thread* self,
                                       size_t frame_depth,
                                       size_t inlined_frames_before_frame) {
  CHECK_GE(frame_depth, inlined_frames_before_frame);
  size_t no_inline_depth = frame_depth - inlined_frames_before_frame;
  return StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) - no_inline_depth;
}
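// Example: with 10 walkable frames on the thread, a frame at depth 7 that is preceded by
// 2 inlined frames gives no_inline_depth = 7 - 2 = 5 and therefore frame_id = 10 - 5 = 5,
// i.e. inlined frames are ignored when assigning ids.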

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
               << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

void Instrumentation::PushInstrumentationStackFrame(Thread* self,
                                                    ObjPtr<mirror::Object> this_object,
                                                    ArtMethod* method,
                                                    uintptr_t lr,
                                                    bool interpreter_entry) {
  DCHECK(!self->IsExceptionPending());
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << ArtMethod::PrettyMethod(method) << " from PC "
              << reinterpret_cast<void*>(lr);
  }

  // We send the enter event before pushing the instrumentation frame to make cleanup easier. If
  // the event causes an exception we can simply send the unwind event and return.
  StackHandleScope<1> hs(self);
  Handle<mirror::Object> h_this(hs.NewHandle(this_object));
  if (!interpreter_entry) {
    MethodEnterEvent(self, h_this.Get(), method, 0);
    if (self->IsExceptionPending()) {
      MethodUnwindEvent(self, h_this.Get(), method, 0);
      return;
    }
  }

  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  DCHECK(!self->IsExceptionPending());
  size_t frame_id = StackVisitor::ComputeNumFrames(self, kInstrumentationStackWalk);

  instrumentation::InstrumentationStackFrame instrumentation_frame(
      h_this.Get(), method, lr, frame_id, interpreter_entry, current_force_deopt_id_);
  stack->push_front(instrumentation_frame);
}
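// The frame pushed above is popped by PopInstrumentationStackFrame(), normally reached through
// the instrumentation exit stub when the instrumented callee returns. Recording
// current_force_deopt_id_ in the frame lets the pop side notice that a full deoptimization was
// requested while this frame was live (see the `deoptimize` decision there).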

DeoptimizationMethodType Instrumentation::GetDeoptimizationMethodType(ArtMethod* method) {
  if (method->IsRuntimeMethod()) {
    // Certain methods have strict requirement on whether the dex instruction
    // should be re-executed upon deoptimization.
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
    if (method == Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForSuspendCheck)) {
      return DeoptimizationMethodType::kKeepDexPc;
    }
  }
  return DeoptimizationMethodType::kDefault;
}
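// In other words: the clinit and suspend-check save-everything stubs are not tied to a dex
// instruction that can simply be re-executed, so deoptimizing through them keeps the current
// dex pc; every other method falls back to the default policy.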

// Try to get the shorty of a runtime method if it's an invocation stub.
static char GetRuntimeMethodShorty(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_) {
  char shorty = 'V';
  StackVisitor::WalkStack(
      [&shorty](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* m = stack_visitor->GetMethod();
        if (m == nullptr || m->IsRuntimeMethod()) {
          return true;
        }
        // The first Java method.
        if (m->IsNative()) {
          // Use JNI method's shorty for the jni stub.
          shorty = m->GetShorty()[0];
        } else if (m->IsProxyMethod()) {
          // Proxy method just invokes its proxied method via
          // art_quick_proxy_invoke_handler.
          shorty = m->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty()[0];
        } else {
          const Instruction& instr = m->DexInstructions().InstructionAt(stack_visitor->GetDexPc());
          if (instr.IsInvoke()) {
            auto get_method_index_fn = [](ArtMethod* caller,
                                          const Instruction& inst,
                                          uint32_t dex_pc)
                REQUIRES_SHARED(Locks::mutator_lock_) {
              switch (inst.Opcode()) {
                case Instruction::INVOKE_VIRTUAL_RANGE_QUICK:
                case Instruction::INVOKE_VIRTUAL_QUICK: {
                  uint16_t method_idx = caller->GetIndexFromQuickening(dex_pc);
                  CHECK_NE(method_idx, DexFile::kDexNoIndex16);
                  return method_idx;
                }
                default: {
                  return static_cast<uint16_t>(inst.VRegB());
                }
              }
            };

            uint16_t method_index = get_method_index_fn(m, instr, stack_visitor->GetDexPc());
            const DexFile* dex_file = m->GetDexFile();
            if (interpreter::IsStringInit(dex_file, method_index)) {
              // Invoking string init constructor is turned into invoking
              // StringFactory.newStringFromChars() which returns a string.
              shorty = 'L';
            } else {
              shorty = dex_file->GetMethodShorty(method_index)[0];
            }
          } else {
            // It could be that a non-invoke opcode invokes a stub, which in turn
            // invokes Java code. In such cases, we should never expect a return
            // value from the stub.
          }
        }
        // Stop stack walking since we've seen a Java frame.
        return false;
      },
      thread,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
  return shorty;
}

TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self,
                                                            uintptr_t* return_pc,
                                                            uint64_t* gpr_result,
                                                            uint64_t* fpr_result) {
  DCHECK(gpr_result != nullptr);
  DCHECK(fpr_result != nullptr);
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  const PointerSize pointer_size = Runtime::Current()->GetClassLinker()->GetImagePointerSize();
  char return_shorty;

  // A runtime method does not call into MethodExitEvent() so there should not be
  // a suspension point below.
  ScopedAssertNoThreadSuspension ants(__FUNCTION__, method->IsRuntimeMethod());
  if (method->IsRuntimeMethod()) {
    if (method != Runtime::Current()->GetCalleeSaveMethod(
        CalleeSaveType::kSaveEverythingForClinit)) {
      // If the caller is at an invocation point and the runtime method is not
      // for clinit, we need to pass return results to the caller.
      // We need the correct shorty to decide whether we need to pass the return
      // result for deoptimization below.
      return_shorty = GetRuntimeMethodShorty(self);
    } else {
      // Some runtime methods such as allocations, unresolved field getters, etc.
      // have return value. We don't need to set return_value since MethodExitEvent()
      // below isn't called for runtime methods. Deoptimization doesn't need the
      // value either since the dex instruction will be re-executed by the
      // interpreter, except these two cases:
      // (1) For an invoke, which is handled above to get the correct shorty.
      // (2) For MONITOR_ENTER/EXIT, which cannot be re-executed since it's not
      //     idempotent. However there is no return value for it anyway.
      return_shorty = 'V';
    }
  } else {
    return_shorty = method->GetInterfaceMethodIfProxy(pointer_size)->GetShorty(&length)[0];
  }

  bool is_ref = return_shorty == '[' || return_shorty == 'L';
  StackHandleScope<1> hs(self);
  MutableHandle<mirror::Object> res(hs.NewHandle<mirror::Object>(nullptr));
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(*fpr_result);
  } else {
    return_value.SetJ(*gpr_result);
  }
  if (is_ref) {
    // Take a handle to the return value so we won't lose it if we suspend.
    res.Assign(return_value.GetL());
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = dex::kDexNoIndex;
  if (!method->IsRuntimeMethod() && !instrumentation_frame.interpreter_entry_) {
    ObjPtr<mirror::Object> this_object = instrumentation_frame.this_object_;
    MethodExitEvent(
        self, this_object, instrumentation_frame.method_, dex_pc, OptionalFrame{}, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                     self->IsForceInterpreter() ||
                     // NB Since structurally obsolete compiled methods might have the offsets of
                     // methods/fields compiled in we need to go back to interpreter whenever we
                     // hit them.
                     visitor.caller->GetDeclaringClass()->IsObsoleteObject() ||
                     // Check if we forced all threads to deoptimize in the time between this
                     // frame being created and now.
                     instrumentation_frame.force_deopt_id_ != current_force_deopt_id_ ||
                     Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (is_ref) {
    // Restore the return value if it's a reference since it might have moved.
    *reinterpret_cast<mirror::Object**>(gpr_result) = res.Get();
  }
  if (deoptimize && Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing "
                << visitor.caller->PrettyMethod()
                << " by returning from "
                << method->PrettyMethod()
                << " with result "
                << std::hex << return_value.GetJ() << std::dec
                << " in "
                << *self;
    }
    DeoptimizationMethodType deopt_method_type = GetDeoptimizationMethodType(method);
    self->PushDeoptimizationContext(return_value,
                                    return_shorty == 'L' || return_shorty == '[',
                                    /* exception= */ nullptr,
                                    /* from_code= */ false,
                                    deopt_method_type);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (deoptimize && !Runtime::Current()->IsAsyncDeoptimizeable(*return_pc)) {
      VLOG(deopt) << "Got a deoptimization request on un-deoptimizable " << method->PrettyMethod()
                  << " at PC " << reinterpret_cast<void*>(*return_pc);
    }
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << method->PrettyMethod()
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}
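// The TwoWordReturn produced above is consumed by the instrumentation exit assembly stub: in the
// deoptimization case it carries the deoptimization entry point to branch into, otherwise it
// simply carries the original return pc to resume at.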

uintptr_t Instrumentation::PopFramesForDeoptimization(Thread* self, size_t nframes) const {
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GE(stack->size(), nframes);
  if (nframes == 0) {
    return 0u;
  }
  // Only need to send instrumentation events if it's not for deopt (do give the log messages if we
  // have verbose-instrumentation anyway though).
  if (kVerboseInstrumentation) {
    for (size_t i = 0; i < nframes; i++) {
      LOG(INFO) << "Popping for deoptimization " << stack->at(i).method_->PrettyMethod();
    }
  }
  // Now that we've sent all the instrumentation events we can actually modify the
  // instrumentation-stack. We cannot do this earlier since MethodUnwindEvent can re-enter java and
  // do other things that require the instrumentation stack to be in a consistent state with the
  // actual stack.
  for (size_t i = 0; i < nframes - 1; i++) {
    stack->pop_front();
  }
  uintptr_t return_pc = stack->front().return_pc_;
  stack->pop_front();
  return return_pc;
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << ArtMethod::PrettyMethod(method_) << ":"
     << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_)
     << " force_deopt_id=" << force_deopt_id_;
  return os.str();
}

}  // namespace instrumentation
}  // namespace art