/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "object_utils.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

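// Installs instrumentation stubs for every direct and virtual method of the given class.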
bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

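// Sets the method's quick and portable entry points and keeps its interpreter entry point
// consistent with them: the interpreter-to-interpreter bridge when the method will be run by the
// interpreter, the interpreter-to-compiled-code bridge otherwise.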
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == GetQuickToInterpreterBridgeTrampoline(Runtime::Current()->GetClassLinker()) ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != GetQuickToInterpreterBridgeTrampoline(class_linker));
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may have already done this previously, we need to push a new instrumentation frame
// before existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so the method enter event must already have been
          // reported. However, we need to push a DEX pc into the dex_pcs_ list to match the size
          // of the instrumentation stack. Since we won't report method entry here, we can safely
          // push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame into which the instrumentation exit stub has already been
        // installed, so instrumentation should already be installed on all previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    field_read_listeners_.push_back(listener);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    field_write_listeners_.push_back(listener);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kFieldRead) != 0) {
    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
                              listener) != field_read_listeners_.end();
    if (contains) {
      field_read_listeners_.remove(listener);
    }
    have_field_read_listeners_ = field_read_listeners_.size() > 0;
  }
  if ((events & kFieldWritten) != 0) {
    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
                              listener) != field_write_listeners_.end();
    if (contains) {
      field_write_listeners_.remove(listener);
    }
    have_field_write_listeners_ = field_write_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

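// Reconfigures the runtime's instrumentation level: no stubs, method entry/exit stubs, or full
// interpreter. Installs or removes per-thread instrumentation stack frames as needed.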
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = deoptimized_methods_.empty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

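// Switches the quick allocation entry points between instrumented and uninstrumented versions,
// suspending all threads while they are updated if the runtime has already started.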
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  //       should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  //       should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
        quick_code == GetQuickToInterpreterBridgeTrampoline(Runtime::Current()->GetClassLinker()) ||
        quick_code == GetQuickToInterpreterBridge()) {
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

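// Marks the method as deoptimized. Unless interpreter stubs are already installed, routes the
// method through the instrumentation entry point and installs instrumentation exit stubs on every
// thread's stack.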
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    pair = deoptimized_methods_.insert(method);
  }
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    auto it = deoptimized_methods_.find(method);
    CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    deoptimized_methods_.erase(it);
    empty = deoptimized_methods_.empty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  DCHECK(method != nullptr);
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

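// Returns the quick code the method should actually run: its current entry point when that is
// real compiled code, otherwise the code the class linker knows from the oat file.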
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != nullptr);
    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker())) &&
        LIKELY(code != GetQuickToInterpreterBridgeTrampoline(runtime->GetClassLinker())) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  // TODO: same comment as in DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_read_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldRead(thread, this_object, method, dex_pc, field);
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  // TODO: same comment as in DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_write_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

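// Visits the deoptimized methods as GC roots and rebuilds the set with the (possibly moved)
// method pointers.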
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (deoptimized_methods_.empty()) {
    return;
  }
  std::set<mirror::ArtMethod*> new_deoptimized_methods;
  for (mirror::ArtMethod* method : deoptimized_methods_) {
    DCHECK(method != nullptr);
    callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
    new_deoptimized_methods.insert(method);
  }
  deoptimized_methods_ = new_deoptimized_methods;
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art