/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "object_utils.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

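// Installs the given quick and portable code as the method's entry points, keeps the
// IsPortableCompiled flag in sync, and, for non-resolution methods, selects the matching
// interpreter entry point: the interpreter-to-interpreter bridge when the quick code routes
// through the interpreter, the interpreter-to-compiled-code bridge otherwise.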
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

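// Chooses the entry points for one method depending on whether stubs are being removed or
// installed, whether the method must run in the interpreter (forced interpret-only or
// deoptimized), and whether its declaring class is initialized; static methods of uninitialized
// classes keep the resolution trampoline (see ClassLinker::FixupStaticTrampolines).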
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods' code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame, so the method enter event must already have been
          // reported. However, we still need to push a DEX pc into the dex_pcs_ list to match the
          // size of the instrumentation stack. Since we won't report method entry here, we can
          // safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame in which the instrumentation exit stub has already been installed.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert the frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to prevent posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to prevent posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

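// Registers |listener| for every event category whose bit is set in |events|, updates the
// corresponding have_*_listeners_ flags and refreshes the interpreter handler table. The caller
// must hold the mutator lock exclusively.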
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    field_read_listeners_.push_back(listener);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    field_write_listeners_.push_back(listener);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kFieldRead) != 0) {
    bool contains = std::find(field_read_listeners_.begin(), field_read_listeners_.end(),
                              listener) != field_read_listeners_.end();
    if (contains) {
      field_read_listeners_.remove(listener);
    }
    have_field_read_listeners_ = field_read_listeners_.size() > 0;
  }
  if ((events & kFieldWritten) != 0) {
    bool contains = std::find(field_write_listeners_.begin(), field_write_listeners_.end(),
                              listener) != field_write_listeners_.end();
    if (contains) {
      field_write_listeners_.remove(listener);
    }
    have_field_write_listeners_ = field_write_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

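// Switches between the three instrumentation levels: 0 (no stubs), 1 (method entry/exit stubs)
// and 2 (everything runs in the interpreter). Raising the level re-installs stubs for all classes
// and pushes instrumentation frames on every thread's stack; dropping back to level 0 restores
// the stacks only if no individual method is still deoptimized.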
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = deoptimized_methods_.empty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

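// Flips the quick allocation entry points between the instrumented and uninstrumented versions.
// Once the runtime has started, all threads are suspended while the entry points are swapped so
// no thread keeps allocating through a stale entry point.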
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

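// Keeps a method's entry points consistent with the current instrumentation level when new code
// is installed for it: interpreter or instrumentation entry points are preserved while stubs are
// installed or the method is deoptimized; otherwise the provided code is used.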
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

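// Forces |method| to be entered through the instrumentation entry point so its quick frames can
// later be deoptimized into the interpreter; Undeoptimize below is the reverse operation.
// Illustrative pairing, as a sketch only (real callers such as the debugger add their own
// locking and bookkeeping):
//   instrumentation->EnableDeoptimization();
//   instrumentation->Deoptimize(method);      // method now runs under the interpreter
//   ...
//   instrumentation->Undeoptimize(method);
//   instrumentation->DisableDeoptimization();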
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    pair = deoptimized_methods_.insert(method);
  }
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    auto it = deoptimized_methods_.find(method);
    CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    deoptimized_methods_.erase(it);
    empty = deoptimized_methods_.empty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  DCHECK(method != nullptr);
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify the listeners of method enter/exit events.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

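// Returns the quick code the method should execute when it is not being instrumented: the
// method's own entry point if no stubs are installed and that entry point is not a trampoline or
// bridge, otherwise the code recorded in the oat file.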
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  // TODO: same comment as in DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_read_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldRead(thread, this_object, method, dex_pc, field);
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  // TODO: same comment as in DexPcMovedEventImpl.
  std::list<InstrumentationListener*> copy(field_write_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    // TODO: The copy below is due to the debug listener having an action where it can remove
    // itself as a listener and break the iterator. The copy only works around the problem.
    std::list<InstrumentationListener*> copy(exception_caught_listeners_);
    for (InstrumentationListener* listener : copy) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

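// Records an InstrumentationStackFrame for a method entered via the instrumentation entry point,
// saving the real return address |lr| so it can be restored when the frame is popped, and reports
// the method entry event unless this is an interpreter entry.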
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

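// Pops the InstrumentationStackFrame pushed on entry: restores the caller's return PC, reports
// the method exit event using the raw GPR/FPR results, and returns a two-word value telling the
// exit stub either to return to the caller or to branch to the deoptimization entry point when
// the caller must continue in the interpreter.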
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  // return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    // return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

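// Visits the deoptimized methods as GC roots. The set is rebuilt because the callback may update
// (move) the ArtMethod pointers.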
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (deoptimized_methods_.empty()) {
    return;
  }
  std::set<mirror::ArtMethod*> new_deoptimized_methods;
  for (mirror::ArtMethod* method : deoptimized_methods_) {
    DCHECK(method != nullptr);
    callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
    new_deoptimized_methods.insert(method);
  }
  deoptimized_methods_ = new_deoptimized_methods;
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art