blob: 90115c3887ad9f2c6a17476f2bdb84811372d3f3 [file] [log] [blame]
jeffhao725a9572012-11-13 18:20:12 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
Ian Rogersc7dd2952014-10-21 23:31:19 -070021#include <sstream>
22
Ian Rogerse63db272014-07-15 15:36:11 -070023#include "arch/context.h"
Ian Rogersef7d42f2014-01-06 12:55:46 -080024#include "atomic.h"
Elliott Hughes76160052012-12-12 16:31:20 -080025#include "base/unix_file/fd_file.h"
jeffhao725a9572012-11-13 18:20:12 -080026#include "class_linker.h"
27#include "debugger.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080028#include "dex_file-inl.h"
Ian Rogersc7dd2952014-10-21 23:31:19 -070029#include "entrypoints/quick/quick_entrypoints.h"
Mathieu Chartierd8891782014-03-02 13:28:37 -080030#include "entrypoints/quick/quick_alloc_entrypoints.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070031#include "entrypoints/runtime_asm_entrypoints.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070032#include "gc_root-inl.h"
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010033#include "interpreter/interpreter.h"
Brian Carlstromea46f952013-07-30 01:26:50 -070034#include "mirror/art_method-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080035#include "mirror/class-inl.h"
36#include "mirror/dex_cache.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080037#include "mirror/object_array-inl.h"
Ian Rogers4f6ad8a2013-03-18 15:27:28 -070038#include "mirror/object-inl.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080039#include "nth_caller_visitor.h"
jeffhao725a9572012-11-13 18:20:12 -080040#include "os.h"
41#include "scoped_thread_state_change.h"
42#include "thread.h"
43#include "thread_list.h"
jeffhao725a9572012-11-13 18:20:12 -080044
45namespace art {
Ian Rogersfa824272013-11-05 16:12:57 -080046
Ian Rogers62d6c772013-02-27 08:32:07 -080047namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080048
// When true, every install/remove of instrumentation stubs logs per-frame details via LOG(INFO).
// Compile-time constant so the logging branches fold away in normal builds.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
Ian Rogers816432e2013-09-06 15:47:45 -070055
Ian Rogers62d6c772013-02-27 08:32:07 -080056static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
Sebastien Hertza8a697f2015-01-15 12:28:47 +010057 EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers62d6c772013-02-27 08:32:07 -080058 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
Sebastien Hertza10aa372015-01-21 17:30:58 +010059 instrumentation->InstallStubsForClass(klass);
60 return true; // we visit all classes.
Ian Rogers62d6c772013-02-27 08:32:07 -080061}
62
// Constructs an Instrumentation with no stubs installed and no listeners registered.
// All "have listener" fast-path flags start false and the interpreter starts on the
// main (uninstrumented) handler table; listeners flip these via AddListener.
Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}
76
Sebastien Hertza10aa372015-01-21 17:30:58 +010077void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
Sebastien Hertza8a697f2015-01-15 12:28:47 +010078 if (klass->IsErroneous()) {
79 // We can't execute code in a erroneous class: do nothing.
80 } else if (!klass->IsResolved()) {
81 // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
82 // could not be initialized or linked with regards to class inheritance.
83 } else {
84 for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
85 InstallStubsForMethod(klass->GetDirectMethod(i));
86 }
87 for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
88 InstallStubsForMethod(klass->GetVirtualMethod(i));
89 }
jeffhao725a9572012-11-13 18:20:12 -080090 }
jeffhao725a9572012-11-13 18:20:12 -080091}
92
Elliott Hughes956af0f2014-12-11 14:34:28 -080093static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code)
Ian Rogersef7d42f2014-01-06 12:55:46 -080094 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogersef7d42f2014-01-06 12:55:46 -080095 method->SetEntryPointFromQuickCompiledCode(quick_code);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010096 if (!method->IsResolutionMethod()) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -070097 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Ian Rogers6f3dbba2014-10-14 17:41:57 -070098 if (class_linker->IsQuickToInterpreterBridge(quick_code) ||
99 (class_linker->IsQuickResolutionStub(quick_code) &&
100 Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly() &&
101 !method->IsNative() && !method->IsProxyMethod())) {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800102 DCHECK(!method->IsNative()) << PrettyMethod(method);
Hiroshi Yamauchi563b47c2014-02-28 17:18:37 -0800103 DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700104 method->SetEntryPointFromInterpreter(art::artInterpreterToInterpreterBridge);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100105 } else {
106 method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
107 }
108 }
109}
110
// Chooses and installs the correct quick-code entrypoint for |method| based on the
// current instrumentation state (entry/exit stubs, interpreter stubs, forced
// interpret-only, per-method deoptimization). Abstract/proxy methods and
// Proxy.<init> are never touched.
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  // "uninstall" means no instrumentation of any kind is requested: restore the
  // method's normal entrypoints.
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      // Method must still run in the interpreter (forced, or individually deoptimized).
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      // Static method of an uninitialized class: leave resolution to the stub.
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(!class_linker->IsQuickToInterpreterBridge(new_quick_code));
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}
155
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack, recording one InstrumentationStackFrame per quick frame
  // (patching its return PC to the exit stub) and one shadow entry per interpreter
  // frame, while collecting the dex pc of each call site in dex_pcs_.
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        // Interpreter (shadow) frame: no return PC to patch. Record it so method
        // enter events can later be posted in the right order relative to quick frames.
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          // Runtime method whose caller was already instrumented: consume the
          // matching existing instrumentation frame.
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        // New instrumentation frames must appear before existing ones; finding an
        // uninstrumented frame below an instrumented one would corrupt ordering.
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // The caller's dex pc is derived from the previous frame's return PC.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;   // Interpreter frames seen, outermost last.
    std::vector<uint32_t> dex_pcs_;                         // Call-site dex pcs, innermost first.
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    // Shadow (interpreter) and quick frames are merged by frame id so events fire
    // in true outermost-to-innermost order.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}
298
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack, restoring each quick frame's original return PC from
  // the matching InstrumentationStackFrame (matched by frame id) and counting how
  // many frames were restored so the caller can verify completeness.
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, NULL), thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Interpreter-entry frames were pushed for the callee-save runtime method,
            // not the Java method itself.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every recorded instrumentation frame must have been restored.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
385
Ian Rogers62d6c772013-02-27 08:32:07 -0800386void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
387 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
Ian Rogers62d6c772013-02-27 08:32:07 -0800388 if ((events & kMethodEntered) != 0) {
389 method_entry_listeners_.push_back(listener);
Ian Rogers62d6c772013-02-27 08:32:07 -0800390 have_method_entry_listeners_ = true;
391 }
392 if ((events & kMethodExited) != 0) {
393 method_exit_listeners_.push_back(listener);
Ian Rogers62d6c772013-02-27 08:32:07 -0800394 have_method_exit_listeners_ = true;
395 }
396 if ((events & kMethodUnwind) != 0) {
397 method_unwind_listeners_.push_back(listener);
398 have_method_unwind_listeners_ = true;
399 }
400 if ((events & kDexPcMoved) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200401 std::list<InstrumentationListener*>* modified;
402 if (have_dex_pc_listeners_) {
403 modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
404 } else {
405 modified = new std::list<InstrumentationListener*>();
406 }
407 modified->push_back(listener);
408 dex_pc_listeners_.reset(modified);
Ian Rogers62d6c772013-02-27 08:32:07 -0800409 have_dex_pc_listeners_ = true;
410 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200411 if ((events & kFieldRead) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200412 std::list<InstrumentationListener*>* modified;
413 if (have_field_read_listeners_) {
414 modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
415 } else {
416 modified = new std::list<InstrumentationListener*>();
417 }
418 modified->push_back(listener);
419 field_read_listeners_.reset(modified);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200420 have_field_read_listeners_ = true;
421 }
422 if ((events & kFieldWritten) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200423 std::list<InstrumentationListener*>* modified;
424 if (have_field_write_listeners_) {
425 modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
426 } else {
427 modified = new std::list<InstrumentationListener*>();
428 }
429 modified->push_back(listener);
430 field_write_listeners_.reset(modified);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200431 have_field_write_listeners_ = true;
432 }
Jeff Hao14dd5a82013-04-11 10:23:36 -0700433 if ((events & kExceptionCaught) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200434 std::list<InstrumentationListener*>* modified;
435 if (have_exception_caught_listeners_) {
436 modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
437 } else {
438 modified = new std::list<InstrumentationListener*>();
439 }
440 modified->push_back(listener);
441 exception_caught_listeners_.reset(modified);
Jeff Hao14dd5a82013-04-11 10:23:36 -0700442 have_exception_caught_listeners_ = true;
443 }
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200444 UpdateInterpreterHandlerTable();
jeffhao725a9572012-11-13 18:20:12 -0800445}
446
Ian Rogers62d6c772013-02-27 08:32:07 -0800447void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
448 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
Ian Rogers62d6c772013-02-27 08:32:07 -0800449
450 if ((events & kMethodEntered) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200451 if (have_method_entry_listeners_) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800452 method_entry_listeners_.remove(listener);
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200453 have_method_entry_listeners_ = !method_entry_listeners_.empty();
Ian Rogers62d6c772013-02-27 08:32:07 -0800454 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800455 }
456 if ((events & kMethodExited) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200457 if (have_method_exit_listeners_) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800458 method_exit_listeners_.remove(listener);
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200459 have_method_exit_listeners_ = !method_exit_listeners_.empty();
Ian Rogers62d6c772013-02-27 08:32:07 -0800460 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800461 }
462 if ((events & kMethodUnwind) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200463 if (have_method_unwind_listeners_) {
464 method_unwind_listeners_.remove(listener);
465 have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
466 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800467 }
468 if ((events & kDexPcMoved) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200469 if (have_dex_pc_listeners_) {
470 std::list<InstrumentationListener*>* modified =
471 new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
472 modified->remove(listener);
473 have_dex_pc_listeners_ = !modified->empty();
474 if (have_dex_pc_listeners_) {
475 dex_pc_listeners_.reset(modified);
476 } else {
477 dex_pc_listeners_.reset();
478 delete modified;
479 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800480 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800481 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200482 if ((events & kFieldRead) != 0) {
Daniel Mihalyi66445212014-08-21 15:57:25 +0200483 if (have_field_read_listeners_) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200484 std::list<InstrumentationListener*>* modified =
485 new std::list<InstrumentationListener*>(*field_read_listeners_.get());
486 modified->remove(listener);
487 have_field_read_listeners_ = !modified->empty();
488 if (have_field_read_listeners_) {
489 field_read_listeners_.reset(modified);
490 } else {
491 field_read_listeners_.reset();
492 delete modified;
493 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200494 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200495 }
496 if ((events & kFieldWritten) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200497 if (have_field_write_listeners_) {
498 std::list<InstrumentationListener*>* modified =
499 new std::list<InstrumentationListener*>(*field_write_listeners_.get());
500 modified->remove(listener);
501 have_field_write_listeners_ = !modified->empty();
502 if (have_field_write_listeners_) {
503 field_write_listeners_.reset(modified);
504 } else {
505 field_write_listeners_.reset();
506 delete modified;
507 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200508 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200509 }
Jeff Hao14dd5a82013-04-11 10:23:36 -0700510 if ((events & kExceptionCaught) != 0) {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200511 if (have_exception_caught_listeners_) {
512 std::list<InstrumentationListener*>* modified =
513 new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
514 modified->remove(listener);
515 have_exception_caught_listeners_ = !modified->empty();
516 if (have_exception_caught_listeners_) {
517 exception_caught_listeners_.reset(modified);
518 } else {
519 exception_caught_listeners_.reset();
520 delete modified;
521 }
522 }
Jeff Hao14dd5a82013-04-11 10:23:36 -0700523 }
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200524 UpdateInterpreterHandlerTable();
jeffhao725a9572012-11-13 18:20:12 -0800525}
526
// Moves the runtime between instrumentation levels:
//   0 = no stubs, 1 = entry/exit stubs, 2 = full interpreter.
// Re-stubs every loaded class, then installs or restores per-thread stack state.
// Requires the mutator lock exclusively held and thread_list_lock not held.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    // Raising instrumentation: install the stronger stub kind, re-stub all
    // classes, then patch every thread's stack with the exit stub.
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    // Lowering to no instrumentation: restore normal entrypoints.
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
581
Ian Rogersfa824272013-11-05 16:12:57 -0800582static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700583 UNUSED(arg);
Ian Rogersfa824272013-11-05 16:12:57 -0800584 thread->ResetQuickAllocEntryPointsForThread();
585}
586
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700587void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
588 Thread* self = Thread::Current();
Mathieu Chartier661974a2014-01-09 11:23:53 -0800589 Runtime* runtime = Runtime::Current();
590 ThreadList* tl = runtime->GetThreadList();
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700591 Locks::mutator_lock_->AssertNotHeld(self);
592 Locks::instrument_entrypoints_lock_->AssertHeld(self);
593 if (runtime->IsStarted()) {
Mathieu Chartier661974a2014-01-09 11:23:53 -0800594 tl->SuspendAll();
595 }
596 {
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700597 MutexLock mu(self, *Locks::runtime_shutdown_lock_);
Mathieu Chartier661974a2014-01-09 11:23:53 -0800598 SetQuickAllocEntryPointsInstrumented(instrumented);
599 ResetQuickAllocEntryPoints();
600 }
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700601 if (runtime->IsStarted()) {
Mathieu Chartier661974a2014-01-09 11:23:53 -0800602 tl->ResumeAll();
603 }
604}
605
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700606void Instrumentation::InstrumentQuickAllocEntryPoints() {
607 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
608 InstrumentQuickAllocEntryPointsLocked();
Ian Rogersfa824272013-11-05 16:12:57 -0800609}
610
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700611void Instrumentation::UninstrumentQuickAllocEntryPoints() {
612 MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
613 UninstrumentQuickAllocEntryPointsLocked();
614}
615
616void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
617 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
618 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
619 SetEntrypointsInstrumented(true);
Mathieu Chartiercbb2d202013-11-14 17:45:16 -0800620 }
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700621 ++quick_alloc_entry_points_instrumentation_counter_;
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700622}
623
624void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
625 Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
626 CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
627 --quick_alloc_entry_points_instrumentation_counter_;
628 if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
629 SetEntrypointsInstrumented(false);
630 }
Mathieu Chartiercbb2d202013-11-14 17:45:16 -0800631}
632
633void Instrumentation::ResetQuickAllocEntryPoints() {
634 Runtime* runtime = Runtime::Current();
635 if (runtime->IsStarted()) {
Mathieu Chartiere6da9af2013-12-16 11:54:42 -0800636 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
637 runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
Ian Rogersfa824272013-11-05 16:12:57 -0800638 }
639}
640
Elliott Hughes956af0f2014-12-11 14:34:28 -0800641void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code) {
Sebastien Hertza8a697f2015-01-15 12:28:47 +0100642 DCHECK(method->GetDeclaringClass()->IsResolved());
Ian Rogersef7d42f2014-01-06 12:55:46 -0800643 const void* new_quick_code;
Ian Rogers62d6c772013-02-27 08:32:07 -0800644 if (LIKELY(!instrumentation_stubs_installed_)) {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800645 new_quick_code = quick_code;
Jeff Hao65d15d92013-07-16 16:39:33 -0700646 } else {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100647 if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800648 new_quick_code = GetQuickToInterpreterBridge();
Jeff Hao65d15d92013-07-16 16:39:33 -0700649 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700650 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700651 if (class_linker->IsQuickResolutionStub(quick_code) ||
652 class_linker->IsQuickToInterpreterBridge(quick_code)) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700653 new_quick_code = quick_code;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700654 } else if (entry_exit_stubs_installed_) {
655 new_quick_code = GetQuickInstrumentationEntryPoint();
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700656 } else {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700657 new_quick_code = quick_code;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700658 }
Jeff Hao65d15d92013-07-16 16:39:33 -0700659 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800660 }
Elliott Hughes956af0f2014-12-11 14:34:28 -0800661 UpdateEntrypoints(method, new_quick_code);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100662}
663
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700664bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
665 // Note that the insert() below isn't read barrier-aware. So, this
666 // FindDeoptimizedMethod() call is necessary or else we would end up
667 // storing the same method twice in the map (the from-space and the
668 // to-space ones).
669 if (FindDeoptimizedMethod(method)) {
670 // Already in the map. Return.
671 return false;
672 }
673 // Not found. Add it.
Mathieu Chartier4c4d6092015-01-22 17:02:27 -0800674 static_assert(!kMovingMethods, "Not safe if methods can move");
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700675 int32_t hash_code = method->IdentityHashCode();
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700676 deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700677 return true;
678}
679
680bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
Mathieu Chartier4c4d6092015-01-22 17:02:27 -0800681 static_assert(!kMovingMethods, "Not safe if methods can move");
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700682 int32_t hash_code = method->IdentityHashCode();
683 auto range = deoptimized_methods_.equal_range(hash_code);
684 for (auto it = range.first; it != range.second; ++it) {
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700685 mirror::ArtMethod* m = it->second.Read();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700686 if (m == method) {
687 // Found.
688 return true;
689 }
690 }
691 // Not found.
692 return false;
693}
694
695mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
696 auto it = deoptimized_methods_.begin();
697 if (it == deoptimized_methods_.end()) {
698 // Empty.
699 return nullptr;
700 }
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700701 return it->second.Read();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700702}
703
704bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
Mathieu Chartier4c4d6092015-01-22 17:02:27 -0800705 static_assert(!kMovingMethods, "Not safe if methods can move");
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700706 int32_t hash_code = method->IdentityHashCode();
707 auto range = deoptimized_methods_.equal_range(hash_code);
708 for (auto it = range.first; it != range.second; ++it) {
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -0700709 mirror::ArtMethod* m = it->second.Read();
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700710 if (m == method) {
711 // Found. Erase and return.
712 deoptimized_methods_.erase(it);
713 return true;
714 }
715 }
716 // Not found.
717 return false;
718}
719
// Returns whether no method is currently selectively deoptimized.
// Callers are expected to hold deoptimized_methods_lock_.
bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}
723
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100724void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
725 CHECK(!method->IsNative());
726 CHECK(!method->IsProxyMethod());
727 CHECK(!method->IsAbstract());
728
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700729 Thread* self = Thread::Current();
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700730 {
731 WriterMutexLock mu(self, deoptimized_methods_lock_);
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700732 bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200733 CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
734 << " is already deoptimized";
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700735 }
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100736 if (!interpreter_stubs_installed_) {
Elliott Hughes956af0f2014-12-11 14:34:28 -0800737 UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100738
739 // Install instrumentation exit stub and instrumentation frames. We may already have installed
740 // these previously so it will only cover the newly created frames.
741 instrumentation_stubs_installed_ = true;
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700742 MutexLock mu(self, *Locks::thread_list_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100743 Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
744 }
745}
746
// Reverts the selective deoptimization of |method|, restoring its regular entry
// point. Aborts if the method was not deoptimized.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    // Remove under the writer lock and note whether any deoptimized methods
    // remain; the lock is released before the thread-list work below.
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // Uninitialized class: the method must go back through the resolution
      // stub so class initialization is triggered on the next call.
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
782
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700783bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100784 DCHECK(method != nullptr);
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700785 ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
786 return FindDeoptimizedMethod(method);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100787}
788
// Turns on deoptimization support (used by the debugger). Expects a clean
// state: no method deoptimized yet and support not already enabled.
void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}
795
// Turns off deoptimization support, undoing both full deoptimization and every
// selectively deoptimized method.
void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      // Take the lock only to peek at one method; Undeoptimize() acquires the
      // writer lock itself, so it must be called with the lock released.
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}
817
Sebastien Hertz11d40c22014-02-19 18:00:17 +0100818// Indicates if instrumentation should notify method enter/exit events to the listeners.
819bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
Sebastien Hertz7ec2f1c2014-03-27 20:06:47 +0100820 return !deoptimization_enabled_ && !interpreter_stubs_installed_;
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100821}
822
// Forces every method to run under the interpreter. Must not already be in
// full-deoptimization mode.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
827
// Leaves full-deoptimization mode, removing the interpreter stubs. Must
// currently be in full-deoptimization mode.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
832
833void Instrumentation::EnableMethodTracing() {
834 bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
835 ConfigureStubs(!require_interpreter, require_interpreter);
836}
837
// Disables method tracing: drops back to no instrumentation stubs.
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
841
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800842const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method, size_t pointer_size) const {
Ian Rogers62d6c772013-02-27 08:32:07 -0800843 Runtime* runtime = Runtime::Current();
844 if (LIKELY(!instrumentation_stubs_installed_)) {
Mathieu Chartiera7dd0382014-11-20 17:08:58 -0800845 const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
Vladimir Marko8a630572014-04-09 18:45:35 +0100846 DCHECK(code != nullptr);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700847 ClassLinker* class_linker = runtime->GetClassLinker();
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700848 if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
849 !class_linker->IsQuickToInterpreterBridge(code)) &&
850 !class_linker->IsQuickResolutionStub(code) &&
851 !class_linker->IsQuickToInterpreterBridge(code)) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800852 return code;
853 }
854 }
Ian Rogersef7d42f2014-01-06 12:55:46 -0800855 return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
jeffhao725a9572012-11-13 18:20:12 -0800856}
857
Ian Rogers62d6c772013-02-27 08:32:07 -0800858void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800859 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800860 uint32_t dex_pc) const {
Mathieu Chartier02e25112013-08-14 16:14:24 -0700861 auto it = method_entry_listeners_.begin();
Jeff Hao65d15d92013-07-16 16:39:33 -0700862 bool is_end = (it == method_entry_listeners_.end());
863 // Implemented this way to prevent problems caused by modification of the list while iterating.
864 while (!is_end) {
865 InstrumentationListener* cur = *it;
866 ++it;
867 is_end = (it == method_entry_listeners_.end());
868 cur->MethodEntered(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800869 }
870}
871
// Dispatches a method-exit event (with the method's return value) to every
// registered listener.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  // Advance the iterator before the callback so a self-unregistering listener
  // cannot invalidate it mid-iteration.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
885
// Dispatches a method-unwind event (method popped by exception, not by return)
// to every registered listener.
void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}
895
896void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
Ian Rogersef7d42f2014-01-06 12:55:46 -0800897 mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800898 uint32_t dex_pc) const {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200899 if (HasDexPcListeners()) {
900 std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
901 for (InstrumentationListener* listener : *original.get()) {
902 listener->DexPcMoved(thread, this_object, method, dex_pc);
903 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800904 }
905}
906
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200907void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
908 mirror::ArtMethod* method, uint32_t dex_pc,
909 mirror::ArtField* field) const {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200910 if (HasFieldReadListeners()) {
911 std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
912 for (InstrumentationListener* listener : *original.get()) {
913 listener->FieldRead(thread, this_object, method, dex_pc, field);
914 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200915 }
916}
917
918void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
919 mirror::ArtMethod* method, uint32_t dex_pc,
920 mirror::ArtField* field, const JValue& field_value) const {
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200921 if (HasFieldWriteListeners()) {
922 std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
923 for (InstrumentationListener* listener : *original.get()) {
924 listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
925 }
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200926 }
927}
928
Ian Rogers62d6c772013-02-27 08:32:07 -0800929void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
Brian Carlstromea46f952013-07-30 01:26:50 -0700930 mirror::ArtMethod* catch_method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800931 uint32_t catch_dex_pc,
Sebastien Hertz947ff082013-09-17 14:10:13 +0200932 mirror::Throwable* exception_object) const {
Sebastien Hertz9f102032014-05-23 08:59:42 +0200933 if (HasExceptionCaughtListeners()) {
934 DCHECK_EQ(thread->GetException(nullptr), exception_object);
935 bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
Jeff Haoc0bd4da2013-04-11 15:52:28 -0700936 thread->ClearException();
Daniel Mihalyica1d06c2014-08-18 18:45:31 +0200937 std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
938 for (InstrumentationListener* listener : *original.get()) {
939 listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
940 exception_object);
Ian Rogers62d6c772013-02-27 08:32:07 -0800941 }
Jeff Haoc0bd4da2013-04-11 15:52:28 -0700942 thread->SetException(throw_location, exception_object);
Sebastien Hertz9f102032014-05-23 08:59:42 +0200943 thread->SetExceptionReportedToInstrumentation(is_exception_reported);
Ian Rogers62d6c772013-02-27 08:32:07 -0800944 }
945}
946
947static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
948 int delta)
949 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
950 size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
951 if (frame_id != instrumentation_frame.frame_id_) {
952 LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
953 << instrumentation_frame.frame_id_;
954 StackVisitor::DescribeStack(self);
955 CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
956 }
957}
958
// Pushes an instrumentation frame recording the caller's return address |lr|
// (so the exit stub can restore it later), then fires the method-enter event.
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  // Interpreter entries get their enter event from the interpreter itself;
  // avoid reporting it twice.
  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}
976
Andreas Gamped58342c2014-06-05 14:18:08 -0700977TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
978 uint64_t gpr_result,
979 uint64_t fpr_result) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800980 // Do the pop.
981 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
982 CHECK_GT(stack->size(), 0U);
983 InstrumentationStackFrame instrumentation_frame = stack->front();
984 stack->pop_front();
985
986 // Set return PC and check the sanity of the stack.
987 *return_pc = instrumentation_frame.return_pc_;
988 CheckStackDepth(self, instrumentation_frame, 0);
Ian Rogers1d8cdbc2014-09-22 22:51:09 -0700989 self->VerifyStack();
Ian Rogers62d6c772013-02-27 08:32:07 -0800990
Brian Carlstromea46f952013-07-30 01:26:50 -0700991 mirror::ArtMethod* method = instrumentation_frame.method_;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700992 uint32_t length;
993 char return_shorty = method->GetShorty(&length)[0];
Ian Rogers62d6c772013-02-27 08:32:07 -0800994 JValue return_value;
995 if (return_shorty == 'V') {
996 return_value.SetJ(0);
997 } else if (return_shorty == 'F' || return_shorty == 'D') {
998 return_value.SetJ(fpr_result);
999 } else {
1000 return_value.SetJ(gpr_result);
1001 }
1002 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1003 // return_pc.
1004 uint32_t dex_pc = DexFile::kDexNoIndex;
1005 mirror::Object* this_object = instrumentation_frame.this_object_;
Sebastien Hertz320deb22014-06-11 19:45:05 +02001006 if (!instrumentation_frame.interpreter_entry_) {
1007 MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
1008 }
jeffhao725a9572012-11-13 18:20:12 -08001009
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001010 // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
1011 // back to an upcall.
1012 NthCallerVisitor visitor(self, 1, true);
1013 visitor.WalkStack(true);
Sebastien Hertz270a0e12015-01-16 19:49:09 +01001014 bool deoptimize = (visitor.caller != nullptr) &&
Sebastien Hertz138dbfc2013-12-04 18:15:25 +01001015 (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
Ian Rogers62d6c772013-02-27 08:32:07 -08001016 if (deoptimize) {
1017 if (kVerboseInstrumentation) {
Sebastien Hertz270a0e12015-01-16 19:49:09 +01001018 LOG(INFO) << StringPrintf("Deoptimizing %s by returning from %s with result %#" PRIx64 " in ",
1019 PrettyMethod(visitor.caller).c_str(),
1020 PrettyMethod(method).c_str(),
1021 return_value.GetJ()) << *self;
Ian Rogers62d6c772013-02-27 08:32:07 -08001022 }
1023 self->SetDeoptimizationReturnValue(return_value);
Andreas Gamped58342c2014-06-05 14:18:08 -07001024 return GetTwoWordSuccessValue(*return_pc,
1025 reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
Ian Rogers62d6c772013-02-27 08:32:07 -08001026 } else {
1027 if (kVerboseInstrumentation) {
Brian Carlstrom2d888622013-07-18 17:02:00 -07001028 LOG(INFO) << "Returning from " << PrettyMethod(method)
1029 << " to PC " << reinterpret_cast<void*>(*return_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -08001030 }
Andreas Gamped58342c2014-06-05 14:18:08 -07001031 return GetTwoWordSuccessValue(0, *return_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -08001032 }
jeffhao725a9572012-11-13 18:20:12 -08001033}
1034
Ian Rogers62d6c772013-02-27 08:32:07 -08001035void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
1036 // Do the pop.
1037 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
1038 CHECK_GT(stack->size(), 0U);
1039 InstrumentationStackFrame instrumentation_frame = stack->front();
1040 // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
1041 stack->pop_front();
1042
Brian Carlstromea46f952013-07-30 01:26:50 -07001043 mirror::ArtMethod* method = instrumentation_frame.method_;
Ian Rogers62d6c772013-02-27 08:32:07 -08001044 if (is_deoptimization) {
1045 if (kVerboseInstrumentation) {
1046 LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
1047 }
1048 } else {
1049 if (kVerboseInstrumentation) {
1050 LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
1051 }
1052
1053 // Notify listeners of method unwind.
1054 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
1055 // return_pc.
1056 uint32_t dex_pc = DexFile::kDexNoIndex;
1057 MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
1058 }
1059}
1060
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001061void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
1062 WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001063 if (IsDeoptimizedMethodsEmpty()) {
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001064 return;
1065 }
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -07001066 for (auto pair : deoptimized_methods_) {
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -08001067 pair.second.VisitRoot(callback, arg, RootInfo(kRootVMInternal));
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001068 }
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -07001069}
1070
Ian Rogers62d6c772013-02-27 08:32:07 -08001071std::string InstrumentationStackFrame::Dump() const {
1072 std::ostringstream os;
1073 os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
1074 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
1075 return os.str();
1076}
1077
1078} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -08001079} // namespace art