/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (quick_code == GetQuickToInterpreterBridge() ||
        quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
        (quick_code == class_linker->GetQuickResolutionTrampoline() &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == class_linker->GetQuickResolutionTrampoline()) {
          DCHECK(portable_code == class_linker->GetPortableResolutionTrampoline());
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  std::string temp;
  // Note that the Proxy class itself is not a proxy class.
  if (strcmp(method->GetDeclaringClass()->GetDescriptor(&temp), "Ljava/lang/reflect/Proxy;") == 0 &&
      method->IsConstructor()) {
    // Do not stub Proxy.<init>.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = class_linker->GetPortableResolutionTrampoline();
      new_quick_code = class_linker->GetQuickResolutionTrampoline();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, all its static methods code will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(new_quick_code != class_linker->GetQuickToInterpreterBridgeTrampoline());
        }
      } else {
        new_portable_code = class_linker->GetPortableResolutionTrampoline();
        new_quick_code = class_linker->GetQuickResolutionTrampoline();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = !m->IsPortableCompiled();
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so method enter event must have been reported. However we
          // need to push a DEX pc into the dex_pcs_ list to match size of instrumentation stack.
          // Since we won't report method entry here, we can safely push any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    if (have_method_entry_listeners_) {
      method_entry_listeners_.remove(listener);
      have_method_entry_listeners_ = !method_entry_listeners_.empty();
    }
  }
  if ((events & kMethodExited) != 0) {
    if (have_method_exit_listeners_) {
      method_exit_listeners_.remove(listener);
      have_method_exit_listeners_ = !method_exit_listeners_.empty();
    }
  }
  if ((events & kMethodUnwind) != 0) {
    if (have_method_unwind_listeners_) {
      method_unwind_listeners_.remove(listener);
      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
    }
  }
  if ((events & kDexPcMoved) != 0) {
    if (have_dex_pc_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
      modified->remove(listener);
      have_dex_pc_listeners_ = !modified->empty();
      if (have_dex_pc_listeners_) {
        dex_pc_listeners_.reset(modified);
      } else {
        dex_pc_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldRead) != 0) {
    if (have_field_read_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
      modified->remove(listener);
      have_field_read_listeners_ = !modified->empty();
      if (have_field_read_listeners_) {
        field_read_listeners_.reset(modified);
      } else {
        field_read_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldWritten) != 0) {
    if (have_field_write_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
      modified->remove(listener);
      have_field_write_listeners_ = !modified->empty();
      if (have_field_write_listeners_) {
        field_write_listeners_.reset(modified);
      } else {
        field_write_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kExceptionCaught) != 0) {
    if (have_exception_caught_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
      modified->remove(listener);
      have_exception_caught_listeners_ = !modified->empty();
      if (have_exception_caught_listeners_) {
        exception_caught_listeners_.reset(modified);
      } else {
        exception_caught_listeners_.reset();
        delete modified;
      }
    }
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  // should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAddSequentiallyConsistent(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  // should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.LoadSequentiallyConsistent(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSubSequentiallyConsistent(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (quick_code == class_linker->GetQuickResolutionTrampoline() ||
          quick_code == class_linker->GetQuickToInterpreterBridgeTrampoline() ||
          quick_code == GetQuickToInterpreterBridge()) {
        DCHECK((portable_code == class_linker->GetPortableResolutionTrampoline()) ||
               (portable_code == GetPortableToInterpreterBridge()));
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
        new_portable_code = GetPortableToInterpreterBridge();
        new_have_portable_code = false;
      } else {
        new_portable_code = portable_code;
        new_quick_code = quick_code;
        new_have_portable_code = have_portable_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint(), GetPortableToInterpreterBridge(),
                      false);

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      // TODO: we're updating to entrypoints in the image here, we can avoid the trampoline.
      UpdateEntrypoints(method, class_linker->GetQuickResolutionTrampoline(),
                        class_linker->GetPortableResolutionTrampoline(), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimized selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(code != class_linker->GetQuickResolutionTrampoline()) &&
        LIKELY(code != class_linker->GetQuickToInterpreterBridgeTrampoline()) &&
        LIKELY(code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  if (HasDexPcListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->DexPcMoved(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  if (HasFieldReadListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldRead(thread, this_object, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  if (HasFieldWriteListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
    }
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
                                exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  for (auto pair : deoptimized_methods_) {
    pair.second.VisitRoot(callback, arg, 0, kRootVMInternal);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art