/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include <sstream>

#include "arch/context.h"
#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_entrypoints.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc_root-inl.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  instrumentation->InstallStubsForClass(klass);
  return true;  // we visit all classes.
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false), have_backward_branch_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

void Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  if (klass->IsErroneous()) {
    // We can't execute code in an erroneous class: do nothing.
  } else if (!klass->IsResolved()) {
    // We need the class to be resolved to install/uninstall stubs. Otherwise its methods
    // could not be initialized or linked with regard to class inheritance.
  } else {
    for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
      InstallStubsForMethod(klass->GetDirectMethod(i));
    }
    for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
      InstallStubsForMethod(klass->GetVirtualMethod(i));
    }
  }
}

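// Installs the given quick code as the method's entry point, saving any JIT-compiled code it
// replaces (so ClassLinker::GetQuickOatCodeFor can still return it) and keeping the interpreter
// entry point consistent with the new quick code.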
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Runtime* const runtime = Runtime::Current();
  jit::Jit* jit = runtime->GetJit();
  if (jit != nullptr) {
    const void* old_code_ptr = method->GetEntryPointFromQuickCompiledCode();
    jit::JitCodeCache* code_cache = jit->GetCodeCache();
    if (code_cache->ContainsCodePtr(old_code_ptr)) {
      // Save the old compiled code since we need it to implement ClassLinker::GetQuickOatCodeFor.
      code_cache->SaveCompiledCode(method, old_code_ptr);
    }
  }
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (class_linker->IsQuickToInterpreterBridge(quick_code) ||
        (class_linker->IsQuickResolutionStub(quick_code) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly() &&
         !method->IsNative() && !method->IsProxyMethod())) {
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

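// Chooses and installs the quick code a method should run with, depending on whether
// instrumentation stubs are being installed or removed, whether the method (or everything) is
// deoptimized, and whether its declaring class has been initialized yet.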
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  Runtime* const runtime = Runtime::Current();
  ClassLinker* const class_linker = runtime->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push any new instrumentation frames
// before the existing ones.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == nullptr) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame so the method enter event must have been reported.
          // However we need to push a DEX pc into the dex_pcs_ list to match the size of the
          // instrumentation stack. Since we won't report method entry here, we can safely push
          // any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with the instrumentation exit
        // stub. We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to prevent posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, nullptr), thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == nullptr) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to prevent posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

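// Registers a listener for the given event mask under the exclusive mutator lock. The listener
// lists that are read without a lock (dex pc, field and exception events) are replaced
// copy-on-write so concurrent readers keep iterating a stable snapshot.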
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kBackwardBranch) != 0) {
    backward_branch_listeners_.push_back(listener);
    have_backward_branch_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

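// Unregisters a listener from the given event mask, again under the exclusive mutator lock. The
// copy-on-write lists are swapped out (or dropped once they become empty) rather than mutated in
// place.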
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    if (have_method_entry_listeners_) {
      method_entry_listeners_.remove(listener);
      have_method_entry_listeners_ = !method_entry_listeners_.empty();
    }
  }
  if ((events & kMethodExited) != 0) {
    if (have_method_exit_listeners_) {
      method_exit_listeners_.remove(listener);
      have_method_exit_listeners_ = !method_exit_listeners_.empty();
    }
  }
  if ((events & kMethodUnwind) != 0) {
    if (have_method_unwind_listeners_) {
      method_unwind_listeners_.remove(listener);
      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
    }
  }
  if ((events & kDexPcMoved) != 0) {
    if (have_dex_pc_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
      modified->remove(listener);
      have_dex_pc_listeners_ = !modified->empty();
      if (have_dex_pc_listeners_) {
        dex_pc_listeners_.reset(modified);
      } else {
        dex_pc_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldRead) != 0) {
    if (have_field_read_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
      modified->remove(listener);
      have_field_read_listeners_ = !modified->empty();
      if (have_field_read_listeners_) {
        field_read_listeners_.reset(modified);
      } else {
        field_read_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldWritten) != 0) {
    if (have_field_write_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
      modified->remove(listener);
      have_field_write_listeners_ = !modified->empty();
      if (have_field_write_listeners_) {
        field_write_listeners_.reset(modified);
      } else {
        field_write_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kExceptionCaught) != 0) {
    if (have_exception_caught_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
      modified->remove(listener);
      have_exception_caught_listeners_ = !modified->empty();
      if (have_exception_caught_listeners_) {
        exception_caught_listeners_.reset(modified);
      } else {
        exception_caught_listeners_.reset();
        delete modified;
      }
    }
  }
  UpdateInterpreterHandlerTable();
}

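// Computes the instrumentation level required by the caller (0 = none, 1 = entry/exit stubs,
// 2 = interpreter) and installs or removes stubs and per-thread instrumentation frames to match.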
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::mutator_lock_->AssertExclusiveHeld(self);
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  UNUSED(arg);
  thread->ResetQuickAllocEntryPointsForThread();
}

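// Swaps every thread's allocation entrypoints between the instrumented and default versions. All
// threads are suspended for the swap (once the runtime has started) so no thread keeps allocating
// through a stale entrypoint.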
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    tl->SuspendAll(__FUNCTION__);
  }
  {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, nullptr);
  }
}

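// Installs new quick code for a method while respecting the current instrumentation state, so
// entry/exit stubs and the interpreter bridge are not accidentally overwritten.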
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code) {
  DCHECK(method->GetDeclaringClass()->IsResolved());
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

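// Bookkeeping for individually deoptimized methods. The table is a multimap keyed by the method's
// identity hash code and stores GcRoots so the entries can be reported to the GC in VisitRoots.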
bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  static_assert(!kMovingMethods, "Not safe if methods can move");
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  static_assert(!kMovingMethods, "Not safe if methods can move");
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  static_assert(!kMovingMethods, "Not safe if methods can move");
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

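// Forces the given method onto the interpreter path: it is recorded in the deoptimized-method
// table, its entry point is redirected to the instrumentation entry stub, and exit stubs are
// installed on every thread's stack.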
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
                                    << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

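// Reverses Deoptimize() for a single method, restoring its compiled code (or the resolution
// trampoline for uninitialized static methods) and removing exit stubs once no deoptimized
// methods remain.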
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
                            << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

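// Method tracing relies on method entry/exit events: install entry/exit stubs, or interpreter
// stubs when the caller explicitly requires the interpreter.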
void Instrumentation::EnableMethodTracing(bool require_interpreter) {
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method, size_t pointer_size) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
               !class_linker->IsQuickToInterpreterBridge(code))) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

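// Event dispatch to registered listeners. The *EventImpl methods are intended to be reached only
// after the corresponding have_*_listeners_ flag has been checked by their inline wrappers.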
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  if (HasDexPcListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->DexPcMoved(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::BackwardBranchImpl(Thread* thread, mirror::ArtMethod* method,
                                         int32_t offset) const {
  for (InstrumentationListener* listener : backward_branch_listeners_) {
    listener->BackwardBranch(thread, method, offset);
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         ArtField* field) const {
  if (HasFieldReadListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldRead(thread, this_object, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          ArtField* field, const JValue& field_value) const {
  if (HasFieldWriteListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
    }
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(), exception_object);
    thread->ClearException();
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, exception_object);
    }
    thread->SetException(exception_object);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

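// Typically reached from the quick instrumentation entry point: records the caller's return
// address (lr) and frame id on the thread's instrumentation stack and reports the method entry
// event unless this is an interpreter entry.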
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

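// Matches PushInstrumentationStackFrame: pops the frame, reports the method exit event and either
// returns to the saved return PC or requests deoptimization when the caller must continue in the
// interpreter.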
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller) ||
                     Dbg::IsForcedInterpreterNeededForUpcall(self, visitor.caller));
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << StringPrintf("Deoptimizing %s by returning from %s with result %#" PRIx64 " in ",
                                PrettyMethod(visitor.caller).c_str(),
                                PrettyMethod(method).c_str(),
                                return_value.GetJ()) << *self;
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

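// Reports the entries of the deoptimized-method table to the garbage collector as VM-internal
// roots so they are kept alive.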
void Instrumentation::VisitRoots(RootVisitor* visitor) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  BufferedRootVisitor<kDefaultBufferedRootCount> roots(visitor, RootInfo(kRootVMInternal));
  for (auto pair : deoptimized_methods_) {
    roots.VisitRoot(pair.second);
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art