/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
Ian Rogersc7dd2952014-10-21 23:31:19 -070021#include <sstream>
22
Ian Rogerse63db272014-07-15 15:36:11 -070023#include "arch/context.h"
Ian Rogersef7d42f2014-01-06 12:55:46 -080024#include "atomic.h"
Elliott Hughes76160052012-12-12 16:31:20 -080025#include "base/unix_file/fd_file.h"
jeffhao725a9572012-11-13 18:20:12 -080026#include "class_linker.h"
27#include "debugger.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080028#include "dex_file-inl.h"
Ian Rogersc7dd2952014-10-21 23:31:19 -070029#include "entrypoints/quick/quick_entrypoints.h"
Mathieu Chartierd8891782014-03-02 13:28:37 -080030#include "entrypoints/quick/quick_alloc_entrypoints.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070031#include "entrypoints/runtime_asm_entrypoints.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070032#include "gc_root-inl.h"
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010033#include "interpreter/interpreter.h"
Brian Carlstromea46f952013-07-30 01:26:50 -070034#include "mirror/art_method-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080035#include "mirror/class-inl.h"
36#include "mirror/dex_cache.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080037#include "mirror/object_array-inl.h"
Ian Rogers4f6ad8a2013-03-18 15:27:28 -070038#include "mirror/object-inl.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080039#include "nth_caller_visitor.h"
jeffhao725a9572012-11-13 18:20:12 -080040#include "os.h"
41#include "scoped_thread_state_change.h"
42#include "thread.h"
43#include "thread_list.h"
jeffhao725a9572012-11-13 18:20:12 -080044
namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_field_read_listeners_(false), have_field_write_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

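// Sets the method's quick-code entry point and keeps its interpreter entry point consistent with
// it: methods whose quick code is the quick-to-interpreter bridge (or the resolution stub under
// forced interpret-only) get the interpreter-to-interpreter bridge, everything else gets the
// interpreter-to-compiled-code bridge.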
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  if (!method->IsResolutionMethod()) {
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (class_linker->IsQuickToInterpreterBridge(quick_code) ||
        (class_linker->IsQuickResolutionStub(quick_code) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly() &&
         !method->IsNative() && !method->IsProxyMethod())) {
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

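// Chooses the quick-code entry point a method should use at the current instrumentation level:
// the quick-to-interpreter bridge when interpretation or deoptimization is required, the
// instrumentation entry point when only entry/exit stubs are needed, the resolution stub for
// static methods of uninitialized classes, and otherwise the method's own oat code.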
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  // Don't stub Proxy.<init>. Note that the Proxy class itself is not a proxy class.
  if (method->IsConstructor() &&
      method->GetDeclaringClass()->DescriptorEquals("Ljava/lang/reflect/Proxy;")) {
    return;
  }
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_quick_code = GetQuickResolutionStub();
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || forced_interpret_only_ || IsDeoptimized(method)) &&
        !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all its static methods will be set to the instrumentation entry point.
      // For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        if (entry_exit_stubs_installed_) {
          new_quick_code = GetQuickInstrumentationEntryPoint();
        } else {
          new_quick_code = class_linker->GetQuickOatCodeFor(method);
          DCHECK(!class_linker->IsQuickToInterpreterBridge(new_quick_code));
        }
      } else {
        new_quick_code = GetQuickResolutionStub();
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push a new instrumentation frame
// before existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread_in, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread_in, context),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (GetCurrentQuickFrame() == NULL) {
        bool interpreter_frame = true;
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, 0, GetFrameId(),
                                                        interpreter_frame);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing shadow frame " << instrumentation_frame.Dump();
        }
        shadow_stack_.push_back(instrumentation_frame);
        return true;  // Continue.
      }
      uintptr_t return_pc = GetReturnPc();
      if (m->IsRuntimeMethod()) {
        if (return_pc == instrumentation_exit_pc_) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Handling quick to interpreter transition. Frame " << GetFrameId();
          }
          CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
          const InstrumentationStackFrame& frame =
              instrumentation_stack_->at(instrumentation_stack_depth_);
          CHECK(frame.interpreter_entry_);
          // This is an interpreter frame, so the method enter event must have been reported.
          // However we need to push a DEX pc into the dex_pcs_ list to match the size of the
          // instrumentation stack. Since we won't report method entry here, we can safely push
          // any DEX pc.
          dex_pcs_.push_back(0);
          last_return_pc_ = frame.return_pc_;
          ++instrumentation_stack_depth_;
          return true;
        } else {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
          }
          last_return_pc_ = GetReturnPc();
          return true;  // Ignore unresolved methods since they will be instrumented after resolution.
        }
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame =
            instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame at the right position so we do not corrupt the instrumentation stack.
        // Instrumentation stack frames are in descending frame id order.
        auto it = instrumentation_stack_->begin();
        for (auto end = instrumentation_stack_->end(); it != end; ++it) {
          const InstrumentationStackFrame& current = *it;
          if (instrumentation_frame.frame_id_ >= current.frame_id_) {
            break;
          }
        }
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    std::vector<InstrumentationStackFrame> shadow_stack_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  std::unique_ptr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    auto ssi = visitor.shadow_stack_.rbegin();
    for (auto isi = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); isi != end; ++isi) {
      while (ssi != visitor.shadow_stack_.rend() && (*ssi).frame_id_ < (*isi).frame_id_) {
        instrumentation->MethodEnterEvent(thread, (*ssi).this_object_, (*ssi).method_, 0);
        ++ssi;
      }
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      if (!isi->interpreter_entry_) {
        instrumentation->MethodEnterEvent(thread, (*isi).this_object_, (*isi).method_, dex_pc);
      }
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread_in, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread_in, NULL), thread_(thread_in),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread_in->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
              << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      const size_t frameId = GetFrameId();
      for (const InstrumentationStackFrame& instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == frameId) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc =
        reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc());
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

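// Registers a listener for the requested events. The caller must hold the mutator lock
// exclusively. Listener lists that are read without that lock during event dispatch (DEX pc,
// field and exception events) are updated copy-on-write: a new list is built and swapped in so
// that concurrent readers keep iterating over their old snapshot.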
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_dex_pc_listeners_) {
      modified = new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    dex_pc_listeners_.reset(modified);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kFieldRead) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_read_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_read_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_read_listeners_.reset(modified);
    have_field_read_listeners_ = true;
  }
  if ((events & kFieldWritten) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_field_write_listeners_) {
      modified = new std::list<InstrumentationListener*>(*field_write_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    field_write_listeners_.reset(modified);
    have_field_write_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    std::list<InstrumentationListener*>* modified;
    if (have_exception_caught_listeners_) {
      modified = new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
    } else {
      modified = new std::list<InstrumentationListener*>();
    }
    modified->push_back(listener);
    exception_caught_listeners_.reset(modified);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    if (have_method_entry_listeners_) {
      method_entry_listeners_.remove(listener);
      have_method_entry_listeners_ = !method_entry_listeners_.empty();
    }
  }
  if ((events & kMethodExited) != 0) {
    if (have_method_exit_listeners_) {
      method_exit_listeners_.remove(listener);
      have_method_exit_listeners_ = !method_exit_listeners_.empty();
    }
  }
  if ((events & kMethodUnwind) != 0) {
    if (have_method_unwind_listeners_) {
      method_unwind_listeners_.remove(listener);
      have_method_unwind_listeners_ = !method_unwind_listeners_.empty();
    }
  }
  if ((events & kDexPcMoved) != 0) {
    if (have_dex_pc_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*dex_pc_listeners_.get());
      modified->remove(listener);
      have_dex_pc_listeners_ = !modified->empty();
      if (have_dex_pc_listeners_) {
        dex_pc_listeners_.reset(modified);
      } else {
        dex_pc_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldRead) != 0) {
    if (have_field_read_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_read_listeners_.get());
      modified->remove(listener);
      have_field_read_listeners_ = !modified->empty();
      if (have_field_read_listeners_) {
        field_read_listeners_.reset(modified);
      } else {
        field_read_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kFieldWritten) != 0) {
    if (have_field_write_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*field_write_listeners_.get());
      modified->remove(listener);
      have_field_write_listeners_ = !modified->empty();
      if (have_field_write_listeners_) {
        field_write_listeners_.reset(modified);
      } else {
        field_write_listeners_.reset();
        delete modified;
      }
    }
  }
  if ((events & kExceptionCaught) != 0) {
    if (have_exception_caught_listeners_) {
      std::list<InstrumentationListener*>* modified =
          new std::list<InstrumentationListener*>(*exception_caught_listeners_.get());
      modified->remove(listener);
      have_exception_caught_listeners_ = !modified->empty();
      if (have_exception_caught_listeners_) {
        exception_caught_listeners_.reset(modified);
      } else {
        exception_caught_listeners_.reset();
        delete modified;
      }
    }
  }
  UpdateInterpreterHandlerTable();
}

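// Switches the runtime between the three instrumentation levels computed below: 0 (no
// instrumentation stubs), 1 (method entry/exit stubs) and 2 (everything runs in the interpreter).
// Raising the level walks every thread's stack to install the instrumentation exit PC; lowering
// it restores the stacks once no individually deoptimized method remains.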
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* const self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = IsDeoptimizedMethodsEmpty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  UNUSED(arg);
  thread->ResetQuickAllocEntryPointsForThread();
}

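// Toggles the instrumented allocation entry points. All threads are suspended while the
// entrypoints are swapped and each thread's cached allocation entrypoints are reset, so no thread
// keeps allocating through the old entry points.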
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  Locks::mutator_lock_->AssertNotHeld(self);
  Locks::instrument_entrypoints_lock_->AssertHeld(self);
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(self, *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  InstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  MutexLock mu(Thread::Current(), *Locks::instrument_entrypoints_lock_);
  UninstrumentQuickAllocEntryPointsLocked();
}

void Instrumentation::InstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(true);
  }
  ++quick_alloc_entry_points_instrumentation_counter_;
}

void Instrumentation::UninstrumentQuickAllocEntryPointsLocked() {
  Locks::instrument_entrypoints_lock_->AssertHeld(Thread::Current());
  CHECK_GT(quick_alloc_entry_points_instrumentation_counter_, 0U);
  --quick_alloc_entry_points_instrumentation_counter_;
  if (quick_alloc_entry_points_instrumentation_counter_ == 0) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

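// Entry point used when a method's quick code pointer is being updated. If instrumentation stubs
// are installed, the requested code may be replaced by the quick-to-interpreter bridge or the
// instrumentation entry point so the current instrumentation level is preserved.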
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code) {
  const void* new_quick_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_quick_code = quick_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
      if (class_linker->IsQuickResolutionStub(quick_code) ||
          class_linker->IsQuickToInterpreterBridge(quick_code)) {
        new_quick_code = quick_code;
      } else if (entry_exit_stubs_installed_) {
        new_quick_code = GetQuickInstrumentationEntryPoint();
      } else {
        new_quick_code = quick_code;
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code);
}

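// The set of individually deoptimized methods is kept in a multimap keyed by the method's
// identity hash code, with values held as GcRoots so the GC can visit (and, with a moving
// collector, update) the references. The helpers below are called with deoptimized_methods_lock_
// held by their callers.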
bool Instrumentation::AddDeoptimizedMethod(mirror::ArtMethod* method) {
  // Note that the insert() below isn't read barrier-aware. So, this
  // FindDeoptimizedMethod() call is necessary or else we would end up
  // storing the same method twice in the map (the from-space and the
  // to-space ones).
  if (FindDeoptimizedMethod(method)) {
    // Already in the map. Return.
    return false;
  }
  // Not found. Add it.
  int32_t hash_code = method->IdentityHashCode();
  deoptimized_methods_.insert(std::make_pair(hash_code, GcRoot<mirror::ArtMethod>(method)));
  return true;
}

bool Instrumentation::FindDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found.
      return true;
    }
  }
  // Not found.
  return false;
}

mirror::ArtMethod* Instrumentation::BeginDeoptimizedMethod() {
  auto it = deoptimized_methods_.begin();
  if (it == deoptimized_methods_.end()) {
    // Empty.
    return nullptr;
  }
  return it->second.Read();
}

bool Instrumentation::RemoveDeoptimizedMethod(mirror::ArtMethod* method) {
  int32_t hash_code = method->IdentityHashCode();
  auto range = deoptimized_methods_.equal_range(hash_code);
  for (auto it = range.first; it != range.second; ++it) {
    mirror::ArtMethod* m = it->second.Read();
    if (m == method) {
      // Found. Erase and return.
      deoptimized_methods_.erase(it);
      return true;
    }
  }
  // Not found.
  return false;
}

bool Instrumentation::IsDeoptimizedMethodsEmpty() const {
  return deoptimized_methods_.empty();
}

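// Forces a single method to execute through the instrumentation entry point (and hence the
// interpreter) by recording it in the deoptimized-methods set and, unless everything already runs
// in the interpreter, installing instrumentation exit stubs on all thread stacks.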
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool has_not_been_deoptimized = AddDeoptimizedMethod(method);
    CHECK(has_not_been_deoptimized) << "Method " << PrettyMethod(method)
        << " is already deoptimized";
  }
  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickInstrumentationEntryPoint());

    // Install instrumentation exit stub and instrumentation frames. We may already have installed
    // these previously so it will only cover the newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    bool found_and_erased = RemoveDeoptimizedMethod(method);
    CHECK(found_and_erased) << "Method " << PrettyMethod(method)
        << " is not deoptimized";
    empty = IsDeoptimizedMethodsEmpty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionStub());
    } else {
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      UpdateEntrypoints(method, quick_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) {
  DCHECK(method != nullptr);
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  return FindDeoptimizedMethod(method);
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(IsDeoptimizedMethodsEmpty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (IsDeoptimizedMethodsEmpty()) {
        break;
      }
      method = BeginDeoptimizedMethod();
      CHECK(method != nullptr);
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates if instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return !deoptimization_enabled_ && !interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

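// Method tracing drives ConfigureStubs() directly: with
// kDeoptimizeForAccurateMethodEntryExitListeners the whole runtime switches to the interpreter,
// otherwise only entry/exit stubs are installed. A minimal usage sketch (the listener object and
// exact event-constant scoping are illustrative; the real declarations live in instrumentation.h,
// and AddListener/RemoveListener must be called with the mutator lock exclusively held):
//
//   instrumentation->AddListener(&my_listener,
//                                Instrumentation::kMethodEntered | Instrumentation::kMethodExited);
//   instrumentation->EnableMethodTracing();   // install stubs / switch to interpreter
//   ...                                       // my_listener receives enter/exit events
//   instrumentation->DisableMethodTracing();  // restore original entry points
//   instrumentation->RemoveListener(&my_listener,
//                                   Instrumentation::kMethodEntered | Instrumentation::kMethodExited);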
void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

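// Returns the quick code the method would run without instrumentation. When instrumentation
// stubs are installed, or the method's current entry point is only a stub, the answer is taken
// from the oat code via the class linker instead.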
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method, size_t pointer_size) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCodePtrSize(pointer_size);
    DCHECK(code != nullptr);
    ClassLinker* class_linker = runtime->GetClassLinker();
    if (LIKELY(!class_linker->IsQuickResolutionStub(code) &&
               !class_linker->IsQuickToInterpreterBridge(code))) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

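// The DEX pc, field access and exception events below iterate over a local std::shared_ptr copy
// of the listener list. RemoveListener() publishes a fresh list instead of mutating the old one,
// so a dispatch that is already in flight keeps using its snapshot safely.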
void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  if (HasDexPcListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(dex_pc_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->DexPcMoved(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                                         mirror::ArtMethod* method, uint32_t dex_pc,
                                         mirror::ArtField* field) const {
  if (HasFieldReadListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_read_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldRead(thread, this_object, method, dex_pc, field);
    }
  }
}

void Instrumentation::FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method, uint32_t dex_pc,
                                          mirror::ArtField* field, const JValue& field_value) const {
  if (HasFieldWriteListeners()) {
    std::shared_ptr<std::list<InstrumentationListener*>> original(field_write_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->FieldWritten(thread, this_object, method, dex_pc, field, field_value);
    }
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (HasExceptionCaughtListeners()) {
    DCHECK_EQ(thread->GetException(nullptr), exception_object);
    bool is_exception_reported = thread->IsExceptionReportedToInstrumentation();
    thread->ClearException();
    std::shared_ptr<std::list<InstrumentationListener*>> original(exception_caught_listeners_);
    for (InstrumentationListener* listener : *original.get()) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc,
                                exception_object);
    }
    thread->SetException(throw_location, exception_object);
    thread->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

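// Invoked from the quick instrumentation entry trampoline: records the caller's return address
// (lr) in an InstrumentationStackFrame on the thread-local instrumentation stack and reports the
// method entry event, unless this is an interpreter entry frame.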
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  if (!interpreter_entry) {
    MethodEnterEvent(self, this_object, method, 0);
  }
}

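// Counterpart called from the quick instrumentation exit stub: pops the matching
// InstrumentationStackFrame, reports the method exit event, and returns a TwoWordReturn telling
// the stub where to continue. If the caller itself must be interpreted (it was deoptimized or the
// interpreter stubs are installed), control is redirected to the deoptimization entry point;
// otherwise execution resumes at the original return pc.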
TwoWordReturn Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                            uint64_t gpr_result,
                                                            uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);
  self->VerifyStack();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  uint32_t length;
  char return_shorty = method->GetShorty(&length)[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  //       return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  if (!instrumentation_frame.interpreter_entry_) {
    MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);
  }

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != nullptr) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << StringPrintf("Deoptimizing %s by returning from %s with result %#" PRIx64 " in ",
                                PrettyMethod(visitor.caller).c_str(),
                                PrettyMethod(method).c_str(),
                                return_value.GetJ()) << *self;
    }
    self->SetDeoptimizationReturnValue(return_value);
    return GetTwoWordSuccessValue(*return_pc,
                                  reinterpret_cast<uintptr_t>(GetQuickDeoptimizationEntryPoint()));
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return GetTwoWordSuccessValue(0, *return_pc);
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    //       return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (IsDeoptimizedMethodsEmpty()) {
    return;
  }
  for (auto pair : deoptimized_methods_) {
    pair.second.VisitRoot(callback, arg, RootInfo(kRootVMInternal));
  }
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art