/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "instrumentation.h"

#include <sys/uio.h>

#include "atomic.h"
#include "base/unix_file/fd_file.h"
#include "class_linker.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "entrypoints/quick/quick_alloc_entrypoints.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache.h"
#include "mirror/object_array-inl.h"
#include "mirror/object-inl.h"
#include "nth_caller_visitor.h"
#if !defined(ART_USE_PORTABLE_COMPILER)
#include "entrypoints/quick/quick_entrypoints.h"
#endif
#include "object_utils.h"
#include "os.h"
#include "scoped_thread_state_change.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

namespace instrumentation {

const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;

static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  return instrumentation->InstallStubsForClass(klass);
}

Instrumentation::Instrumentation()
    : instrumentation_stubs_installed_(false), entry_exit_stubs_installed_(false),
      interpreter_stubs_installed_(false),
      interpret_only_(false), forced_interpret_only_(false),
      have_method_entry_listeners_(false), have_method_exit_listeners_(false),
      have_method_unwind_listeners_(false), have_dex_pc_listeners_(false),
      have_exception_caught_listeners_(false),
      deoptimized_methods_lock_("deoptimized methods lock"),
      deoptimization_enabled_(false),
      interpreter_handler_table_(kMainHandlerTable),
      quick_alloc_entry_points_instrumentation_counter_(0) {
}

bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
  for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetDirectMethod(i));
  }
  for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
    InstallStubsForMethod(klass->GetVirtualMethod(i));
  }
  return true;
}

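// Sets the quick and portable entry points for |method| and keeps its interpreter entry point
// consistent with them: methods routed to the interpreter (or sitting on the resolution
// trampoline under forced interpret-only) use the interpreter-to-interpreter bridge, everything
// else uses the interpreter-to-compiled-code bridge.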
static void UpdateEntrypoints(mirror::ArtMethod* method, const void* quick_code,
                              const void* portable_code, bool have_portable_code)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  method->SetEntryPointFromPortableCompiledCode(portable_code);
  method->SetEntryPointFromQuickCompiledCode(quick_code);
  bool portable_enabled = method->IsPortableCompiled();
  if (have_portable_code && !portable_enabled) {
    method->SetIsPortableCompiled();
  } else if (portable_enabled) {
    method->ClearIsPortableCompiled();
  }
  if (!method->IsResolutionMethod()) {
    if (quick_code == GetQuickToInterpreterBridge() ||
        (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) &&
         Runtime::Current()->GetInstrumentation()->IsForcedInterpretOnly()
         && !method->IsNative() && !method->IsProxyMethod())) {
      if (kIsDebugBuild) {
        if (quick_code == GetQuickToInterpreterBridge()) {
          DCHECK(portable_code == GetPortableToInterpreterBridge());
        } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker())) {
          DCHECK(portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker()));
        }
      }
      DCHECK(!method->IsNative()) << PrettyMethod(method);
      DCHECK(!method->IsProxyMethod()) << PrettyMethod(method);
      method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
    } else {
      method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
    }
  }
}

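// Chooses the appropriate entry points for a single method based on the current instrumentation
// level: the original oat code when no stubs are required, the instrumentation entry point when
// entry/exit stubs are installed, the interpreter bridge when interpreting or deoptimized, and
// the resolution trampoline for static methods of uninitialized classes.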
void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
  if (method->IsAbstract() || method->IsProxyMethod()) {
    // Do not change stubs for these methods.
    return;
  }
  const void* new_portable_code;
  const void* new_quick_code;
  bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
  bool have_portable_code = false;
  if (uninstall) {
    if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
      new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      new_quick_code = class_linker->GetQuickOatCodeFor(method);
    } else {
      new_portable_code = GetPortableResolutionTrampoline(class_linker);
      new_quick_code = GetQuickResolutionTrampoline(class_linker);
    }
  } else {  // !uninstall
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
    } else {
      // Do not overwrite the resolution trampoline. When the trampoline initializes the method's
      // class, the code of all of its static methods will be set to the instrumentation entry
      // point. For more details, see ClassLinker::FixupStaticTrampolines.
      if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
        // Do not overwrite an interpreter entry point, to avoid posting method entry/exit events
        // twice.
        new_portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
        new_quick_code = class_linker->GetQuickOatCodeFor(method);
        if (entry_exit_stubs_installed_ && new_quick_code != GetQuickToInterpreterBridge()) {
          DCHECK(new_portable_code != GetPortableToInterpreterBridge());
          new_portable_code = GetPortableToInterpreterBridge();
          new_quick_code = GetQuickInstrumentationEntryPoint();
        }
      } else {
        new_portable_code = GetPortableResolutionTrampoline(class_linker);
        new_quick_code = GetQuickResolutionTrampoline(class_linker);
      }
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, have_portable_code);
}

// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frames
// before the existing ones.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << "  Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which already has the instrumentation exit stub installed.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert the frame before the old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;
    std::vector<uint32_t> dex_pcs_;
    const uintptr_t instrumentation_exit_pc_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc);
  visitor.WalkStack(true);
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->ShouldNotifyMethodEnterExitEvents()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached, to avoid posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}

// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == GetFrameId()) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << "  Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (!instrumentation_->ShouldNotifyMethodEnterExitEvents()) {
            // Create the method exit events. As the methods didn't really exit, the result is 0.
            // We only do this if no debugger is attached, to avoid posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << "  No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}

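// Registers |listener| for every event set in |events| and raises the corresponding
// have_*_listeners_ flags. Requires the mutator lock to be held exclusively.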
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  UpdateInterpreterHandlerTable();
}

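// Unregisters |listener| from every event set in |events| and refreshes the have_*_listeners_
// flags. Requires the mutator lock to be held exclusively.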
void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());

  if ((events & kMethodEntered) != 0) {
    bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
                              listener) != method_entry_listeners_.end();
    if (contains) {
      method_entry_listeners_.remove(listener);
    }
    have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
  }
  if ((events & kMethodExited) != 0) {
    bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
                              listener) != method_exit_listeners_.end();
    if (contains) {
      method_exit_listeners_.remove(listener);
    }
    have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.remove(listener);
  }
  if ((events & kDexPcMoved) != 0) {
    bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
                              listener) != dex_pc_listeners_.end();
    if (contains) {
      dex_pc_listeners_.remove(listener);
    }
    have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.remove(listener);
    have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
  }
  UpdateInterpreterHandlerTable();
}

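// Moves the runtime between instrumentation levels: 0 = none, 1 = entry/exit stubs,
// 2 = interpreter. Raising the level installs stubs on all classes and pushes instrumentation
// frames on every thread's stack; lowering it restores the original entry points and, if no
// method remains deoptimized, removes the instrumentation frames as well.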
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    bool empty;
    {
      ReaderMutexLock mu(self, deoptimized_methods_lock_);
      empty = deoptimized_methods_.empty();  // Avoid lock violation.
    }
    if (empty) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}

static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
  thread->ResetQuickAllocEntryPointsForThread();
}

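// Switches the quick allocation entry points between the instrumented and uninstrumented
// variants. All threads are suspended (once the runtime has started) while the entry points are
// swapped and every thread is reset, so no thread observes a half-updated set.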
void Instrumentation::SetEntrypointsInstrumented(bool instrumented) {
  Runtime* runtime = Runtime::Current();
  ThreadList* tl = runtime->GetThreadList();
  if (runtime->IsStarted()) {
    tl->SuspendAll();
  }
  {
    MutexLock mu(Thread::Current(), *Locks::runtime_shutdown_lock_);
    SetQuickAllocEntryPointsInstrumented(instrumented);
    ResetQuickAllocEntryPoints();
  }
  if (runtime->IsStarted()) {
    tl->ResumeAll();
  }
}

void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    SetEntrypointsInstrumented(true);
  }
}

void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racy and this code
  // should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    SetEntrypointsInstrumented(false);
  }
}

void Instrumentation::ResetQuickAllocEntryPoints() {
  Runtime* runtime = Runtime::Current();
  if (runtime->IsStarted()) {
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
  }
}

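// Updates a method's code pointers while respecting the current instrumentation level: the new
// code is kept as-is when no stubs are installed, otherwise it is replaced with the interpreter
// bridge or the instrumentation entry point as needed.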
void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* quick_code,
                                        const void* portable_code, bool have_portable_code) const {
  const void* new_portable_code;
  const void* new_quick_code;
  bool new_have_portable_code;
  if (LIKELY(!instrumentation_stubs_installed_)) {
    new_portable_code = portable_code;
    new_quick_code = quick_code;
    new_have_portable_code = have_portable_code;
  } else {
    if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
      new_portable_code = GetPortableToInterpreterBridge();
      new_quick_code = GetQuickToInterpreterBridge();
      new_have_portable_code = false;
    } else if (quick_code == GetQuickResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
               quick_code == GetQuickToInterpreterBridge()) {
      DCHECK((portable_code == GetPortableResolutionTrampoline(Runtime::Current()->GetClassLinker())) ||
             (portable_code == GetPortableToInterpreterBridge()));
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    } else if (entry_exit_stubs_installed_) {
      new_quick_code = GetQuickInstrumentationEntryPoint();
      new_portable_code = GetPortableToInterpreterBridge();
      new_have_portable_code = false;
    } else {
      new_portable_code = portable_code;
      new_quick_code = quick_code;
      new_have_portable_code = have_portable_code;
    }
  }
  UpdateEntrypoints(method, new_quick_code, new_portable_code, new_have_portable_code);
}

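// Forces |method| to execute in the interpreter by routing its entry points to the interpreter
// bridges and installing the instrumentation exit stub on every thread's stack (unless
// everything is already interpreted). A rough usage sketch, assuming a debugger-style caller
// that has already called EnableDeoptimization():
//
//   instrumentation->Deoptimize(method);    // run |method| in the interpreter
//   ...                                     // e.g. while a breakpoint is set in it
//   instrumentation->Undeoptimize(method);  // restore the compiled code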
void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    pair = deoptimized_methods_.insert(method);
  }
  bool already_deoptimized = !pair.second;
  CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";

  if (!interpreter_stubs_installed_) {
    UpdateEntrypoints(method, GetQuickToInterpreterBridge(), GetPortableToInterpreterBridge(),
                      false);

    // Install the instrumentation exit stub and instrumentation frames. We may have already
    // installed these previously, so this only covers newly created frames.
    instrumentation_stubs_installed_ = true;
    MutexLock mu(self, *Locks::thread_list_lock_);
    Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  }
}

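// Reverses Deoptimize(): restores the method's compiled code (or the resolution trampoline for
// static methods of uninitialized classes) and, once no deoptimized method remains, restores
// every thread's stack.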
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  Thread* self = Thread::Current();
  bool empty;
  {
    WriterMutexLock mu(self, deoptimized_methods_lock_);
    auto it = deoptimized_methods_.find(method);
    CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method)
                                            << " is not deoptimized";
    deoptimized_methods_.erase(it);
    empty = deoptimized_methods_.empty();
  }

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    if (method->IsStatic() && !method->IsConstructor() &&
        !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetQuickResolutionTrampoline(class_linker),
                        GetPortableResolutionTrampoline(class_linker), false);
    } else {
      bool have_portable_code = false;
      const void* quick_code = class_linker->GetQuickOatCodeFor(method);
      const void* portable_code = class_linker->GetPortableOatCodeFor(method, &have_portable_code);
      UpdateEntrypoints(method, quick_code, portable_code, have_portable_code);
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (empty) {
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}

bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  DCHECK(method != nullptr);
  return deoptimized_methods_.find(method) != deoptimized_methods_.end();
}

void Instrumentation::EnableDeoptimization() {
  ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  CHECK(deoptimized_methods_.empty());
  CHECK_EQ(deoptimization_enabled_, false);
  deoptimization_enabled_ = true;
}

void Instrumentation::DisableDeoptimization() {
  CHECK_EQ(deoptimization_enabled_, true);
  // If we deoptimized everything, undo it.
  if (interpreter_stubs_installed_) {
    UndeoptimizeEverything();
  }
  // Undeoptimize selected methods.
  while (true) {
    mirror::ArtMethod* method;
    {
      ReaderMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
      if (deoptimized_methods_.empty()) {
        break;
      }
      method = *deoptimized_methods_.begin();
    }
    Undeoptimize(method);
  }
  deoptimization_enabled_ = false;
}

// Indicates whether instrumentation should notify method enter/exit events to the listeners.
bool Instrumentation::ShouldNotifyMethodEnterExitEvents() const {
  return deoptimization_enabled_ || interpreter_stubs_installed_;
}

void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}

void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}

void Instrumentation::EnableMethodTracing() {
  bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
  ConfigureStubs(!require_interpreter, require_interpreter);
}

void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}

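// Returns the quick code callers should actually execute for |method|: the method's own entry
// point when no stubs are installed, otherwise the oat code looked up through the class linker
// so that instrumentation and interpreter stubs are bypassed.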
const void* Instrumentation::GetQuickCodeFor(mirror::ArtMethod* method) const {
  Runtime* runtime = Runtime::Current();
  if (LIKELY(!instrumentation_stubs_installed_)) {
    const void* code = method->GetEntryPointFromQuickCompiledCode();
    DCHECK(code != NULL);
    if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
               code != GetQuickToInterpreterBridge())) {
      return code;
    }
  }
  return runtime->GetClassLinker()->GetQuickOatCodeFor(method);
}

void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}

void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                                        mirror::ArtMethod* method,
                                        uint32_t dex_pc) const {
  if (have_method_unwind_listeners_) {
    for (InstrumentationListener* listener : method_unwind_listeners_) {
      listener->MethodUnwind(thread, this_object, method, dex_pc);
    }
  }
}

void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                                          mirror::ArtMethod* method,
                                          uint32_t dex_pc) const {
  // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
  // action where it can remove itself as a listener and break the iterator. The copy only works
  // around the problem and in general we may have to move to something like reference counting to
  // ensure listeners are deleted correctly.
  std::list<InstrumentationListener*> copy(dex_pc_listeners_);
  for (InstrumentationListener* listener : copy) {
    listener->DexPcMoved(thread, this_object, method, dex_pc);
  }
}

void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    thread->SetException(throw_location, exception_object);
  }
}

static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
                            int delta)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
  if (frame_id != instrumentation_frame.frame_id_) {
    LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
        << instrumentation_frame.frame_id_;
    StackVisitor::DescribeStack(self);
    CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
  }
}

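// Pushes an InstrumentationStackFrame recording the caller's return address (lr), this object
// and frame id onto the thread's instrumentation stack, then posts the method-enter event.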
void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                                    mirror::ArtMethod* method,
                                                    uintptr_t lr, bool interpreter_entry) {
  // We have a callee-save frame meaning this value is guaranteed to never be 0.
  size_t frame_id = StackVisitor::ComputeNumFrames(self);
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  if (kVerboseInstrumentation) {
    LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
  }
  instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
                                                                   frame_id, interpreter_entry);
  stack->push_front(instrumentation_frame);

  MethodEnterEvent(self, this_object, method, 0);
}

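// Pops the top InstrumentationStackFrame, posts the method-exit event and decides where to
// return. When the caller must be deoptimized, the returned value packs the quick
// deoptimization entry point in the low 32 bits and the original return pc in the high 32 bits;
// otherwise it is simply the original return pc.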
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  // return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    self->SetDeoptimizationReturnValue(return_value);
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}

void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
  stack->pop_front();

  mirror::ArtMethod* method = instrumentation_frame.method_;
  if (is_deoptimization) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
    }
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
    }

    // Notify listeners of method unwind.
    // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
    // return_pc.
    uint32_t dex_pc = DexFile::kDexNoIndex;
    MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
  }
}

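// Reports the deoptimized methods as GC roots and rebuilds the set with the (possibly updated)
// pointers, so the set stays valid if the collector moves them.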
void Instrumentation::VisitRoots(RootCallback* callback, void* arg) {
  WriterMutexLock mu(Thread::Current(), deoptimized_methods_lock_);
  if (deoptimized_methods_.empty()) {
    return;
  }
  std::set<mirror::ArtMethod*> new_deoptimized_methods;
  for (mirror::ArtMethod* method : deoptimized_methods_) {
    DCHECK(method != nullptr);
    callback(reinterpret_cast<mirror::Object**>(&method), arg, 0, kRootVMInternal);
    new_deoptimized_methods.insert(method);
  }
  deoptimized_methods_ = new_deoptimized_methods;
}

std::string InstrumentationStackFrame::Dump() const {
  std::ostringstream os;
  os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
      << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
  return os.str();
}

}  // namespace instrumentation
}  // namespace art