blob: 0b1154338f1d85599cbeccf59409b11fe0ad69f0 [file] [log] [blame]
jeffhao725a9572012-11-13 18:20:12 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "instrumentation.h"
18
19#include <sys/uio.h>
20
Ian Rogers62d6c772013-02-27 08:32:07 -080021#include "atomic_integer.h"
Elliott Hughes76160052012-12-12 16:31:20 -080022#include "base/unix_file/fd_file.h"
jeffhao725a9572012-11-13 18:20:12 -080023#include "class_linker.h"
24#include "debugger.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080025#include "dex_file-inl.h"
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010026#include "interpreter/interpreter.h"
Brian Carlstromea46f952013-07-30 01:26:50 -070027#include "mirror/art_method-inl.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080028#include "mirror/class-inl.h"
29#include "mirror/dex_cache.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080030#include "mirror/object_array-inl.h"
Ian Rogers4f6ad8a2013-03-18 15:27:28 -070031#include "mirror/object-inl.h"
Ian Rogers62d6c772013-02-27 08:32:07 -080032#include "nth_caller_visitor.h"
Ian Rogersc928de92013-02-27 14:30:44 -080033#if !defined(ART_USE_PORTABLE_COMPILER)
Ian Rogers166db042013-07-26 12:05:57 -070034#include "entrypoints/quick/quick_entrypoints.h"
jeffhao725a9572012-11-13 18:20:12 -080035#endif
36#include "object_utils.h"
37#include "os.h"
38#include "scoped_thread_state_change.h"
39#include "thread.h"
40#include "thread_list.h"
jeffhao725a9572012-11-13 18:20:12 -080041
42namespace art {
Ian Rogersfa824272013-11-05 16:12:57 -080043
44extern void SetQuickAllocEntryPointsInstrumented(bool instrumented);
45
Ian Rogers62d6c772013-02-27 08:32:07 -080046namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080047
// Compile-time debug switch: when true, every step of exit-stub installation/removal
// is logged at INFO level by the visitors below.
const bool kVerboseInstrumentation = false;

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = false;
Ian Rogers62d6c772013-02-27 08:32:07 -080055static bool InstallStubsClassVisitor(mirror::Class* klass, void* arg)
jeffhao725a9572012-11-13 18:20:12 -080056 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers62d6c772013-02-27 08:32:07 -080057 Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
58 return instrumentation->InstallStubsForClass(klass);
59}
60
61bool Instrumentation::InstallStubsForClass(mirror::Class* klass) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010062 for (size_t i = 0, e = klass->NumDirectMethods(); i < e; i++) {
63 InstallStubsForMethod(klass->GetDirectMethod(i));
jeffhao725a9572012-11-13 18:20:12 -080064 }
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010065 for (size_t i = 0, e = klass->NumVirtualMethods(); i < e; i++) {
66 InstallStubsForMethod(klass->GetVirtualMethod(i));
jeffhao725a9572012-11-13 18:20:12 -080067 }
68 return true;
69}
70
Sebastien Hertz138dbfc2013-12-04 18:15:25 +010071static void UpdateEntrypoints(mirror::ArtMethod* method, const void* code) {
72 method->SetEntryPointFromCompiledCode(code);
73 if (!method->IsResolutionMethod()) {
74 if (code == GetCompiledCodeToInterpreterBridge()) {
75 method->SetEntryPointFromInterpreter(art::interpreter::artInterpreterToInterpreterBridge);
76 } else {
77 method->SetEntryPointFromInterpreter(art::artInterpreterToCompiledCodeBridge);
78 }
79 }
80}
81
82void Instrumentation::InstallStubsForMethod(mirror::ArtMethod* method) {
83 if (method->IsAbstract() || method->IsProxyMethod()) {
84 // Do not change stubs for these methods.
85 return;
86 }
87 const void* new_code;
88 bool uninstall = !entry_exit_stubs_installed_ && !interpreter_stubs_installed_;
89 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
90 bool is_class_initialized = method->GetDeclaringClass()->IsInitialized();
91 if (uninstall) {
92 if ((forced_interpret_only_ || IsDeoptimized(method)) && !method->IsNative()) {
93 new_code = GetCompiledCodeToInterpreterBridge();
94 } else if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
95 new_code = class_linker->GetOatCodeFor(method);
96 } else {
97 new_code = GetResolutionTrampoline(class_linker);
98 }
99 } else { // !uninstall
100 if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
101 new_code = GetCompiledCodeToInterpreterBridge();
102 } else {
103 // Do not overwrite resolution trampoline. When the trampoline initializes the method's
104 // class, all its static methods code will be set to the instrumentation entry point.
105 // For more details, see ClassLinker::FixupStaticTrampolines.
106 if (is_class_initialized || !method->IsStatic() || method->IsConstructor()) {
107 // Do not overwrite interpreter to prevent from posting method entry/exit events twice.
108 new_code = class_linker->GetOatCodeFor(method);
109 if (entry_exit_stubs_installed_ && new_code != GetCompiledCodeToInterpreterBridge()) {
110 new_code = GetQuickInstrumentationEntryPoint();
111 }
112 } else {
113 new_code = GetResolutionTrampoline(class_linker);
114 }
115 }
116 }
117 UpdateEntrypoints(method, new_code);
118}
119
// Places the instrumentation exit pc as the return PC for every quick frame. This also allows
// deoptimization of quick frames to interpreter frames.
// Since we may already have done this previously, we need to push new instrumentation frame before
// existing instrumentation frames.
static void InstrumentationInstallStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack top-down, saving each quick frame's real return PC into an
  // InstrumentationStackFrame and overwriting it with the instrumentation exit stub PC.
  struct InstallStackVisitor : public StackVisitor {
    InstallStackVisitor(Thread* thread, Context* context, uintptr_t instrumentation_exit_pc,
                        bool is_deoptimization_enabled)
        : StackVisitor(thread, context), instrumentation_stack_(thread->GetInstrumentationStack()),
          existing_instrumentation_frames_count_(instrumentation_stack_->size()),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          is_deoptimization_enabled_(is_deoptimization_enabled),
          reached_existing_instrumentation_frames_(false), instrumentation_stack_depth_(0),
          last_return_pc_(0) {
    }

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId()
                    << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        last_return_pc_ = 0;
        return true;  // Ignore upcalls.
      }
      if (m->IsRuntimeMethod()) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping runtime method. Frame " << GetFrameId();
        }
        last_return_pc_ = GetReturnPc();
        return true;  // Ignore unresolved methods since they will be instrumented after resolution.
      }
      if (kVerboseInstrumentation) {
        LOG(INFO) << " Installing exit stub in " << DescribeLocation();
      }
      uintptr_t return_pc = GetReturnPc();
      if (return_pc == instrumentation_exit_pc_) {
        // We've reached a frame which has already been installed with instrumentation exit stub.
        // We should have already installed instrumentation on previous frames.
        reached_existing_instrumentation_frames_ = true;

        // Recover the real return PC from the matching saved frame rather than pushing a
        // duplicate entry for this frame.
        CHECK_LT(instrumentation_stack_depth_, instrumentation_stack_->size());
        const InstrumentationStackFrame& frame = instrumentation_stack_->at(instrumentation_stack_depth_);
        CHECK_EQ(m, frame.method_) << "Expected " << PrettyMethod(m)
                                   << ", Found " << PrettyMethod(frame.method_);
        return_pc = frame.return_pc_;
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Ignoring already instrumented " << frame.Dump();
        }
      } else {
        CHECK_NE(return_pc, 0U);
        CHECK(!reached_existing_instrumentation_frames_);
        InstrumentationStackFrame instrumentation_frame(GetThisObject(), m, return_pc, GetFrameId(),
                                                        false);
        if (kVerboseInstrumentation) {
          LOG(INFO) << "Pushing frame " << instrumentation_frame.Dump();
        }

        // Insert frame before old ones so we do not corrupt the instrumentation stack.
        auto it = instrumentation_stack_->end() - existing_instrumentation_frames_count_;
        instrumentation_stack_->insert(it, instrumentation_frame);
        SetReturnPc(instrumentation_exit_pc_);
      }
      // last_return_pc_ is the return PC of the *previous* (callee) frame, i.e. the PC at
      // which this method is currently suspended — used to compute its dex pc.
      dex_pcs_.push_back(m->ToDexPc(last_return_pc_));
      last_return_pc_ = return_pc;
      ++instrumentation_stack_depth_;
      return true;  // Continue.
    }
    std::deque<InstrumentationStackFrame>* const instrumentation_stack_;
    const size_t existing_instrumentation_frames_count_;  // Frames saved by a previous install.
    std::vector<uint32_t> dex_pcs_;                       // Dex pcs, outermost frame last.
    const uintptr_t instrumentation_exit_pc_;
    const bool is_deoptimization_enabled_;
    bool reached_existing_instrumentation_frames_;
    size_t instrumentation_stack_depth_;
    uintptr_t last_return_pc_;
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Installing exit stubs in " << thread_name;
  }

  Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
  UniquePtr<Context> context(Context::Create());
  uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
  InstallStackVisitor visitor(thread, context.get(), instrumentation_exit_pc,
                              instrumentation->IsDeoptimizationEnabled());
  visitor.WalkStack(true);
  // Every saved instrumentation frame must have a recorded dex pc.
  CHECK_EQ(visitor.dex_pcs_.size(), thread->GetInstrumentationStack()->size());

  if (!instrumentation->IsDeoptimizationEnabled()) {
    // Create method enter events for all methods currently on the thread's stack. We only do this
    // if no debugger is attached to prevent from posting events twice.
    typedef std::deque<InstrumentationStackFrame>::const_reverse_iterator It;
    for (It it = thread->GetInstrumentationStack()->rbegin(),
        end = thread->GetInstrumentationStack()->rend(); it != end; ++it) {
      mirror::Object* this_object = (*it).this_object_;
      mirror::ArtMethod* method = (*it).method_;
      uint32_t dex_pc = visitor.dex_pcs_.back();
      visitor.dex_pcs_.pop_back();
      instrumentation->MethodEnterEvent(thread, this_object, method, dex_pc);
    }
  }
  thread->VerifyStack();
}
234
// Removes the instrumentation exit pc as the return PC for every quick frame.
static void InstrumentationRestoreStack(Thread* thread, void* arg)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Walks |thread|'s stack and, for every frame recorded on the instrumentation stack,
  // restores its real return PC and (when no debugger is attached) posts a method exit event.
  struct RestoreStackVisitor : public StackVisitor {
    RestoreStackVisitor(Thread* thread, uintptr_t instrumentation_exit_pc,
                        Instrumentation* instrumentation)
        : StackVisitor(thread, NULL), thread_(thread),
          instrumentation_exit_pc_(instrumentation_exit_pc),
          instrumentation_(instrumentation),
          instrumentation_stack_(thread->GetInstrumentationStack()),
          frames_removed_(0) {}

    virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
      if (instrumentation_stack_->size() == 0) {
        return false;  // Stop.
      }
      mirror::ArtMethod* m = GetMethod();
      if (GetCurrentQuickFrame() == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Ignoring a shadow frame. Frame " << GetFrameId() << " Method=" << PrettyMethod(m);
        }
        return true;  // Ignore shadow frames.
      }
      if (m == NULL) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " Skipping upcall. Frame " << GetFrameId();
        }
        return true;  // Ignore upcalls.
      }
      bool removed_stub = false;
      // TODO: make this search more efficient?
      // Match this quick frame against a saved instrumentation frame by frame id.
      for (InstrumentationStackFrame instrumentation_frame : *instrumentation_stack_) {
        if (instrumentation_frame.frame_id_ == GetFrameId()) {
          if (kVerboseInstrumentation) {
            LOG(INFO) << " Removing exit stub in " << DescribeLocation();
          }
          if (instrumentation_frame.interpreter_entry_) {
            // Frames entered through the interpreter entry stub run under the
            // kRefsAndArgs callee-save method rather than the recorded method.
            CHECK(m == Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
          } else {
            CHECK(m == instrumentation_frame.method_) << PrettyMethod(m);
          }
          SetReturnPc(instrumentation_frame.return_pc_);
          if (!instrumentation_->IsDeoptimizationEnabled()) {
            // Create the method exit events. As the methods didn't really exit the result is 0.
            // We only do this if no debugger is attached to prevent from posting events twice.
            instrumentation_->MethodExitEvent(thread_, instrumentation_frame.this_object_, m,
                                              GetDexPc(), JValue());
          }
          frames_removed_++;
          removed_stub = true;
          break;
        }
      }
      if (!removed_stub) {
        if (kVerboseInstrumentation) {
          LOG(INFO) << " No exit stub in " << DescribeLocation();
        }
      }
      return true;  // Continue.
    }
    Thread* const thread_;
    const uintptr_t instrumentation_exit_pc_;
    Instrumentation* const instrumentation_;
    std::deque<instrumentation::InstrumentationStackFrame>* const instrumentation_stack_;
    size_t frames_removed_;  // Must equal the stack size once the walk finishes.
  };
  if (kVerboseInstrumentation) {
    std::string thread_name;
    thread->GetThreadName(thread_name);
    LOG(INFO) << "Removing exit stubs in " << thread_name;
  }
  std::deque<instrumentation::InstrumentationStackFrame>* stack = thread->GetInstrumentationStack();
  if (stack->size() > 0) {
    Instrumentation* instrumentation = reinterpret_cast<Instrumentation*>(arg);
    uintptr_t instrumentation_exit_pc = GetQuickInstrumentationExitPc();
    RestoreStackVisitor visitor(thread, instrumentation_exit_pc, instrumentation);
    visitor.WalkStack(true);
    // Every saved frame must have been matched and restored.
    CHECK_EQ(visitor.frames_removed_, stack->size());
    while (stack->size() > 0) {
      stack->pop_front();
    }
  }
}
318
// Subscribes |listener| to every event category whose bit is set in |events|, and sets the
// corresponding have_*_listeners_ fast-path flag. Requires the mutator lock exclusively
// held so the listener lists cannot be read concurrently. No duplicate check is performed:
// adding the same listener twice results in two entries.
void Instrumentation::AddListener(InstrumentationListener* listener, uint32_t events) {
  Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
  if ((events & kMethodEntered) != 0) {
    method_entry_listeners_.push_back(listener);
    have_method_entry_listeners_ = true;
  }
  if ((events & kMethodExited) != 0) {
    method_exit_listeners_.push_back(listener);
    have_method_exit_listeners_ = true;
  }
  if ((events & kMethodUnwind) != 0) {
    method_unwind_listeners_.push_back(listener);
    have_method_unwind_listeners_ = true;
  }
  if ((events & kDexPcMoved) != 0) {
    dex_pc_listeners_.push_back(listener);
    have_dex_pc_listeners_ = true;
  }
  if ((events & kExceptionCaught) != 0) {
    exception_caught_listeners_.push_back(listener);
    have_exception_caught_listeners_ = true;
  }
  // Keep the interpreter's event-dispatch table in sync with the new listener set.
  UpdateInterpreterHandlerTable();
}
343
Ian Rogers62d6c772013-02-27 08:32:07 -0800344void Instrumentation::RemoveListener(InstrumentationListener* listener, uint32_t events) {
345 Locks::mutator_lock_->AssertExclusiveHeld(Thread::Current());
Ian Rogers62d6c772013-02-27 08:32:07 -0800346
347 if ((events & kMethodEntered) != 0) {
348 bool contains = std::find(method_entry_listeners_.begin(), method_entry_listeners_.end(),
349 listener) != method_entry_listeners_.end();
350 if (contains) {
351 method_entry_listeners_.remove(listener);
352 }
353 have_method_entry_listeners_ = method_entry_listeners_.size() > 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800354 }
355 if ((events & kMethodExited) != 0) {
356 bool contains = std::find(method_exit_listeners_.begin(), method_exit_listeners_.end(),
357 listener) != method_exit_listeners_.end();
358 if (contains) {
359 method_exit_listeners_.remove(listener);
360 }
361 have_method_exit_listeners_ = method_exit_listeners_.size() > 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800362 }
363 if ((events & kMethodUnwind) != 0) {
364 method_unwind_listeners_.remove(listener);
365 }
366 if ((events & kDexPcMoved) != 0) {
367 bool contains = std::find(dex_pc_listeners_.begin(), dex_pc_listeners_.end(),
368 listener) != dex_pc_listeners_.end();
369 if (contains) {
370 dex_pc_listeners_.remove(listener);
371 }
372 have_dex_pc_listeners_ = dex_pc_listeners_.size() > 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800373 }
Jeff Hao14dd5a82013-04-11 10:23:36 -0700374 if ((events & kExceptionCaught) != 0) {
375 exception_caught_listeners_.remove(listener);
376 have_exception_caught_listeners_ = exception_caught_listeners_.size() > 0;
377 }
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200378 UpdateInterpreterHandlerTable();
jeffhao725a9572012-11-13 18:20:12 -0800379}
380
// Reconfigures the runtime-wide stub level:
//   level 0 - no instrumentation stubs,
//   level 1 - method entry/exit stubs,
//   level 2 - interpreter stubs everywhere.
// No-op when the desired level already matches the current one.
void Instrumentation::ConfigureStubs(bool require_entry_exit_stubs, bool require_interpreter) {
  interpret_only_ = require_interpreter || forced_interpret_only_;
  // Compute what level of instrumentation is required and compare to current.
  int desired_level, current_level;
  if (require_interpreter) {
    desired_level = 2;
  } else if (require_entry_exit_stubs) {
    desired_level = 1;
  } else {
    desired_level = 0;
  }
  if (interpreter_stubs_installed_) {
    current_level = 2;
  } else if (entry_exit_stubs_installed_) {
    current_level = 1;
  } else {
    current_level = 0;
  }
  if (desired_level == current_level) {
    // We're already set.
    return;
  }
  Thread* self = Thread::Current();
  Runtime* runtime = Runtime::Current();
  // The thread list lock is acquired below; holding it here would deadlock.
  Locks::thread_list_lock_->AssertNotHeld(self);
  if (desired_level > 0) {
    // Raise the stub level: flip the relevant flag first so InstallStubsClassVisitor
    // picks the right entry points, then install exit stubs on every thread's stack.
    if (require_interpreter) {
      interpreter_stubs_installed_ = true;
    } else {
      CHECK(require_entry_exit_stubs);
      entry_exit_stubs_installed_ = true;
    }
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    instrumentation_stubs_installed_ = true;
    MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
    runtime->GetThreadList()->ForEach(InstrumentationInstallStack, this);
  } else {
    // Drop back to no stubs: restore method entry points for all classes.
    interpreter_stubs_installed_ = false;
    entry_exit_stubs_installed_ = false;
    runtime->GetClassLinker()->VisitClasses(InstallStubsClassVisitor, this);
    // Restore stack only if there is no method currently deoptimized.
    if (deoptimized_methods_.empty()) {
      instrumentation_stubs_installed_ = false;
      MutexLock mu(self, *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
    }
  }
}
429
Ian Rogersfa824272013-11-05 16:12:57 -0800430static void ResetQuickAllocEntryPointsForThread(Thread* thread, void* arg) {
431 thread->ResetQuickAllocEntryPointsForThread();
432}
433
// Increments the request counter for instrumented allocation entry points; the first
// requester switches every thread to the instrumented versions while all threads are
// suspended.
void Instrumentation::InstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  // should be guarded by a lock.
  DCHECK_GE(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool enable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndAdd(1) == 0;
  if (enable_instrumentation) {
    // Instrumentation wasn't enabled so enable it.
    SetQuickAllocEntryPointsInstrumented(true);
    // Suspend all threads so no thread allocates through a half-updated entry point.
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    ResetQuickAllocEntryPoints();
    tl->ResumeAll();
  }
}
449
// Decrements the request counter for instrumented allocation entry points; the last
// requester switches every thread back to the uninstrumented versions while all threads
// are suspended.
void Instrumentation::UninstrumentQuickAllocEntryPoints() {
  // TODO: the read of quick_alloc_entry_points_instrumentation_counter_ is racey and this code
  // should be guarded by a lock.
  DCHECK_GT(quick_alloc_entry_points_instrumentation_counter_.Load(), 0);
  const bool disable_instrumentation =
      quick_alloc_entry_points_instrumentation_counter_.FetchAndSub(1) == 1;
  if (disable_instrumentation) {
    SetQuickAllocEntryPointsInstrumented(false);
    // Suspend all threads so no thread allocates through a half-updated entry point.
    ThreadList* tl = Runtime::Current()->GetThreadList();
    tl->SuspendAll();
    ResetQuickAllocEntryPoints();
    tl->ResumeAll();
  }
}
464
465void Instrumentation::ResetQuickAllocEntryPoints() {
466 Runtime* runtime = Runtime::Current();
467 if (runtime->IsStarted()) {
Mathieu Chartiere6da9af2013-12-16 11:54:42 -0800468 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
469 runtime->GetThreadList()->ForEach(ResetQuickAllocEntryPointsForThread, NULL);
Ian Rogersfa824272013-11-05 16:12:57 -0800470 }
471}
472
Brian Carlstromea46f952013-07-30 01:26:50 -0700473void Instrumentation::UpdateMethodsCode(mirror::ArtMethod* method, const void* code) const {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100474 const void* new_code;
Ian Rogers62d6c772013-02-27 08:32:07 -0800475 if (LIKELY(!instrumentation_stubs_installed_)) {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100476 new_code = code;
Jeff Hao65d15d92013-07-16 16:39:33 -0700477 } else {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100478 if ((interpreter_stubs_installed_ || IsDeoptimized(method)) && !method->IsNative()) {
479 new_code = GetCompiledCodeToInterpreterBridge();
480 } else if (code == GetResolutionTrampoline(Runtime::Current()->GetClassLinker()) ||
481 code == GetCompiledCodeToInterpreterBridge()) {
482 new_code = code;
483 } else if (entry_exit_stubs_installed_) {
484 new_code = GetQuickInstrumentationEntryPoint();
Jeff Hao65d15d92013-07-16 16:39:33 -0700485 } else {
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100486 new_code = code;
Jeff Hao65d15d92013-07-16 16:39:33 -0700487 }
Ian Rogers62d6c772013-02-27 08:32:07 -0800488 }
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100489 UpdateEntrypoints(method, new_code);
490}
491
492void Instrumentation::Deoptimize(mirror::ArtMethod* method) {
493 CHECK(!method->IsNative());
494 CHECK(!method->IsProxyMethod());
495 CHECK(!method->IsAbstract());
496
497 std::pair<std::set<mirror::ArtMethod*>::iterator, bool> pair = deoptimized_methods_.insert(method);
498 bool already_deoptimized = !pair.second;
499 CHECK(!already_deoptimized) << "Method " << PrettyMethod(method) << " is already deoptimized";
500
501 if (!interpreter_stubs_installed_) {
502 UpdateEntrypoints(method, GetCompiledCodeToInterpreterBridge());
503
504 // Install instrumentation exit stub and instrumentation frames. We may already have installed
505 // these previously so it will only cover the newly created frames.
506 instrumentation_stubs_installed_ = true;
507 MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
508 Runtime::Current()->GetThreadList()->ForEach(InstrumentationInstallStack, this);
509 }
510}
511
// Reverts a previous Deoptimize(method): removes the method from the deoptimized set,
// restores its real entry point, and — once no deoptimized method remains — removes the
// instrumentation exit stubs from every thread's stack.
void Instrumentation::Undeoptimize(mirror::ArtMethod* method) {
  CHECK(!method->IsNative());
  CHECK(!method->IsProxyMethod());
  CHECK(!method->IsAbstract());

  auto it = deoptimized_methods_.find(method);
  CHECK(it != deoptimized_methods_.end()) << "Method " << PrettyMethod(method) << " is not deoptimized";
  deoptimized_methods_.erase(it);

  // Restore code and possibly stack only if we did not deoptimize everything.
  if (!interpreter_stubs_installed_) {
    // Restore its code or resolution trampoline.
    ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
    // Static methods of uninitialized classes must go back to the resolution trampoline
    // so class initialization is still triggered on first call.
    if (method->IsStatic() && !method->IsConstructor() && !method->GetDeclaringClass()->IsInitialized()) {
      UpdateEntrypoints(method, GetResolutionTrampoline(class_linker));
    } else {
      UpdateEntrypoints(method, class_linker->GetOatCodeFor(method));
    }

    // If there is no deoptimized method left, we can restore the stack of each thread.
    if (deoptimized_methods_.empty()) {
      MutexLock mu(Thread::Current(), *Locks::thread_list_lock_);
      Runtime::Current()->GetThreadList()->ForEach(InstrumentationRestoreStack, this);
      instrumentation_stubs_installed_ = false;
    }
  }
}
539
540bool Instrumentation::IsDeoptimized(mirror::ArtMethod* method) const {
541 DCHECK(method != nullptr);
542 return deoptimized_methods_.count(method);
543}
544
// Called when deoptimization support is turned on. The set of selectively deoptimized
// methods must be empty at that point.
void Instrumentation::EnableDeoptimization() {
  CHECK(deoptimized_methods_.empty());
}
548
549void Instrumentation::DisableDeoptimization() {
550 // If we deoptimized everything, undo it.
551 if (interpreter_stubs_installed_) {
552 UndeoptimizeEverything();
553 }
554 // Undeoptimized selected methods.
555 while (!deoptimized_methods_.empty()) {
556 auto it_begin = deoptimized_methods_.begin();
557 Undeoptimize(*it_begin);
558 }
559 CHECK(deoptimized_methods_.empty());
560}
561
// Deoptimization is active when everything runs in the interpreter or at least one
// method has been selectively deoptimized.
bool Instrumentation::IsDeoptimizationEnabled() const {
  return interpreter_stubs_installed_ || !deoptimized_methods_.empty();
}
565
// Forces all code through the interpreter by installing interpreter stubs runtime-wide.
// Must not already be fully deoptimized.
void Instrumentation::DeoptimizeEverything() {
  CHECK(!interpreter_stubs_installed_);
  ConfigureStubs(false, true);
}
570
// Reverts DeoptimizeEverything() by removing all instrumentation stubs.
// Must currently be fully deoptimized.
void Instrumentation::UndeoptimizeEverything() {
  CHECK(interpreter_stubs_installed_);
  ConfigureStubs(false, false);
}
575
576void Instrumentation::EnableMethodTracing() {
577 bool require_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners;
578 ConfigureStubs(!require_interpreter, require_interpreter);
579}
580
// Removes the stubs installed by EnableMethodTracing().
void Instrumentation::DisableMethodTracing() {
  ConfigureStubs(false, false);
}
584
Brian Carlstromea46f952013-07-30 01:26:50 -0700585const void* Instrumentation::GetQuickCodeFor(const mirror::ArtMethod* method) const {
Ian Rogers62d6c772013-02-27 08:32:07 -0800586 Runtime* runtime = Runtime::Current();
587 if (LIKELY(!instrumentation_stubs_installed_)) {
Jeff Haoaa4a7932013-05-13 11:28:27 -0700588 const void* code = method->GetEntryPointFromCompiledCode();
Ian Rogers62d6c772013-02-27 08:32:07 -0800589 DCHECK(code != NULL);
Ian Rogers848871b2013-08-05 10:56:33 -0700590 if (LIKELY(code != GetQuickResolutionTrampoline(runtime->GetClassLinker()) &&
591 code != GetQuickToInterpreterBridge())) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800592 return code;
593 }
594 }
595 return runtime->GetClassLinker()->GetOatCodeFor(method);
jeffhao725a9572012-11-13 18:20:12 -0800596}
597
// Posts a method-entered event to every registered method-entry listener.
// The iterator is advanced *before* invoking the callback so the loop position stays
// valid if the listener list is modified during the callback.
void Instrumentation::MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                                           const mirror::ArtMethod* method,
                                           uint32_t dex_pc) const {
  auto it = method_entry_listeners_.begin();
  bool is_end = (it == method_entry_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_entry_listeners_.end());
    cur->MethodEntered(thread, this_object, method, dex_pc);
  }
}
611
// Posts a method-exited event (with |return_value|) to every registered method-exit
// listener. The iterator is advanced *before* invoking the callback so the loop
// position stays valid if the listener list is modified during the callback.
void Instrumentation::MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                                          const mirror::ArtMethod* method,
                                          uint32_t dex_pc, const JValue& return_value) const {
  auto it = method_exit_listeners_.begin();
  bool is_end = (it == method_exit_listeners_.end());
  // Implemented this way to prevent problems caused by modification of the list while iterating.
  while (!is_end) {
    InstrumentationListener* cur = *it;
    ++it;
    is_end = (it == method_exit_listeners_.end());
    cur->MethodExited(thread, this_object, method, dex_pc, return_value);
  }
}
625
626void Instrumentation::MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
Brian Carlstromea46f952013-07-30 01:26:50 -0700627 const mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800628 uint32_t dex_pc) const {
629 if (have_method_unwind_listeners_) {
Mathieu Chartier02e25112013-08-14 16:14:24 -0700630 for (InstrumentationListener* listener : method_unwind_listeners_) {
Sebastien Hertz51db44a2013-11-19 10:00:29 +0100631 listener->MethodUnwind(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800632 }
633 }
634}
635
636void Instrumentation::DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
Brian Carlstromea46f952013-07-30 01:26:50 -0700637 const mirror::ArtMethod* method,
Ian Rogers62d6c772013-02-27 08:32:07 -0800638 uint32_t dex_pc) const {
639 // TODO: STL copy-on-write collection? The copy below is due to the debug listener having an
640 // action where it can remove itself as a listener and break the iterator. The copy only works
641 // around the problem and in general we may have to move to something like reference counting to
642 // ensure listeners are deleted correctly.
643 std::list<InstrumentationListener*> copy(dex_pc_listeners_);
Mathieu Chartier02e25112013-08-14 16:14:24 -0700644 for (InstrumentationListener* listener : copy) {
645 listener->DexPcMoved(thread, this_object, method, dex_pc);
Ian Rogers62d6c772013-02-27 08:32:07 -0800646 }
647}
648
// Dispatches an exception-caught event to every registered exception-caught listener.
// The pending exception is temporarily cleared so listeners run with no exception
// pending, then restored before returning.
void Instrumentation::ExceptionCaughtEvent(Thread* thread, const ThrowLocation& throw_location,
                                           mirror::ArtMethod* catch_method,
                                           uint32_t catch_dex_pc,
                                           mirror::Throwable* exception_object) const {
  if (have_exception_caught_listeners_) {
    // The thread's pending exception must be exactly the one being reported.
    DCHECK_EQ(thread->GetException(NULL), exception_object);
    // Clear it so listener callbacks don't observe (or trip over) a pending exception.
    thread->ClearException();
    for (InstrumentationListener* listener : exception_caught_listeners_) {
      listener->ExceptionCaught(thread, throw_location, catch_method, catch_dex_pc, exception_object);
    }
    // Re-install the exception so normal exception delivery continues.
    thread->SetException(throw_location, exception_object);
  }
}
662
663static void CheckStackDepth(Thread* self, const InstrumentationStackFrame& instrumentation_frame,
664 int delta)
665 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
666 size_t frame_id = StackVisitor::ComputeNumFrames(self) + delta;
667 if (frame_id != instrumentation_frame.frame_id_) {
668 LOG(ERROR) << "Expected frame_id=" << frame_id << " but found "
669 << instrumentation_frame.frame_id_;
670 StackVisitor::DescribeStack(self);
671 CHECK_EQ(frame_id, instrumentation_frame.frame_id_);
672 }
673}
674
675void Instrumentation::PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
Brian Carlstromea46f952013-07-30 01:26:50 -0700676 mirror::ArtMethod* method,
Jeff Hao9a916d32013-06-27 18:45:37 -0700677 uintptr_t lr, bool interpreter_entry) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800678 // We have a callee-save frame meaning this value is guaranteed to never be 0.
679 size_t frame_id = StackVisitor::ComputeNumFrames(self);
680 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
681 if (kVerboseInstrumentation) {
Brian Carlstrom2d888622013-07-18 17:02:00 -0700682 LOG(INFO) << "Entering " << PrettyMethod(method) << " from PC " << reinterpret_cast<void*>(lr);
Ian Rogers62d6c772013-02-27 08:32:07 -0800683 }
684 instrumentation::InstrumentationStackFrame instrumentation_frame(this_object, method, lr,
Jeff Hao9a916d32013-06-27 18:45:37 -0700685 frame_id, interpreter_entry);
Ian Rogers62d6c772013-02-27 08:32:07 -0800686 stack->push_front(instrumentation_frame);
687
688 MethodEnterEvent(self, this_object, method, 0);
689}
690
// Called by the instrumentation exit stub when an instrumented method returns.
// Pops the top instrumentation frame, fires the method-exit event, and decides
// whether execution should continue at the caller's real return pc or deoptimize
// into the interpreter. |gpr_result|/|fpr_result| carry the raw return value in
// the integer/float registers respectively.
uint64_t Instrumentation::PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                                       uint64_t gpr_result, uint64_t fpr_result) {
  // Do the pop.
  std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
  CHECK_GT(stack->size(), 0U);
  InstrumentationStackFrame instrumentation_frame = stack->front();
  stack->pop_front();

  // Set return PC and check the sanity of the stack.
  *return_pc = instrumentation_frame.return_pc_;
  CheckStackDepth(self, instrumentation_frame, 0);

  mirror::ArtMethod* method = instrumentation_frame.method_;
  // Pick the register holding the return value from the method's shorty:
  // 'V' (void) has none, 'F'/'D' live in the float register, all else in the GPR.
  char return_shorty = MethodHelper(method).GetShorty()[0];
  JValue return_value;
  if (return_shorty == 'V') {
    return_value.SetJ(0);
  } else if (return_shorty == 'F' || return_shorty == 'D') {
    return_value.SetJ(fpr_result);
  } else {
    return_value.SetJ(gpr_result);
  }
  // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
  // return_pc.
  uint32_t dex_pc = DexFile::kDexNoIndex;
  mirror::Object* this_object = instrumentation_frame.this_object_;
  MethodExitEvent(self, this_object, instrumentation_frame.method_, dex_pc, return_value);

  // Deoptimize if the caller needs to continue execution in the interpreter. Do nothing if we get
  // back to an upcall.
  NthCallerVisitor visitor(self, 1, true);
  visitor.WalkStack(true);
  bool deoptimize = (visitor.caller != NULL) &&
                    (interpreter_stubs_installed_ || IsDeoptimized(visitor.caller));
  if (deoptimize && kVerboseInstrumentation) {
    LOG(INFO) << "Deoptimizing into " << PrettyMethod(visitor.caller);
  }
  if (deoptimize) {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Deoptimizing from " << PrettyMethod(method)
                << " result is " << std::hex << return_value.GetJ();
    }
    // Stash the return value so the interpreter can pick it up after deoptimizing.
    self->SetDeoptimizationReturnValue(return_value);
    // Pack the deoptimization entry point (low 32 bits) and the real return pc
    // (high 32 bits) into one uint64_t for the exit stub.
    // NOTE(review): this packing assumes pointers fit in 32 bits — confirm for
    // 64-bit targets.
    return static_cast<uint64_t>(GetQuickDeoptimizationEntryPoint()) |
        (static_cast<uint64_t>(*return_pc) << 32);
  } else {
    if (kVerboseInstrumentation) {
      LOG(INFO) << "Returning from " << PrettyMethod(method)
                << " to PC " << reinterpret_cast<void*>(*return_pc);
    }
    return *return_pc;
  }
}
744
Ian Rogers62d6c772013-02-27 08:32:07 -0800745void Instrumentation::PopMethodForUnwind(Thread* self, bool is_deoptimization) const {
746 // Do the pop.
747 std::deque<instrumentation::InstrumentationStackFrame>* stack = self->GetInstrumentationStack();
748 CHECK_GT(stack->size(), 0U);
749 InstrumentationStackFrame instrumentation_frame = stack->front();
750 // TODO: bring back CheckStackDepth(self, instrumentation_frame, 2);
751 stack->pop_front();
752
Brian Carlstromea46f952013-07-30 01:26:50 -0700753 mirror::ArtMethod* method = instrumentation_frame.method_;
Ian Rogers62d6c772013-02-27 08:32:07 -0800754 if (is_deoptimization) {
755 if (kVerboseInstrumentation) {
756 LOG(INFO) << "Popping for deoptimization " << PrettyMethod(method);
757 }
758 } else {
759 if (kVerboseInstrumentation) {
760 LOG(INFO) << "Popping for unwind " << PrettyMethod(method);
761 }
762
763 // Notify listeners of method unwind.
764 // TODO: improve the dex pc information here, requires knowledge of current PC as opposed to
765 // return_pc.
766 uint32_t dex_pc = DexFile::kDexNoIndex;
767 MethodUnwindEvent(self, instrumentation_frame.this_object_, method, dex_pc);
768 }
769}
770
771std::string InstrumentationStackFrame::Dump() const {
772 std::ostringstream os;
773 os << "Frame " << frame_id_ << " " << PrettyMethod(method_) << ":"
774 << reinterpret_cast<void*>(return_pc_) << " this=" << reinterpret_cast<void*>(this_object_);
775 return os.str();
776}
777
778} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -0800779} // namespace art