/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_INSTRUMENTATION_H_
#define ART_RUNTIME_INSTRUMENTATION_H_

#include <stdint.h>
#include <list>
#include <unordered_set>

#include "arch/instruction_set.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "gc_root.h"
#include "safe_map.h"

namespace art {
namespace mirror {
  class Class;
  class Object;
  class Throwable;
}  // namespace mirror
class ArtField;
class ArtMethod;
union JValue;
class Thread;

namespace instrumentation {

// Interpreter handler tables.
enum InterpreterHandlerTable {
  kMainHandlerTable = 0,          // Main handler table: no suspend check, no instrumentation.
  kAlternativeHandlerTable = 1,   // Alternative handler table: suspend check and/or instrumentation
                                  // enabled.
  kNumHandlerTables
};

// Do we want to deoptimize for method entry and exit listeners or just try to intercept
// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
// application's performance.
static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;

// Instrumentation event listener API. Registered listeners get the appropriate callback for the
// events they are listening for. The callbacks supply the thread, method and dex_pc at which the
// event occurred. The thread may or may not be Thread::Current().
struct InstrumentationListener {
  InstrumentationListener() {}
  virtual ~InstrumentationListener() {}

  // Call-back for when a method is entered.
  virtual void MethodEntered(Thread* thread, mirror::Object* this_object,
                             ArtMethod* method,
                             uint32_t dex_pc) SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  // Call-back for when a method is exited.
  virtual void MethodExited(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc,
                            const JValue& return_value)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  // Call-back for when a method is popped due to an exception throw. A method will either cause a
  // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
  virtual void MethodUnwind(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  // Call-back for when the dex pc moves in a method.
  virtual void DexPcMoved(Thread* thread, mirror::Object* this_object,
                          ArtMethod* method, uint32_t new_dex_pc)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  // Call-back for when we read from a field.
  virtual void FieldRead(Thread* thread, mirror::Object* this_object, ArtMethod* method,
                         uint32_t dex_pc, ArtField* field) = 0;

  // Call-back for when we write into a field.
  virtual void FieldWritten(Thread* thread, mirror::Object* this_object, ArtMethod* method,
                            uint32_t dex_pc, ArtField* field, const JValue& field_value) = 0;

  // Call-back when an exception is caught.
  virtual void ExceptionCaught(Thread* thread, mirror::Throwable* exception_object)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  // Call-back for when we execute a branch.
  virtual void Branch(Thread* thread,
                      ArtMethod* method,
                      uint32_t dex_pc,
                      int32_t dex_pc_offset)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;

  // Call-back for when we get an invokevirtual or an invokeinterface.
  virtual void InvokeVirtualOrInterface(Thread* thread,
                                        mirror::Object* this_object,
                                        ArtMethod* caller,
                                        uint32_t dex_pc,
                                        ArtMethod* callee)
      REQUIRES(Roles::uninterruptible_)
      SHARED_REQUIRES(Locks::mutator_lock_) = 0;
};
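
// As an illustration only (a sketch, not part of this header): a client derives from
// InstrumentationListener and overrides the callbacks it cares about. The class name below is
// hypothetical, and the logging helpers are assumed to come from the usual base/logging.h and
// utils.h includes.
//
//   class LoggingListener FINAL : public InstrumentationListener {
//    public:
//     void MethodEntered(Thread* thread, mirror::Object* this_object,
//                        ArtMethod* method, uint32_t dex_pc)
//         SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE {
//       LOG(INFO) << "Entered " << PrettyMethod(method) << " at dex pc " << dex_pc;
//     }
//     // The remaining pure virtual call-backs must be overridden as well.
//   };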

// Instrumentation is a catch-all for when extra information is required from the runtime. The
// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
// to method entry and exit; it may also force execution to be switched to the interpreter and
// trigger deoptimization.
class Instrumentation {
 public:
  enum InstrumentationEvent {
    kMethodEntered = 0x1,
    kMethodExited = 0x2,
    kMethodUnwind = 0x4,
    kDexPcMoved = 0x8,
    kFieldRead = 0x10,
    kFieldWritten = 0x20,
    kExceptionCaught = 0x40,
    kBranch = 0x80,
    kInvokeVirtualOrInterface = 0x100,
  };

  enum class InstrumentationLevel {
    kInstrumentNothing,                   // execute without instrumentation
    kInstrumentWithInstrumentationStubs,  // execute with instrumentation entry/exit stubs
    kInstrumentWithInterpreter            // execute with interpreter
  };

  Instrumentation();

  // Add a listener to be notified of the masked-together set of instrumentation events. This may
  // suspend the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for saying you should have suspended all threads (installing stubs while threads are running
  // will break).
  void AddListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

  // Removes a listener, possibly removing instrumentation stubs.
  void RemoveListener(InstrumentationListener* listener, uint32_t events)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);

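  // Illustrative usage (a sketch under assumptions, not prescribed by this header): |events| is a
  // bitmask of InstrumentationEvent values and |my_listener| is a hypothetical client-provided
  // InstrumentationListener.
  //
  //   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  //   uint32_t events = Instrumentation::kMethodEntered | Instrumentation::kMethodExited;
  //   instr->AddListener(&my_listener, events);
  //   ...
  //   instr->RemoveListener(&my_listener, events);
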
  // Deoptimization.
  void EnableDeoptimization()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);
  // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
  void DisableDeoptimization(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_);

  bool AreAllMethodsDeoptimized() const {
    return interpreter_stubs_installed_;
  }
  bool ShouldNotifyMethodEnterExitEvents() const SHARED_REQUIRES(Locks::mutator_lock_);

  // Executes everything with interpreter.
  void DeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Executes everything with compiled code (or interpreter if there is no code). May visit class
  // linker classes through ConfigureStubs.
  void UndeoptimizeEverything(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
  // method (except a class initializer) set to the resolution trampoline will be deoptimized only
  // once its declaring class is initialized.
  void Deoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

  // Undeoptimize the method by restoring its entrypoints. Nevertheless, a static method
  // (except a class initializer) set to the resolution trampoline will be updated only once its
  // declaring class is initialized.
  void Undeoptimize(ArtMethod* method)
      REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !deoptimized_methods_lock_);

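  // Illustrative usage (a sketch, not prescribed by this header), e.g. a debugger-style client
  // that wants a single method executed by the interpreter:
  //
  //   Instrumentation* instr = Runtime::Current()->GetInstrumentation();
  //   instr->Deoptimize(method);    // |method| now runs in the interpreter.
  //   ...
  //   instr->Undeoptimize(method);  // Restore the original entrypoints.
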
  // Indicates whether the method has been deoptimized so it is executed with the interpreter.
  bool IsDeoptimized(ArtMethod* method)
      REQUIRES(!deoptimized_methods_lock_) SHARED_REQUIRES(Locks::mutator_lock_);

  // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
  void EnableMethodTracing(const char* key,
                           bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

  // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
  void DisableMethodTracing(const char* key)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_,
               !deoptimized_methods_lock_);

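  // Illustrative usage (a sketch with a made-up key): the key identifies the tracing client so
  // that several clients can request tracing independently.
  //
  //   instr->EnableMethodTracing("MyTracer");   // Installs entry/exit stubs (or the interpreter).
  //   ...
  //   instr->DisableMethodTracing("MyTracer");  // Removes them for this client.
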
  InterpreterHandlerTable GetInterpreterHandlerTable() const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    return interpreter_handler_table_;
  }

  void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
  void InstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void UninstrumentQuickAllocEntryPointsLocked()
      REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
               !Locks::runtime_shutdown_lock_);
  void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);

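  // Illustrative usage (a sketch, not prescribed by this header): allocation tracking toggles the
  // instrumented quick alloc entry points; the calls are counted, so nested pairs are fine.
  //
  //   instr->InstrumentQuickAllocEntryPoints();
  //   ... record allocations ...
  //   instr->UninstrumentQuickAllocEntryPoints();
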
  // Update the code of a method respecting any installed stubs.
  void UpdateMethodsCode(ArtMethod* method, const void* quick_code)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Get the quick code for the given method. More efficient than asking the class linker as it
  // will short-cut to GetCode if instrumentation and static method resolution stubs aren't
  // installed.
  const void* GetQuickCodeFor(ArtMethod* method, size_t pointer_size) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  void ForceInterpretOnly() {
    interpret_only_ = true;
    forced_interpret_only_ = true;
  }

  // Called by ArtMethod::Invoke to determine dispatch mechanism.
  bool InterpretOnly() const {
    return interpret_only_;
  }

  bool IsForcedInterpretOnly() const {
    return forced_interpret_only_;
  }

  bool AreExitStubsInstalled() const {
    return instrumentation_stubs_installed_;
  }

  bool HasMethodEntryListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_method_entry_listeners_;
  }

  bool HasMethodExitListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_method_exit_listeners_;
  }

  bool HasMethodUnwindListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_method_unwind_listeners_;
  }

  bool HasDexPcListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_dex_pc_listeners_;
  }

  bool HasFieldReadListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_field_read_listeners_;
  }

  bool HasFieldWriteListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_field_write_listeners_;
  }

  bool HasExceptionCaughtListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_exception_caught_listeners_;
  }

  bool HasBranchListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_branch_listeners_;
  }

  bool HasInvokeVirtualOrInterfaceListeners() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_invoke_virtual_or_interface_listeners_;
  }

  bool IsActive() const SHARED_REQUIRES(Locks::mutator_lock_) {
    return have_dex_pc_listeners_ || have_method_entry_listeners_ || have_method_exit_listeners_ ||
        have_field_read_listeners_ || have_field_write_listeners_ ||
        have_exception_caught_listeners_ || have_method_unwind_listeners_;
  }

  // Inform listeners that a method has been entered. A dex PC is provided as we may install
  // listeners into executing code and get method enter events for methods already on the stack.
  void MethodEnterEvent(Thread* thread, mirror::Object* this_object,
                        ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodEntryListeners())) {
      MethodEnterEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a method has been exited.
  void MethodExitEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       const JValue& return_value) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasMethodExitListeners())) {
      MethodExitEventImpl(thread, this_object, method, dex_pc, return_value);
    }
  }

  // Inform listeners that a method has been exited due to an exception.
  void MethodUnwindEvent(Thread* thread, mirror::Object* this_object,
                         ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Inform listeners that the dex pc has moved (only supported by the interpreter).
  void DexPcMovedEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasDexPcListeners())) {
      DexPcMovedEventImpl(thread, this_object, method, dex_pc);
    }
  }

  // Inform listeners that a branch has been taken (only supported by the interpreter).
  void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasBranchListeners())) {
      BranchImpl(thread, method, dex_pc, offset);
    }
  }

  // Inform listeners that we read a field (only supported by the interpreter).
  void FieldReadEvent(Thread* thread, mirror::Object* this_object,
                      ArtMethod* method, uint32_t dex_pc,
                      ArtField* field) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldReadListeners())) {
      FieldReadEventImpl(thread, this_object, method, dex_pc, field);
    }
  }

  // Inform listeners that we write a field (only supported by the interpreter).
  void FieldWriteEvent(Thread* thread, mirror::Object* this_object,
                       ArtMethod* method, uint32_t dex_pc,
                       ArtField* field, const JValue& field_value) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasFieldWriteListeners())) {
      FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
    }
  }

  void InvokeVirtualOrInterface(Thread* thread,
                                mirror::Object* this_object,
                                ArtMethod* caller,
                                uint32_t dex_pc,
                                ArtMethod* callee) const
      SHARED_REQUIRES(Locks::mutator_lock_) {
    if (UNLIKELY(HasInvokeVirtualOrInterfaceListeners())) {
      InvokeVirtualOrInterfaceImpl(thread, this_object, caller, dex_pc, callee);
    }
  }

  // Inform listeners that an exception was caught.
  void ExceptionCaughtEvent(Thread* thread, mirror::Throwable* exception_object) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Called when an instrumented method is entered. The intended link register (lr) is saved so
  // that returning causes a branch to the method exit stub. Generates method enter events.
  void PushInstrumentationStackFrame(Thread* self, mirror::Object* this_object,
                                     ArtMethod* method, uintptr_t lr,
                                     bool interpreter_entry)
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Called when an instrumented method is exited. Removes the pushed instrumentation frame,
  // returning the intended link register. Generates method exit events.
  TwoWordReturn PopInstrumentationStackFrame(Thread* self, uintptr_t* return_pc,
                                             uint64_t gpr_result, uint64_t fpr_result)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Pops an instrumentation frame from the current thread and generates an unwind event.
  void PopMethodForUnwind(Thread* self, bool is_deoptimization) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Call-back used when configuring stubs.
  void InstallStubsForClass(mirror::Class* klass) SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!deoptimized_methods_lock_);

  void InstallStubsForMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(!deoptimized_methods_lock_);

  // Install the instrumentation exit stub on every method on the stack of the given thread.
  // This is used by the debugger to cause a deoptimization of the thread's stack after updating
  // local variable(s).
  void InstrumentThreadStack(Thread* thread)
      SHARED_REQUIRES(Locks::mutator_lock_)
      REQUIRES(!Locks::thread_list_lock_);

  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  InstrumentationLevel GetCurrentInstrumentationLevel() const;

  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remember which
  // instrumentation level it needs. Therefore the current instrumentation level
  // becomes the highest instrumentation level required by a client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!deoptimized_methods_lock_,
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);

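  // Illustrative (hypothetical client keys, not defined by this header): if client "Tracer"
  // requests kInstrumentWithInstrumentationStubs and client "Debugger" requests
  // kInstrumentWithInterpreter, the effective level is kInstrumentWithInterpreter; once the
  // "Debugger" request is dropped, the level falls back to kInstrumentWithInstrumentationStubs.
  //
  //   ConfigureStubs("Tracer", InstrumentationLevel::kInstrumentWithInstrumentationStubs);
  //   ConfigureStubs("Debugger", InstrumentationLevel::kInstrumentWithInterpreter);
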
  void UpdateInterpreterHandlerTable() REQUIRES(Locks::mutator_lock_) {
    /*
     * TUNING: Dalvik's mterp stashes the actual current handler table base in a
     * tls field. For Arm, this enables all suspend, debug & tracing checks to be
     * collapsed into a single conditionally-executed ldw instruction.
     * Move to Dalvik-style handler-table management for both the goto interpreter and
     * mterp.
     */
    interpreter_handler_table_ = IsActive() ? kAlternativeHandlerTable : kMainHandlerTable;
  }

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to the mutator lock, which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;

  void MethodEnterEventImpl(Thread* thread, mirror::Object* this_object,
                            ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void MethodExitEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method,
                           uint32_t dex_pc, const JValue& return_value) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void InvokeVirtualOrInterfaceImpl(Thread* thread,
                                    mirror::Object* this_object,
                                    ArtMethod* caller,
                                    uint32_t dex_pc,
                                    ArtMethod* callee) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread, mirror::Object* this_object,
                          ArtMethod* method, uint32_t dex_pc,
                          ArtField* field) const
      SHARED_REQUIRES(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread, mirror::Object* this_object,
                           ArtMethod* method, uint32_t dex_pc,
                           ArtField* field, const JValue& field_value) const
      SHARED_REQUIRES(Locks::mutator_lock_);

  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  bool AddDeoptimizedMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  bool IsDeoptimizedMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) REQUIRES(deoptimized_methods_lock_);
  ArtMethod* BeginDeoptimizedMethod()
      SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);
  bool IsDeoptimizedMethodsEmpty() const
      SHARED_REQUIRES(Locks::mutator_lock_, deoptimized_methods_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter/exit stubs?
  bool entry_exit_stubs_installed_;

  // Have we hijacked ArtMethod::code_ to reference the enter interpreter stub?
  bool interpreter_stubs_installed_;

  // Do we need the fidelity of events that we only get from running within the interpreter?
  bool interpret_only_;

  // Did the runtime request we only run in the interpreter? i.e. -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception caught listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any invoke listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_invoke_virtual_or_interface_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  typedef SafeMap<const char*, InstrumentationLevel> InstrumentationLevelTable;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds the exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time, but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not, for example, std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> invoke_virtual_or_interface_listeners_
      GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_caught_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only.
  mutable ReaderWriterMutex deoptimized_methods_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(deoptimized_methods_lock_);
  bool deoptimization_enabled_;

  // Current interpreter handler table. This is updated each time the thread state flags are
  // modified.
  InterpreterHandlerTable interpreter_handler_table_ GUARDED_BY(Locks::mutator_lock_);

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_
      GUARDED_BY(Locks::instrument_entrypoints_lock_);

  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
};
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationEvent& rhs);
std::ostream& operator<<(std::ostream& os, const Instrumentation::InstrumentationLevel& rhs);

// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object, ArtMethod* method,
                            uintptr_t return_pc, size_t frame_id, bool interpreter_entry)
      : this_object_(this_object), method_(method), return_pc_(return_pc), frame_id_(frame_id),
        interpreter_entry_(interpreter_entry) {
  }

  std::string Dump() const SHARED_REQUIRES(Locks::mutator_lock_);

  mirror::Object* this_object_;
  ArtMethod* method_;
  uintptr_t return_pc_;
  size_t frame_id_;
  bool interpreter_entry_;
};

}  // namespace instrumentation
}  // namespace art

#endif  // ART_RUNTIME_INSTRUMENTATION_H_