blob: c811935e9dce06bdf70ad71d106b6a15e40b5c38 [file] [log] [blame]
jeffhao725a9572012-11-13 18:20:12 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_INSTRUMENTATION_H_
18#define ART_RUNTIME_INSTRUMENTATION_H_
jeffhao725a9572012-11-13 18:20:12 -080019
Mythri Alle72be14e2021-11-01 11:48:06 +000020#include <stdint.h>
Mythri Alle5097f832021-11-02 14:52:30 +000021
22#include <functional>
Ian Rogers576ca0c2014-06-06 15:58:22 -070023#include <list>
Andreas Gampe7e56a072018-11-29 10:40:06 -080024#include <memory>
Mythri Alle72be14e2021-11-01 11:48:06 +000025#include <optional>
Mythri Alle5097f832021-11-02 14:52:30 +000026#include <unordered_set>
Ian Rogers576ca0c2014-06-06 15:58:22 -070027
Ian Rogersd582fa42014-11-05 23:46:43 -080028#include "arch/instruction_set.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070029#include "base/enums.h"
Andreas Gampe7e56a072018-11-29 10:40:06 -080030#include "base/locks.h"
Elliott Hughes76160052012-12-12 16:31:20 -080031#include "base/macros.h"
David Sehr67bf42e2018-02-26 16:43:04 -080032#include "base/safe_map.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070033#include "gc_root.h"
Mythri Alle5097f832021-11-02 14:52:30 +000034#include "offsets.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080035
jeffhao725a9572012-11-13 18:20:12 -080036namespace art {
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080037namespace mirror {
Igor Murashkin2ffb7032017-11-08 13:35:21 -080038class Class;
39class Object;
40class Throwable;
Ian Rogers62d6c772013-02-27 08:32:07 -080041} // namespace mirror
Mathieu Chartierc7853442015-03-27 14:35:38 -070042class ArtField;
Mathieu Chartiere401d142015-04-22 13:56:20 -070043class ArtMethod;
Alex Lightd7661582017-05-01 13:48:16 -070044template <typename T> class Handle;
Alex Light2c8206f2018-06-08 14:51:09 -070045template <typename T> class MutableHandle;
Mythri Alle5097f832021-11-02 14:52:30 +000046struct NthCallerVisitor;
Ian Rogers62d6c772013-02-27 08:32:07 -080047union JValue;
Andreas Gampe7e56a072018-11-29 10:40:06 -080048class SHARED_LOCKABLE ReaderWriterMutex;
Alex Lighte814f9d2017-07-31 16:14:39 -070049class ShadowFrame;
jeffhao725a9572012-11-13 18:20:12 -080050class Thread;
Mingyao Yang2ee17902017-08-30 11:37:08 -070051enum class DeoptimizationMethodType;
jeffhao725a9572012-11-13 18:20:12 -080052
Ian Rogers62d6c772013-02-27 08:32:07 -080053namespace instrumentation {
jeffhao725a9572012-11-13 18:20:12 -080054
Sebastien Hertzee1997a2013-09-19 14:47:09 +020055
Andreas Gampe40da2862015-02-27 12:49:04 -080056// Do we want to deoptimize for method entry and exit listeners or just try to intercept
57// invocations? Deoptimization forces all code to run in the interpreter and considerably hurts the
58// application's performance.
59static constexpr bool kDeoptimizeForAccurateMethodEntryExitListeners = true;
60
// An optional frame is either Some(const ShadowFrame& current_frame) or None depending on if the
// method being exited has a shadow-frame associated with the current stack frame. In cases where
// there is no shadow-frame associated with this stack frame this will be None.
64using OptionalFrame = std::optional<std::reference_wrapper<const ShadowFrame>>;
65
Ian Rogers62d6c772013-02-27 08:32:07 -080066// Instrumentation event listener API. Registered listeners will get the appropriate call back for
67// the events they are listening for. The call backs supply the thread, method and dex_pc the event
68// occurred upon. The thread may or may not be Thread::Current().
69struct InstrumentationListener {
70 InstrumentationListener() {}
71 virtual ~InstrumentationListener() {}
72
73 // Call-back for when a method is entered.
Mythri Alle9cc65df2021-09-21 15:09:58 +000074 virtual void MethodEntered(Thread* thread, ArtMethod* method)
75 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080076
Alex Lightd7661582017-05-01 13:48:16 -070077 virtual void MethodExited(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -070078 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -070079 OptionalFrame frame,
80 MutableHandle<mirror::Object>& return_value)
Alex Lightd7661582017-05-01 13:48:16 -070081 REQUIRES_SHARED(Locks::mutator_lock_);
82
83 // Call-back for when a method is exited. The implementor should either handler-ize the return
84 // value (if appropriate) or use the alternate MethodExited callback instead if they need to
85 // go through a suspend point.
86 virtual void MethodExited(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -070087 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -070088 OptionalFrame frame,
89 JValue& return_value)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070090 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080091
92 // Call-back for when a method is popped due to an exception throw. A method will either cause a
93 // MethodExited call-back or a MethodUnwind call-back when its activation is removed.
Alex Lightd7661582017-05-01 13:48:16 -070094 virtual void MethodUnwind(Thread* thread,
95 Handle<mirror::Object> this_object,
96 ArtMethod* method,
97 uint32_t dex_pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -070098 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -080099
100 // Call-back for when the dex pc moves in a method.
Alex Lightd7661582017-05-01 13:48:16 -0700101 virtual void DexPcMoved(Thread* thread,
102 Handle<mirror::Object> this_object,
103 ArtMethod* method,
104 uint32_t new_dex_pc)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700105 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Ian Rogers62d6c772013-02-27 08:32:07 -0800106
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200107 // Call-back for when we read from a field.
Alex Lightd7661582017-05-01 13:48:16 -0700108 virtual void FieldRead(Thread* thread,
109 Handle<mirror::Object> this_object,
110 ArtMethod* method,
111 uint32_t dex_pc,
112 ArtField* field) = 0;
113
114 virtual void FieldWritten(Thread* thread,
115 Handle<mirror::Object> this_object,
116 ArtMethod* method,
117 uint32_t dex_pc,
118 ArtField* field,
119 Handle<mirror::Object> field_value)
120 REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200121
122 // Call-back for when we write into a field.
Alex Lightd7661582017-05-01 13:48:16 -0700123 virtual void FieldWritten(Thread* thread,
124 Handle<mirror::Object> this_object,
125 ArtMethod* method,
126 uint32_t dex_pc,
127 ArtField* field,
128 const JValue& field_value)
129 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200130
Alex Light6e1607e2017-08-23 10:06:18 -0700131 // Call-back when an exception is thrown.
132 virtual void ExceptionThrown(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -0700133 Handle<mirror::Throwable> exception_object)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700134 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800135
Alex Light9fb1ab12017-09-05 09:32:49 -0700136 // Call-back when an exception is caught/handled by java code.
137 virtual void ExceptionHandled(Thread* thread, Handle<mirror::Throwable> exception_object)
138 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
139
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000140 // Call-back for when we execute a branch.
141 virtual void Branch(Thread* thread,
142 ArtMethod* method,
143 uint32_t dex_pc,
144 int32_t dex_pc_offset)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700145 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
Nicolas Geoffray5550ca82015-08-21 18:38:30 +0100146
Alex Lighte814f9d2017-07-31 16:14:39 -0700147 // Call-back when a shadow_frame with the needs_notify_pop_ boolean set is popped off the stack by
148 // either return or exceptions. Normally instrumentation listeners should ensure that there are
149 // shadow-frames by deoptimizing stacks.
150 virtual void WatchedFramePop(Thread* thread ATTRIBUTE_UNUSED,
151 const ShadowFrame& frame ATTRIBUTE_UNUSED)
Alex Light05f47742017-09-14 00:34:44 +0000152 REQUIRES_SHARED(Locks::mutator_lock_) = 0;
jeffhao725a9572012-11-13 18:20:12 -0800153};
154
class Instrumentation;
// A helper to send instrumentation events while popping the stack in a safe way.
class InstrumentationStackPopper {
 public:
  explicit InstrumentationStackPopper(Thread* self);
  // Destructor; declared to run with the mutator lock shared-held.
  ~InstrumentationStackPopper() REQUIRES_SHARED(Locks::mutator_lock_);

  // Increase the number of frames being popped up to `stack_pointer`. Return true if the
  // frames were popped without any exceptions, false otherwise. The exception that caused
  // the pop is 'exception'.
  bool PopFramesTo(uintptr_t stack_pointer, /*in-out*/MutableHandle<mirror::Throwable>& exception)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Thread whose instrumentation frames are being popped.
  Thread* self_;
  // Instrumentation instance used to dispatch pop events.
  // NOTE(review): presumably set to the runtime's instrumentation by the constructor —
  // definition not visible in this header.
  Instrumentation* instrumentation_;
  // The stack pointer limit for frames to pop.
  uintptr_t pop_until_;
};
174
Ian Rogers62d6c772013-02-27 08:32:07 -0800175// Instrumentation is a catch-all for when extra information is required from the runtime. The
176// typical use for instrumentation is for profiling and debugging. Instrumentation may add stubs
177// to method entry and exit, it may also force execution to be switched to the interpreter and
178// trigger deoptimization.
jeffhao725a9572012-11-13 18:20:12 -0800179class Instrumentation {
180 public:
Ian Rogers62d6c772013-02-27 08:32:07 -0800181 enum InstrumentationEvent {
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800182 kMethodEntered = 0x1,
183 kMethodExited = 0x2,
184 kMethodUnwind = 0x4,
185 kDexPcMoved = 0x8,
186 kFieldRead = 0x10,
187 kFieldWritten = 0x20,
Alex Light6e1607e2017-08-23 10:06:18 -0700188 kExceptionThrown = 0x40,
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000189 kBranch = 0x80,
Alex Lighte814f9d2017-07-31 16:14:39 -0700190 kWatchedFramePop = 0x200,
Alex Light9fb1ab12017-09-05 09:32:49 -0700191 kExceptionHandled = 0x400,
Ian Rogers62d6c772013-02-27 08:32:07 -0800192 };
jeffhao725a9572012-11-13 18:20:12 -0800193
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200194 enum class InstrumentationLevel {
195 kInstrumentNothing, // execute without instrumentation
196 kInstrumentWithInstrumentationStubs, // execute with instrumentation entry/exit stubs
197 kInstrumentWithInterpreter // execute with interpreter
198 };
199
Mathieu Chartier3b05e9b2014-03-25 09:29:43 -0700200 Instrumentation();
jeffhao725a9572012-11-13 18:20:12 -0800201
Mythri Alle5097f832021-11-02 14:52:30 +0000202 static constexpr MemberOffset NeedsEntryExitHooksOffset() {
Mythri Alle9575c122021-11-12 12:04:41 +0000203 // Assert that instrumentation_stubs_installed_ is 8bits wide. If the size changes
204 // update the compare instructions in the code generator when generating checks for
205 // MethodEntryExitHooks.
206 static_assert(sizeof(instrumentation_stubs_installed_) == 1,
207 "instrumentation_stubs_installed_ isn't expected size");
Mythri Alle5097f832021-11-02 14:52:30 +0000208 return MemberOffset(OFFSETOF_MEMBER(Instrumentation, instrumentation_stubs_installed_));
209 }
210
  // Add a listener to be notified of the masked-together set of instrumentation events. This
  // suspends the runtime to install stubs. You are expected to hold the mutator lock as a proxy
  // for saying you should have suspended all threads (installing stubs while threads are running
  // will break).
215 void AddListener(InstrumentationListener* listener, uint32_t events)
Mathieu Chartier90443472015-07-16 20:32:27 -0700216 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800217
Ian Rogers62d6c772013-02-27 08:32:07 -0800218 // Removes a listener possibly removing instrumentation stubs.
219 void RemoveListener(InstrumentationListener* listener, uint32_t events)
Mathieu Chartier90443472015-07-16 20:32:27 -0700220 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800221
Mathieu Chartieraa516822015-10-02 15:53:37 -0700222 // Calls UndeoptimizeEverything which may visit class linker classes through ConfigureStubs.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200223 void DisableDeoptimization(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700224 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800225 REQUIRES(!GetDeoptimizedMethodsLock());
Mathieu Chartieraa516822015-10-02 15:53:37 -0700226
Sebastien Hertza76a6d42014-03-20 16:40:17 +0100227 bool AreAllMethodsDeoptimized() const {
Mythri Alle9575c122021-11-12 12:04:41 +0000228 return InterpreterStubsInstalled();
Sebastien Hertza76a6d42014-03-20 16:40:17 +0100229 }
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700230 bool ShouldNotifyMethodEnterExitEvents() const REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100231
232 // Executes everything with interpreter.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200233 void DeoptimizeEverything(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700234 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
235 REQUIRES(!Locks::thread_list_lock_,
236 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800237 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100238
Mathieu Chartieraa516822015-10-02 15:53:37 -0700239 // Executes everything with compiled code (or interpreter if there is no code). May visit class
240 // linker classes through ConfigureStubs.
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200241 void UndeoptimizeEverything(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700242 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
243 REQUIRES(!Locks::thread_list_lock_,
244 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800245 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100246
247 // Deoptimize a method by forcing its execution with the interpreter. Nevertheless, a static
248 // method (except a class initializer) set to the resolution trampoline will be deoptimized only
249 // once its declaring class is initialized.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700250 void Deoptimize(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800251 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100252
  // Undeoptimize the method by restoring its entrypoints. Nevertheless, a static method
254 // (except a class initializer) set to the resolution trampoline will be updated only once its
255 // declaring class is initialized.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700256 void Undeoptimize(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800257 REQUIRES(Locks::mutator_lock_, !Locks::thread_list_lock_, !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100258
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200259 // Indicates whether the method has been deoptimized so it is executed with the interpreter.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700260 bool IsDeoptimized(ArtMethod* method)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800261 REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100262
Mythri Alleab474882022-01-17 16:43:04 +0000263 // Indicates if any method needs to be deoptimized. This is used to avoid walking the stack to
264 // determine if a deoptimization is required.
265 bool IsDeoptimizedMethodsEmpty() const
266 REQUIRES(!GetDeoptimizedMethodsLock()) REQUIRES_SHARED(Locks::mutator_lock_);
267
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200268 // Enable method tracing by installing instrumentation entry/exit stubs or interpreter.
269 void EnableMethodTracing(const char* key,
270 bool needs_interpreter = kDeoptimizeForAccurateMethodEntryExitListeners)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700271 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
272 REQUIRES(!Locks::thread_list_lock_,
273 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800274 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100275
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200276 // Disable method tracing by uninstalling instrumentation entry/exit stubs or interpreter.
277 void DisableMethodTracing(const char* key)
Mathieu Chartieraa516822015-10-02 15:53:37 -0700278 REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
279 REQUIRES(!Locks::thread_list_lock_,
280 !Locks::classlinker_classes_lock_,
Andreas Gampe7e56a072018-11-29 10:40:06 -0800281 !GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100282
Sebastien Hertzee1997a2013-09-19 14:47:09 +0200283
Mathieu Chartier90443472015-07-16 20:32:27 -0700284 void InstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
285 void UninstrumentQuickAllocEntryPoints() REQUIRES(!Locks::instrument_entrypoints_lock_);
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700286 void InstrumentQuickAllocEntryPointsLocked()
Mathieu Chartier90443472015-07-16 20:32:27 -0700287 REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
288 !Locks::runtime_shutdown_lock_);
Mathieu Chartier9ef78b52014-09-25 17:03:12 -0700289 void UninstrumentQuickAllocEntryPointsLocked()
Mathieu Chartier90443472015-07-16 20:32:27 -0700290 REQUIRES(Locks::instrument_entrypoints_lock_, !Locks::thread_list_lock_,
291 !Locks::runtime_shutdown_lock_);
292 void ResetQuickAllocEntryPoints() REQUIRES(Locks::runtime_shutdown_lock_);
Ian Rogersfa824272013-11-05 16:12:57 -0800293
Nicolas Geoffray854af032021-12-21 08:32:42 +0000294 // Returns a string representation of the given entry point.
295 static std::string EntryPointString(const void* code);
296
  // Initialize the entrypoint of the method. `aot_code` is the AOT code.
298 void InitializeMethodsCode(ArtMethod* method, const void* aot_code)
299 REQUIRES_SHARED(Locks::mutator_lock_);
300
Ian Rogers62d6c772013-02-27 08:32:07 -0800301 // Update the code of a method respecting any installed stubs.
Nicolas Geoffray854af032021-12-21 08:32:42 +0000302 void UpdateMethodsCode(ArtMethod* method, const void* new_code)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800303 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Ian Rogers62d6c772013-02-27 08:32:07 -0800304
Nicolas Geoffraya6e0e7d2018-01-26 13:16:50 +0000305 // Update the code of a native method to a JITed stub.
Nicolas Geoffray854af032021-12-21 08:32:42 +0000306 void UpdateNativeMethodsCodeToJitCode(ArtMethod* method, const void* new_code)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800307 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Nicolas Geoffraya6e0e7d2018-01-26 13:16:50 +0000308
Alex Light2d441b12018-06-08 15:33:21 -0700309 // Return the code that we can execute for an invoke including from the JIT.
Nicolas Geoffrayc25a9f92021-12-13 17:22:43 +0000310 const void* GetCodeForInvoke(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800311
Mythri Alle02af7362022-03-25 12:14:28 +0000312 // Return the code that we can execute considering the current instrumentation level.
313 // If interpreter stubs are installed return interpreter bridge. If the entry exit stubs
314 // are installed return an instrumentation entry point. Otherwise, return the code that
315 // can be executed including from the JIT.
316 const void* GetMaybeInstrumentedCodeForInvoke(ArtMethod* method)
317 REQUIRES_SHARED(Locks::mutator_lock_);
318
Ian Rogers62d6c772013-02-27 08:32:07 -0800319 void ForceInterpretOnly() {
Ian Rogers62d6c772013-02-27 08:32:07 -0800320 forced_interpret_only_ = true;
321 }
322
Mythri Alle9575c122021-11-12 12:04:41 +0000323 bool EntryExitStubsInstalled() const {
324 return instrumentation_level_ == InstrumentationLevel::kInstrumentWithInstrumentationStubs ||
325 instrumentation_level_ == InstrumentationLevel::kInstrumentWithInterpreter;
326 }
327
328 bool InterpreterStubsInstalled() const {
329 return instrumentation_level_ == InstrumentationLevel::kInstrumentWithInterpreter;
330 }
331
Brian Carlstromea46f952013-07-30 01:26:50 -0700332 // Called by ArtMethod::Invoke to determine dispatch mechanism.
Ian Rogers62d6c772013-02-27 08:32:07 -0800333 bool InterpretOnly() const {
Mythri Alle9575c122021-11-12 12:04:41 +0000334 return forced_interpret_only_ || InterpreterStubsInstalled();
Ian Rogers62d6c772013-02-27 08:32:07 -0800335 }
Nicolas Geoffrayc25a9f92021-12-13 17:22:43 +0000336 bool InterpretOnly(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800337
Hiroshi Yamauchi563b47c2014-02-28 17:18:37 -0800338 bool IsForcedInterpretOnly() const {
339 return forced_interpret_only_;
340 }
341
Ian Rogers62d6c772013-02-27 08:32:07 -0800342 bool AreExitStubsInstalled() const {
343 return instrumentation_stubs_installed_;
344 }
345
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700346 bool HasMethodEntryListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200347 return have_method_entry_listeners_;
348 }
349
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700350 bool HasMethodExitListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200351 return have_method_exit_listeners_;
352 }
353
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700354 bool HasMethodUnwindListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz0462c4c2015-04-01 16:34:17 +0200355 return have_method_unwind_listeners_;
356 }
357
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700358 bool HasDexPcListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200359 return have_dex_pc_listeners_;
360 }
361
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700362 bool HasFieldReadListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200363 return have_field_read_listeners_;
364 }
365
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700366 bool HasFieldWriteListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200367 return have_field_write_listeners_;
368 }
369
Alex Light6e1607e2017-08-23 10:06:18 -0700370 bool HasExceptionThrownListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
371 return have_exception_thrown_listeners_;
Sebastien Hertz9f102032014-05-23 08:59:42 +0200372 }
373
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700374 bool HasBranchListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000375 return have_branch_listeners_;
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800376 }
377
Alex Lighte814f9d2017-07-31 16:14:39 -0700378 bool HasWatchedFramePopListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
379 return have_watched_frame_pop_listeners_;
380 }
381
Alex Light9fb1ab12017-09-05 09:32:49 -0700382 bool HasExceptionHandledListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
383 return have_exception_handled_listeners_;
384 }
385
Mythri Alleab474882022-01-17 16:43:04 +0000386 bool NeedsSlowInterpreterForListeners() const REQUIRES_SHARED(Locks::mutator_lock_) {
387 return have_field_read_listeners_ ||
388 have_field_write_listeners_ ||
389 have_watched_frame_pop_listeners_ ||
390 have_exception_handled_listeners_;
Bill Buzbeefd522f92016-02-11 22:37:42 +0000391 }
392
Ian Rogers62d6c772013-02-27 08:32:07 -0800393 // Inform listeners that a method has been entered. A dex PC is provided as we may install
394 // listeners into executing code and get method enter events for methods already on the stack.
Mythri Alle9cc65df2021-09-21 15:09:58 +0000395 void MethodEnterEvent(Thread* thread, ArtMethod* method) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700396 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200397 if (UNLIKELY(HasMethodEntryListeners())) {
Mythri Alle9cc65df2021-09-21 15:09:58 +0000398 MethodEnterEventImpl(thread, method);
Ian Rogers62d6c772013-02-27 08:32:07 -0800399 }
400 }
401
402 // Inform listeners that a method has been exited.
Alex Lightb7c640d2019-03-20 15:52:13 -0700403 template<typename T>
Alex Lightd7661582017-05-01 13:48:16 -0700404 void MethodExitEvent(Thread* thread,
Alex Lightd7661582017-05-01 13:48:16 -0700405 ArtMethod* method,
Alex Lightb7c640d2019-03-20 15:52:13 -0700406 OptionalFrame frame,
407 T& return_value) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700408 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200409 if (UNLIKELY(HasMethodExitListeners())) {
Mythri Alle18fba4c2021-10-27 10:00:55 +0000410 MethodExitEventImpl(thread, method, frame, return_value);
Ian Rogers62d6c772013-02-27 08:32:07 -0800411 }
412 }
413
414 // Inform listeners that a method has been exited due to an exception.
Vladimir Marko19711d42019-04-12 14:05:34 +0100415 void MethodUnwindEvent(Thread* thread,
416 ObjPtr<mirror::Object> this_object,
417 ArtMethod* method,
418 uint32_t dex_pc) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700419 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800420
421 // Inform listeners that the dex pc has moved (only supported by the interpreter).
Vladimir Marko19711d42019-04-12 14:05:34 +0100422 void DexPcMovedEvent(Thread* thread,
423 ObjPtr<mirror::Object> this_object,
424 ArtMethod* method,
425 uint32_t dex_pc) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700426 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz74109f62013-06-07 17:40:09 +0200427 if (UNLIKELY(HasDexPcListeners())) {
Ian Rogers62d6c772013-02-27 08:32:07 -0800428 DexPcMovedEventImpl(thread, this_object, method, dex_pc);
429 }
430 }
431
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000432 // Inform listeners that a branch has been taken (only supported by the interpreter).
433 void Branch(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700434 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray81f0f952016-01-20 16:25:19 +0000435 if (UNLIKELY(HasBranchListeners())) {
436 BranchImpl(thread, method, dex_pc, offset);
Mathieu Chartiere5f13e52015-02-24 09:37:21 -0800437 }
438 }
439
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200440 // Inform listeners that we read a field (only supported by the interpreter).
Vladimir Marko19711d42019-04-12 14:05:34 +0100441 void FieldReadEvent(Thread* thread,
442 ObjPtr<mirror::Object> this_object,
443 ArtMethod* method,
444 uint32_t dex_pc,
Mathieu Chartierc7853442015-03-27 14:35:38 -0700445 ArtField* field) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700446 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200447 if (UNLIKELY(HasFieldReadListeners())) {
448 FieldReadEventImpl(thread, this_object, method, dex_pc, field);
449 }
450 }
451
452 // Inform listeners that we write a field (only supported by the interpreter).
Vladimir Marko19711d42019-04-12 14:05:34 +0100453 void FieldWriteEvent(Thread* thread,
454 ObjPtr<mirror::Object> this_object,
455 ArtMethod* method,
456 uint32_t dex_pc,
457 ArtField* field,
458 const JValue& field_value) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700459 REQUIRES_SHARED(Locks::mutator_lock_) {
Sebastien Hertz3f52eaf2014-04-04 17:50:18 +0200460 if (UNLIKELY(HasFieldWriteListeners())) {
461 FieldWriteEventImpl(thread, this_object, method, dex_pc, field, field_value);
462 }
463 }
464
  // Inform listeners that a watched frame has been popped (only supported by the interpreter).
466 void WatchedFramePopped(Thread* thread, const ShadowFrame& frame) const
467 REQUIRES_SHARED(Locks::mutator_lock_) {
468 if (UNLIKELY(HasWatchedFramePopListeners())) {
469 WatchedFramePopImpl(thread, frame);
470 }
471 }
472
Alex Light6e1607e2017-08-23 10:06:18 -0700473 // Inform listeners that an exception was thrown.
Vladimir Marko19711d42019-04-12 14:05:34 +0100474 void ExceptionThrownEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700475 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800476
Alex Light9fb1ab12017-09-05 09:32:49 -0700477 // Inform listeners that an exception has been handled. This is not sent for native code or for
478 // exceptions which reach the end of the thread's stack.
Vladimir Marko19711d42019-04-12 14:05:34 +0100479 void ExceptionHandledEvent(Thread* thread, ObjPtr<mirror::Throwable> exception_object) const
Alex Light9fb1ab12017-09-05 09:32:49 -0700480 REQUIRES_SHARED(Locks::mutator_lock_);
481
Mythri Alle5097f832021-11-02 14:52:30 +0000482 JValue GetReturnValue(Thread* self,
483 ArtMethod* method,
484 bool* is_ref,
485 uint64_t* gpr_result,
486 uint64_t* fpr_result) REQUIRES_SHARED(Locks::mutator_lock_);
487 bool ShouldDeoptimizeMethod(Thread* self, const NthCallerVisitor& visitor)
488 REQUIRES_SHARED(Locks::mutator_lock_);
489
Ian Rogers62d6c772013-02-27 08:32:07 -0800490 // Called when an instrumented method is entered. The intended link register (lr) is saved so
491 // that returning causes a branch to the method exit stub. Generates method enter events.
Vladimir Marko19711d42019-04-12 14:05:34 +0100492 void PushInstrumentationStackFrame(Thread* self,
493 ObjPtr<mirror::Object> this_object,
494 ArtMethod* method,
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000495 uintptr_t stack_pointer,
Vladimir Marko19711d42019-04-12 14:05:34 +0100496 uintptr_t lr,
Jeff Hao9a916d32013-06-27 18:45:37 -0700497 bool interpreter_entry)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700498 REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800499
Mingyao Yang2ee17902017-08-30 11:37:08 -0700500 DeoptimizationMethodType GetDeoptimizationMethodType(ArtMethod* method)
501 REQUIRES_SHARED(Locks::mutator_lock_);
502
Ian Rogers62d6c772013-02-27 08:32:07 -0800503 // Called when an instrumented method is exited. Removes the pushed instrumentation frame
Alex Lightb7edcda2017-04-27 13:20:31 -0700504 // returning the intended link register. Generates method exit events. The gpr_result and
505 // fpr_result pointers are pointers to the locations where the integer/pointer and floating point
506 // result values of the function are stored. Both pointers must always be valid but the values
507 // held there will only be meaningful if interpreted as the appropriate type given the function
508 // being returned from.
Nicolas Geoffraye91e7952020-01-23 10:15:56 +0000509 TwoWordReturn PopInstrumentationStackFrame(Thread* self,
510 uintptr_t* return_pc_addr,
511 uint64_t* gpr_result,
512 uint64_t* fpr_result)
Andreas Gampe7e56a072018-11-29 10:40:06 -0800513 REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Ian Rogers62d6c772013-02-27 08:32:07 -0800514
  // Pops instrumentation frames from the current thread up to the frame identified by
  // |stack_pointer|, and returns the return pc for the last instrumentation frame that's popped.
  // (Frame selection is by stack pointer, not by an explicit frame count.)
  uintptr_t PopFramesForDeoptimization(Thread* self, uintptr_t stack_pointer) const
      REQUIRES_SHARED(Locks::mutator_lock_);
Ian Rogers62d6c772013-02-27 08:32:07 -0800519
  // Callback for ConfigureStubs: installs the entrypoints matching the current instrumentation
  // level on the methods of |klass|.
  void InstallStubsForClass(ObjPtr<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!GetDeoptimizedMethodsLock());

  // Installs the entrypoint matching the current instrumentation level on a single method.
  void InstallStubsForMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());
Sebastien Hertz138dbfc2013-12-04 18:15:25 +0100526
  // Install instrumentation exit stub on every method of the stack of the given thread.
  // This is used by:
  //  - the debugger to cause a deoptimization of all the frames in thread's stack (for
  //    example, after updating local variables)
  //  - to call method entry / exit hooks for tracing. For this we instrument
  //    the stack frame to run entry / exit hooks but we don't need to deoptimize.
  // deopt_all_frames indicates whether the frames need to deoptimize or not.
  void InstrumentThreadStack(Thread* thread, bool deopt_all_frames) REQUIRES(Locks::mutator_lock_);
Mingyao Yang99170c62015-07-06 11:10:37 -0700535
  // Force all currently running frames to be deoptimized back to interpreter. This should only be
  // used in cases where basically all compiled code has been invalidated.
  // Bumps current_force_deopt_id_ so that frames created before the call are also deopted.
  void DeoptimizeAllThreadFrames() REQUIRES(art::Locks::mutator_lock_);
539
  // Computes a stable identifier for the frame at |frame_depth|, adjusting for the
  // |inlined_frames_before_frame| that share a physical frame with it.
  // NOTE(review): exact id scheme not visible here — confirm against the definition.
  static size_t ComputeFrameId(Thread* self,
                               size_t frame_depth,
                               size_t inlined_frames_before_frame)
      REQUIRES_SHARED(Locks::mutator_lock_);
Sebastien Hertzb2feaaf2015-10-12 13:40:10 +0000544
  // Does not hold lock, used to check if someone changed from not instrumented to instrumented
  // during a GC suspend point.
  bool AllocEntrypointsInstrumented() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return alloc_entrypoints_instrumented_;
  }

  // Returns the current instrumentation_level_.
  InstrumentationLevel GetCurrentInstrumentationLevel() const;
552
 private:
  // Returns true if moving to the given instrumentation level requires the installation of stubs.
  // False otherwise.
  bool RequiresInstrumentationInstallation(InstrumentationLevel new_level) const;

  // Returns true if we need the entry / exit stub to call entry hooks. JITed code
  // calls entry / exit hooks directly and doesn't need the stub.
  static bool CodeNeedsEntryExitStub(const void* code, ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Update the current instrumentation_level_.
  void UpdateInstrumentationLevel(InstrumentationLevel level);
Mythri Alle9575c122021-11-12 12:04:41 +0000565
  // Does the job of installing or removing instrumentation code within methods.
  // In order to support multiple clients using instrumentation at the same time,
  // the caller must pass a unique key (a string) identifying it so we remind which
  // instrumentation level it needs. Therefore the current instrumentation level
  // becomes the highest instrumentation level required by a client.
  void ConfigureStubs(const char* key, InstrumentationLevel desired_instrumentation_level)
      REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);

  // Re-installs stubs to match the current instrumentation_level_ (see ConfigureStubs above).
  void UpdateStubs() REQUIRES(Locks::mutator_lock_, Roles::uninterruptible_)
      REQUIRES(!GetDeoptimizedMethodsLock(),
               !Locks::thread_list_lock_,
               !Locks::classlinker_classes_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800580
  // If there are no pending deoptimizations restores the stack to the normal state by updating the
  // return pcs to actual return addresses from the instrumentation stack and clears the
  // instrumentation stack.
  void MaybeRestoreInstrumentationStack() REQUIRES(Locks::mutator_lock_);

  // No thread safety analysis to get around SetQuickAllocEntryPointsInstrumented requiring
  // exclusive access to mutator lock which you can't get if the runtime isn't started.
  void SetEntrypointsInstrumented(bool instrumented) NO_THREAD_SAFETY_ANALYSIS;
Mathieu Chartier661974a2014-01-09 11:23:53 -0800589
  // Slow paths for the public *Event methods: each notifies every registered listener of the
  // corresponding kind. Only called once the matching have_*_listeners_ short-cut is true.
  void MethodEnterEventImpl(Thread* thread, ArtMethod* method) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  // T is the representation of the return value (e.g. JValue or a raw register value);
  // |frame| is optionally present for interpreter frames.
  template <typename T>
  void MethodExitEventImpl(Thread* thread,
                           ArtMethod* method,
                           OptionalFrame frame,
                           T& return_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void DexPcMovedEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void BranchImpl(Thread* thread, ArtMethod* method, uint32_t dex_pc, int32_t offset) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void WatchedFramePopImpl(Thread* thread, const ShadowFrame& frame) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldReadEventImpl(Thread* thread,
                          ObjPtr<mirror::Object> this_object,
                          ArtMethod* method,
                          uint32_t dex_pc,
                          ArtField* field) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FieldWriteEventImpl(Thread* thread,
                           ObjPtr<mirror::Object> this_object,
                           ArtMethod* method,
                           uint32_t dex_pc,
                           ArtField* field,
                           const JValue& field_value) const
      REQUIRES_SHARED(Locks::mutator_lock_);
jeffhao725a9572012-11-13 18:20:12 -0800620
  // Read barrier-aware utility functions for accessing deoptimized_methods_.
  // Returns true if the method was newly added (not already present).
  bool AddDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
  bool IsDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  // Returns true if the method was present and removed.
  bool RemoveDeoptimizedMethod(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(GetDeoptimizedMethodsLock());
  // Returns some element of the deoptimized-method set (iteration starting point).
  ArtMethod* BeginDeoptimizedMethod()
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  bool IsDeoptimizedMethodsEmptyLocked() const
      REQUIRES_SHARED(Locks::mutator_lock_, GetDeoptimizedMethodsLock());
  void UpdateMethodsCodeImpl(ArtMethod* method, const void* new_code)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!GetDeoptimizedMethodsLock());

  // Accessor used both at runtime and in the thread-safety annotations above; the mutex lives
  // behind a unique_ptr (see deoptimized_methods_lock_ below).
  ReaderWriterMutex* GetDeoptimizedMethodsLock() const {
    return deoptimized_methods_lock_.get();
  }
Hiroshi Yamauchi799eb3a2014-07-18 15:38:17 -0700638
  // A counter that's incremented every time DeoptimizeAllThreadFrames is called. We check each
  // InstrumentationStackFrame's creation id against this number and if they differ we deopt even
  // if we could otherwise continue running.
  uint64_t current_force_deopt_id_ GUARDED_BY(Locks::mutator_lock_);

  // Have we hijacked ArtMethod::code_ so that it calls instrumentation/interpreter code?
  bool instrumentation_stubs_installed_;

  // The required level of instrumentation. This could be one of the following values:
  // kInstrumentNothing: no instrumentation support is needed
  // kInstrumentWithInstrumentationStubs: needs support to call method entry/exit stubs.
  // kInstrumentWithInterpreter: only execute with interpreter
  Instrumentation::InstrumentationLevel instrumentation_level_;

  // Did the runtime request we only run in the interpreter? ie -Xint mode.
  bool forced_interpret_only_;

  // Do we have any listeners for method entry events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method exit events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for method unwind events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for dex move events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field read events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any listeners for field write events? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception thrown listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any frame pop listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any branch listeners? Short-cut to avoid taking the instrumentation_lock_.
  bool have_branch_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Do we have any exception handled listeners? Short-cut to avoid taking the
  // instrumentation_lock_.
  bool have_exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // Contains the instrumentation level required by each client of the instrumentation identified
  // by a string key.
  using InstrumentationLevelTable = SafeMap<const char*, InstrumentationLevel>;
  InstrumentationLevelTable requested_instrumentation_levels_ GUARDED_BY(Locks::mutator_lock_);

  // The event listeners, written to with the mutator_lock_ exclusively held.
  // Mutators must be able to iterate over these lists concurrently, that is, with listeners being
  // added or removed while iterating. The modifying thread holds exclusive lock,
  // so other threads cannot iterate (i.e. read the data of the list) at the same time but they
  // do keep iterators that need to remain valid. This is the reason these listeners are std::list
  // and not for example std::vector: the existing storage for a std::list does not move.
  // Note that mutators cannot make a copy of these lists before iterating, as the instrumentation
  // listeners can also be deleted concurrently.
  // As a result, these lists are never trimmed. That's acceptable given the low number of
  // listeners we have.
  std::list<InstrumentationListener*> method_entry_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_exit_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> method_unwind_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> branch_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> dex_pc_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_read_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> field_write_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_thrown_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> watched_frame_pop_listeners_ GUARDED_BY(Locks::mutator_lock_);
  std::list<InstrumentationListener*> exception_handled_listeners_ GUARDED_BY(Locks::mutator_lock_);

  // The set of methods being deoptimized (by the debugger) which must be executed with interpreter
  // only. The lock is heap-allocated so GetDeoptimizedMethodsLock() can be used in thread-safety
  // annotations.
  mutable std::unique_ptr<ReaderWriterMutex> deoptimized_methods_lock_ BOTTOM_MUTEX_ACQUIRED_AFTER;
  std::unordered_set<ArtMethod*> deoptimized_methods_ GUARDED_BY(GetDeoptimizedMethodsLock());

  // Greater than 0 if quick alloc entry points instrumented.
  size_t quick_alloc_entry_points_instrumentation_counter_;

  // alloc_entrypoints_instrumented_ is only updated with all the threads suspended; this is done
  // so that the GC, which relies on thread suspension, only sees
  // alloc_entrypoints_instrumented_ change during suspend points.
  bool alloc_entrypoints_instrumented_;
734
  friend class InstrumentationTest;  // For GetCurrentInstrumentationLevel and ConfigureStubs.
  friend class InstrumentationStackPopper;  // For popping instrumentation frames.
  // Stack-walk helper that pushes instrumentation frames; needs access to private state.
  friend void InstrumentationInstallStack(Thread*, void*, bool);

  DISALLOW_COPY_AND_ASSIGN(Instrumentation);
740};
// Stream insertion for the instrumentation enums (e.g. for LOG/CHECK output).
std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationEvent rhs);
std::ostream& operator<<(std::ostream& os, Instrumentation::InstrumentationLevel rhs);
jeffhao725a9572012-11-13 18:20:12 -0800743
// An element in the instrumentation side stack maintained in art::Thread.
struct InstrumentationStackFrame {
  InstrumentationStackFrame(mirror::Object* this_object,
                            ArtMethod* method,
                            uintptr_t return_pc,
                            bool interpreter_entry,
                            uint64_t force_deopt_id)
      : this_object_(this_object),
        method_(method),
        return_pc_(return_pc),
        interpreter_entry_(interpreter_entry),
        force_deopt_id_(force_deopt_id) {
  }

  // Human-readable description of this frame, for debugging.
  std::string Dump() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Receiver captured at method entry (see PushInstrumentationStackFrame).
  // NOTE(review): presumably null for static methods — confirm at call sites.
  mirror::Object* this_object_;
  // The instrumented method this frame belongs to.
  ArtMethod* method_;
  // The original return pc saved when the exit stub was installed as the link register.
  uintptr_t return_pc_;
  // Whether the method was entered through the interpreter entry point.
  bool interpreter_entry_;
  // Snapshot of Instrumentation::current_force_deopt_id_ at frame creation; a mismatch at
  // pop time forces deoptimization (see DeoptimizeAllThreadFrames).
  uint64_t force_deopt_id_;
};
766
767} // namespace instrumentation
jeffhao725a9572012-11-13 18:20:12 -0800768} // namespace art
769
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700770#endif // ART_RUNTIME_INSTRUMENTATION_H_