/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_RUNTIME_CALLBACKS_H_
#define ART_RUNTIME_RUNTIME_CALLBACKS_H_

#include <memory>
#include <vector>

#include "base/array_ref.h"
#include "base/locks.h"
#include "base/macros.h"
#include "handle.h"

namespace art {

namespace dex {
struct ClassDef;
}  // namespace dex

namespace mirror {
class Class;
class ClassLoader;
class Object;
}  // namespace mirror

class ArtMethod;
class ClassLoadCallback;
class DexFile;
class MethodCallback;
class Monitor;
class ReaderWriterMutex;
class ReflectiveValueVisitor;
class Thread;
class ThreadLifecycleCallback;

// Note: RuntimeCallbacks uses the mutator lock to synchronize the callback lists. A thread must
//       hold the exclusive lock to add or remove a listener. A thread must hold the shared lock
//       to dispatch an event. This setup is chosen because some clients may want to suspend the
//       dispatching thread or all threads.
//
// To make this safe, the following restrictions apply:
// * Only the owner of a listener may ever add or remove said listener.
// * A listener must never add or remove itself or any other listener while running.
// * It is the responsibility of the owner not to remove the listener while it is running
//   (and suspended).
// * The owner should never deallocate a listener once it has been registered, even if it has
//   been removed.
//
// The simplest way to satisfy these restrictions is to never remove a listener and to do any
// state checking (e.g. whether the listener is enabled) in the listener itself. For an example,
// see Dbg, or the illustrative sketch below RuntimeSigQuitCallback.

class DdmCallback {
 public:
  virtual ~DdmCallback() {}
  virtual void DdmPublishChunk(uint32_t type, const ArrayRef<const uint8_t>& data)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
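
// Illustrative sketch only, not used by the runtime: a DdmCallback that records every published
// DDMS chunk so a test or tool could inspect it later. The class name and members are
// hypothetical, and the sketch assumes <mutex> is available; it would still need to be registered
// via RuntimeCallbacks::AddDdmCallback.
class ExampleDdmChunkRecorder : public DdmCallback {
 public:
  void DdmPublishChunk(uint32_t type, const ArrayRef<const uint8_t>& data) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // Chunks may be published from any thread holding the shared mutator lock, so guard the
    // container with its own lock. Copy the payload; `data` need not outlive this call.
    std::lock_guard<std::mutex> guard(lock_);
    recorded_.push_back(Chunk{type, std::vector<uint8_t>(data.begin(), data.end())});
  }

 private:
  struct Chunk {
    uint32_t type;
    std::vector<uint8_t> payload;
  };
  std::mutex lock_;
  std::vector<Chunk> recorded_;
};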

class DebuggerControlCallback {
 public:
  virtual ~DebuggerControlCallback() {}

  // Begin running the debugger.
  virtual void StartDebugger() = 0;
  // The debugger should begin shutting down since the runtime is ending. This is advisory only.
  virtual void StopDebugger() = 0;

  // This allows the debugger to tell the runtime whether it is configured.
  virtual bool IsDebuggerConfigured() = 0;
};

class RuntimeSigQuitCallback {
 public:
  virtual ~RuntimeSigQuitCallback() {}

  virtual void SigQuit() REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
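
// Illustrative sketch only, not used by the runtime, of the pattern recommended in the note
// above: the listener is registered once, never removed, and checks its own enabled flag whenever
// it is invoked. The class name is hypothetical and the sketch assumes <atomic> is available.
class ExampleSigQuitLogger : public RuntimeSigQuitCallback {
 public:
  void SigQuit() override REQUIRES_SHARED(Locks::mutator_lock_) {
    if (!enabled_.load(std::memory_order_acquire)) {
      return;  // Registered but currently disabled: do nothing.
    }
    sigquit_count_.fetch_add(1, std::memory_order_relaxed);
  }

  // The owner toggles this instead of adding/removing the listener.
  void SetEnabled(bool enabled) {
    enabled_.store(enabled, std::memory_order_release);
  }

 private:
  std::atomic<bool> enabled_{false};
  std::atomic<uint32_t> sigquit_count_{0};
};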

class RuntimePhaseCallback {
 public:
  enum RuntimePhase {
    kInitialAgents,  // Initial agent loading is done.
    kStart,          // The runtime is started.
    kInit,           // The runtime is initialized (and will run user code soon).
    kDeath,          // The runtime just died.
  };

  virtual ~RuntimePhaseCallback() {}

  virtual void NextRuntimePhase(RuntimePhase phase) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
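
// Illustrative sketch only, not used by the runtime: a RuntimePhaseCallback that latches whether
// the runtime has reached kInit, i.e. whether user code may already be running. The class name is
// hypothetical and the sketch assumes <atomic> is available.
class ExamplePhaseTracker : public RuntimePhaseCallback {
 public:
  void NextRuntimePhase(RuntimePhase phase) override REQUIRES_SHARED(Locks::mutator_lock_) {
    if (phase == RuntimePhase::kInit) {
      initialized_.store(true, std::memory_order_release);
    } else if (phase == RuntimePhase::kDeath) {
      initialized_.store(false, std::memory_order_release);
    }
  }

  bool IsInitialized() const {
    return initialized_.load(std::memory_order_acquire);
  }

 private:
  std::atomic<bool> initialized_{false};
};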

class MonitorCallback {
 public:
  // Called just before the thread goes to sleep to wait for the monitor to become unlocked.
  virtual void MonitorContendedLocking(Monitor* mon) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  // Called just after the monitor has been successfully acquired when it was already locked.
  virtual void MonitorContendedLocked(Monitor* mon) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
  // Called on entry to the Object#wait method regardless of whether the call is valid.
  virtual void ObjectWaitStart(Handle<mirror::Object> obj, int64_t millis_timeout)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Called just after the thread has woken up from sleeping in a wait(). At this point the thread
  // does not hold the monitor's lock. This is only called for wait() calls where the thread did
  // (or at least could have) gone to sleep.
  virtual void MonitorWaitFinished(Monitor* m, bool timed_out)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual ~MonitorCallback() {}
};
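
// Illustrative sketch only, not used by the runtime: a MonitorCallback that counts lock
// contention and wait time-outs, the kind of bookkeeping a profiling agent might do. The class
// name is hypothetical and the sketch assumes <atomic> is available.
class ExampleMonitorStats : public MonitorCallback {
 public:
  void MonitorContendedLocking(Monitor* /*mon*/) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    contended_lock_attempts_.fetch_add(1, std::memory_order_relaxed);
  }
  void MonitorContendedLocked(Monitor* /*mon*/) override
      REQUIRES_SHARED(Locks::mutator_lock_) {}
  void ObjectWaitStart(Handle<mirror::Object> /*obj*/, int64_t /*millis_timeout*/) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    waits_started_.fetch_add(1, std::memory_order_relaxed);
  }
  void MonitorWaitFinished(Monitor* /*m*/, bool timed_out) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    if (timed_out) {
      wait_timeouts_.fetch_add(1, std::memory_order_relaxed);
    }
  }

 private:
  std::atomic<uint64_t> contended_lock_attempts_{0};
  std::atomic<uint64_t> waits_started_{0};
  std::atomic<uint64_t> wait_timeouts_{0};
};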

class ParkCallback {
 public:
  // Called on entry to the Unsafe#park method.
  virtual void ThreadParkStart(bool is_absolute, int64_t millis_timeout)
      REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Called just after the thread has woken up from sleeping in a park(). This is only called for
  // Unsafe#park calls where the thread did (or at least could have) gone to sleep.
  virtual void ThreadParkFinished(bool timed_out) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  virtual ~ParkCallback() {}
};
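
// Illustrative sketch only, not used by the runtime: a ParkCallback that counts park attempts and
// park time-outs. Note that start and finish events are not guaranteed to pair up, since the
// finish event is only reported when the thread could actually have gone to sleep. The class name
// is hypothetical and the sketch assumes <atomic> is available.
class ExampleParkStats : public ParkCallback {
 public:
  void ThreadParkStart(bool /*is_absolute*/, int64_t /*millis_timeout*/) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    parks_started_.fetch_add(1, std::memory_order_relaxed);
  }
  void ThreadParkFinished(bool timed_out) override REQUIRES_SHARED(Locks::mutator_lock_) {
    if (timed_out) {
      park_timeouts_.fetch_add(1, std::memory_order_relaxed);
    }
  }

 private:
  std::atomic<uint64_t> parks_started_{0};
  std::atomic<uint64_t> park_timeouts_{0};
};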

// A callback to let parts of the runtime note that they are currently relying on a particular
// method remaining in its current state. Users should not rely on always being called. If
// multiple callbacks are added, the runtime will short-circuit as soon as the first one returns
// 'true'.
class MethodInspectionCallback {
 public:
  virtual ~MethodInspectionCallback() {}

  // Returns true if the method is currently being inspected and the runtime should not modify it
  // in potentially dangerous ways (e.g. replace it with a compiled version, JIT it, etc).
  virtual bool IsMethodBeingInspected(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Returns true if the method is safe to JIT, false otherwise.
  // Note that '!IsMethodSafeToJit(m)' implies 'IsMethodBeingInspected(m)'. That is, if this
  // method returns false, IsMethodBeingInspected must return true.
  virtual bool IsMethodSafeToJit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  // Returns true if we expect the method to be debuggable but are not doing anything unusual with
  // it currently.
  virtual bool MethodNeedsDebugVersion(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_) = 0;
};
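
// Illustrative sketch only, not used by the runtime: a MethodInspectionCallback that treats every
// method as inspected while a hypothetical global "tracing" flag is set, while preserving the
// documented invariant that '!IsMethodSafeToJit(m)' implies 'IsMethodBeingInspected(m)'. The
// class name and the flag are hypothetical; the sketch assumes <atomic> is available.
class ExampleTracingInspection : public MethodInspectionCallback {
 public:
  bool IsMethodBeingInspected(ArtMethod* /*method*/) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return tracing_enabled_.load(std::memory_order_acquire);
  }

  bool IsMethodSafeToJit(ArtMethod* method) override REQUIRES_SHARED(Locks::mutator_lock_) {
    // Returning false only when the method is also reported as being inspected keeps the
    // invariant documented above.
    return !IsMethodBeingInspected(method);
  }

  bool MethodNeedsDebugVersion(ArtMethod* /*method*/) override
      REQUIRES_SHARED(Locks::mutator_lock_) {
    return false;  // This sketch never forces debug versions of methods.
  }

  void SetTracingEnabled(bool enabled) {
    tracing_enabled_.store(enabled, std::memory_order_release);
  }

 private:
  std::atomic<bool> tracing_enabled_{false};
};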

// Callback to let a client be notified when reflective objects are being visited and updated, so
// that it can update any bare ArtMethod/ArtField pointers it might be holding.
class ReflectiveValueVisitCallback {
 public:
  virtual ~ReflectiveValueVisitCallback() {}

  // Called when all reflective values are visited with the update visitor.
  virtual void VisitReflectiveTargets(ReflectiveValueVisitor* visitor)
      REQUIRES(Locks::mutator_lock_) = 0;
};

class RuntimeCallbacks {
 public:
  RuntimeCallbacks();

  void AddThreadLifecycleCallback(ThreadLifecycleCallback* cb) REQUIRES(Locks::mutator_lock_);
  void RemoveThreadLifecycleCallback(ThreadLifecycleCallback* cb) REQUIRES(Locks::mutator_lock_);

  void ThreadStart(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void ThreadDeath(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

  void AddClassLoadCallback(ClassLoadCallback* cb) REQUIRES(Locks::mutator_lock_);
  void RemoveClassLoadCallback(ClassLoadCallback* cb) REQUIRES(Locks::mutator_lock_);

  void BeginDefineClass() REQUIRES_SHARED(Locks::mutator_lock_);
  void EndDefineClass() REQUIRES_SHARED(Locks::mutator_lock_);
  void ClassLoad(Handle<mirror::Class> klass) REQUIRES_SHARED(Locks::mutator_lock_);
  void ClassPrepare(Handle<mirror::Class> temp_klass, Handle<mirror::Class> klass)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void AddRuntimeSigQuitCallback(RuntimeSigQuitCallback* cb)
      REQUIRES(Locks::mutator_lock_);
  void RemoveRuntimeSigQuitCallback(RuntimeSigQuitCallback* cb)
      REQUIRES(Locks::mutator_lock_);

  void SigQuit() REQUIRES_SHARED(Locks::mutator_lock_);

  void AddRuntimePhaseCallback(RuntimePhaseCallback* cb)
      REQUIRES(Locks::mutator_lock_);
  void RemoveRuntimePhaseCallback(RuntimePhaseCallback* cb)
      REQUIRES(Locks::mutator_lock_);

  void NextRuntimePhase(RuntimePhaseCallback::RuntimePhase phase)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void ClassPreDefine(const char* descriptor,
                      Handle<mirror::Class> temp_class,
                      Handle<mirror::ClassLoader> loader,
                      const DexFile& initial_dex_file,
                      const dex::ClassDef& initial_class_def,
                      /*out*/DexFile const** final_dex_file,
                      /*out*/dex::ClassDef const** final_class_def)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void AddMethodCallback(MethodCallback* cb) REQUIRES(Locks::mutator_lock_);
  void RemoveMethodCallback(MethodCallback* cb) REQUIRES(Locks::mutator_lock_);

  void RegisterNativeMethod(ArtMethod* method,
                            const void* original_implementation,
                            /*out*/void** new_implementation)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void MonitorContendedLocking(Monitor* m) REQUIRES_SHARED(Locks::mutator_lock_);
  void MonitorContendedLocked(Monitor* m) REQUIRES_SHARED(Locks::mutator_lock_);
  void ObjectWaitStart(Handle<mirror::Object> m, int64_t timeout)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void MonitorWaitFinished(Monitor* m, bool timed_out)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void AddMonitorCallback(MonitorCallback* cb) REQUIRES_SHARED(Locks::mutator_lock_);
  void RemoveMonitorCallback(MonitorCallback* cb) REQUIRES_SHARED(Locks::mutator_lock_);

  void ThreadParkStart(bool is_absolute, int64_t timeout) REQUIRES_SHARED(Locks::mutator_lock_);
  void ThreadParkFinished(bool timed_out) REQUIRES_SHARED(Locks::mutator_lock_);
  void AddParkCallback(ParkCallback* cb) REQUIRES_SHARED(Locks::mutator_lock_);
  void RemoveParkCallback(ParkCallback* cb) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if some MethodInspectionCallback indicates the method is being inspected/depended
  // on by some code.
  bool IsMethodBeingInspected(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns false if some MethodInspectionCallback indicates the method cannot be safely JITed
  // (which implies that it is being inspected). Returns true otherwise. If it returns false, the
  // entrypoint should not be changed to JITed code.
  bool IsMethodSafeToJit(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns true if some MethodInspectionCallback indicates the method needs to use a debug
  // version. This allows later code to set breakpoints or perform other actions that could be
  // broken by some optimizations.
  bool MethodNeedsDebugVersion(ArtMethod* method) REQUIRES_SHARED(Locks::mutator_lock_);

  void AddMethodInspectionCallback(MethodInspectionCallback* cb)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void RemoveMethodInspectionCallback(MethodInspectionCallback* cb)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // DDMS callbacks.
  void DdmPublishChunk(uint32_t type, const ArrayRef<const uint8_t>& data)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void AddDdmCallback(DdmCallback* cb) REQUIRES_SHARED(Locks::mutator_lock_);
  void RemoveDdmCallback(DdmCallback* cb) REQUIRES_SHARED(Locks::mutator_lock_);

  void StartDebugger() REQUIRES_SHARED(Locks::mutator_lock_);
  // NO_THREAD_SAFETY_ANALYSIS since this is only called when we are in the middle of shutting down
  // and the mutator_lock_ is no longer acquirable.
  void StopDebugger() NO_THREAD_SAFETY_ANALYSIS;
  bool IsDebuggerConfigured() REQUIRES_SHARED(Locks::mutator_lock_);

  void AddDebuggerControlCallback(DebuggerControlCallback* cb)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void RemoveDebuggerControlCallback(DebuggerControlCallback* cb)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void VisitReflectiveTargets(ReflectiveValueVisitor* visitor) REQUIRES(Locks::mutator_lock_);

  void AddReflectiveValueVisitCallback(ReflectiveValueVisitCallback* cb)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void RemoveReflectiveValueVisitCallback(ReflectiveValueVisitCallback* cb)
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  std::unique_ptr<ReaderWriterMutex> callback_lock_ BOTTOM_MUTEX_ACQUIRED_AFTER;

  std::vector<ThreadLifecycleCallback*> thread_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<ClassLoadCallback*> class_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<RuntimeSigQuitCallback*> sigquit_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<RuntimePhaseCallback*> phase_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<MethodCallback*> method_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<MonitorCallback*> monitor_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<ParkCallback*> park_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<MethodInspectionCallback*> method_inspection_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<DdmCallback*> ddm_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<DebuggerControlCallback*> debugger_control_callbacks_
      GUARDED_BY(callback_lock_);
  std::vector<ReflectiveValueVisitCallback*> reflective_value_visit_callbacks_
      GUARDED_BY(callback_lock_);
};

}  // namespace art

#endif  // ART_RUNTIME_RUNTIME_CALLBACKS_H_