/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_H_
#define ART_RUNTIME_STACK_H_

#include <stdint.h>
#include <string>

#include "base/macros.h"
#include "base/mutex.h"
#include "quick/quick_method_frame_info.h"

namespace art {

namespace mirror {
class Object;
}  // namespace mirror

class ArtMethod;
class Context;
class HandleScope;
class OatQuickMethodHeader;
class ShadowFrame;
class Thread;
union JValue;

// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,
  kIntVReg,
  kFloatVReg,
  kLongLoVReg,
  kLongHiVReg,
  kDoubleLoVReg,
  kDoubleHiVReg,
  kConstant,
  kImpreciseConstant,
  kUndefined,
};
std::ostream& operator<<(std::ostream& os, const VRegKind& rhs);

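// Illustrative note: kIntVReg and kFloatVReg name 32-bit values, kReferenceVReg
// names an object reference, and the Lo/Hi kinds name the two 32-bit halves of
// a 64-bit long or double, which occupies two adjacent vregs (see
// StackVisitor::GetVReg/GetVRegPair below).
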
// Size in bytes of the should_deoptimize flag on stack.
// We just need 4 bytes for our purpose regardless of the architecture. Frame size
// calculation will automatically do alignment for the final frame size.
static constexpr size_t kShouldDeoptimizeFlagSize = 4;

/*
 * Our current stack layout.
 * The Dalvik registers come first, followed by the
 * Method*, followed by other special temporaries if any, followed by
 * regular compiler temporaries. As of now we only have the Method* as
 * a special compiler temporary.
 * A compiler temporary can be thought of as a virtual register that
 * does not exist in the dex but holds intermediate values to help
 * optimizations and code generation. A special compiler temporary is
 * one whose location in the frame is well known, while non-special ones
 * have no fixed location in the frame as long as the code generator
 * itself knows how to access them.
 *
 * TODO: Update this documentation?
 *
 *     +-------------------------------+
 *     | IN[ins-1]                     |  {Note: resides in caller's frame}
 *     |       .                       |
 *     | IN[0]                         |
 *     | caller's ArtMethod            |  ... ArtMethod*
 *     +===============================+  {Note: start of callee's frame}
 *     | core callee-save spill        |  {variable sized}
 *     +-------------------------------+
 *     | fp callee-save spill          |
 *     +-------------------------------+
 *     | filler word                   |  {For compatibility, if V[locals-1] is used as wide}
 *     +-------------------------------+
 *     | V[locals-1]                   |
 *     | V[locals-2]                   |
 *     |       .                       |
 *     |       .                       |  ... (reg == 2)
 *     | V[1]                          |  ... (reg == 1)
 *     | V[0]                          |  ... (reg == 0) <---- "locals_start"
 *     +-------------------------------+
 *     | stack alignment padding       |  {0 to (kStackAlignWords-1) of padding}
 *     +-------------------------------+
 *     | Compiler temp region          |  ... (reg >= max_num_special_temps)
 *     |       .                       |
 *     |       .                       |
 *     | V[max_num_special_temps + 1]  |
 *     | V[max_num_special_temps + 0]  |
 *     +-------------------------------+
 *     | OUT[outs-1]                   |
 *     | OUT[outs-2]                   |
 *     |       .                       |
 *     | OUT[0]                        |
 *     | ArtMethod*                    |  ... (reg == num_total_code_regs == special_temp_value) <<== sp, 16-byte aligned
 *     +===============================+
 */

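// A reading of the diagram above (descriptive only, not an extra guarantee):
// a StackVisitor's cur_quick_frame_ points at the ArtMethod* slot marked
// "<<== sp", so *GetCurrentQuickFrame() is the method executing in that frame,
// and GetCurrentQuickFrameInfo() describes the spill regions and total frame
// size between that slot and the caller's ArtMethod*.
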
class StackVisitor {
 public:
  // This enum controls whether inlined frames are included when walking the
  // stack.
  enum class StackWalkKind {
    kIncludeInlinedFrames,
    kSkipInlinedFrames,
  };
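  // As the names suggest, kIncludeInlinedFrames reports each inlined method as
  // its own frame during the walk, while kSkipInlinedFrames reports only the
  // physical quick/shadow frames.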

 protected:
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               bool check_suspended = true);

  bool GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

 public:
  virtual ~StackVisitor() {}
  StackVisitor(const StackVisitor&) = default;
  StackVisitor(StackVisitor&&) = default;

  // Return 'true' if we should continue to visit more frames, 'false' to stop.
  virtual bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  enum class CountTransitions {
    kYes,
    kNo,
  };

  template <CountTransitions kCount = CountTransitions::kYes>
  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* GetThread() const {
    return thread_;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Sets this stack frame's method pointer. This requires the MutatorLock to be
  // held exclusively, and it does not work with inlined methods.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);

  ArtMethod* GetOuterMethod() const {
    return *GetCurrentQuickFrame();
  }

  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }

  uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the height of the stack, in managed stack frames, including transitions.
  size_t GetFrameHeight() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }

  // Returns a frame ID for JDWP use, starting from 1.
  size_t GetFrameId() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFrameHeight() + 1;
  }

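  // Worked example (assuming cur_depth_ is 0 at the innermost, first-visited
  // frame): with five managed frames, the innermost frame has height 4 and
  // frame ID 5, while the outermost frame has height 0 and frame ID 1.
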
  size_t GetNumFrames() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (num_frames_ == 0) {
      num_frames_ = ComputeNumFrames(thread_, walk_kind_);
    }
    return num_frames_;
  }

  size_t GetFrameDepth() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return cur_depth_;
  }

  // Get the method and dex pc immediately after the one that's currently being visited.
  bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

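  // Sketch (illustrative only) of reading a 64-bit vreg from inside a
  // VisitFrame() override; 'vreg' stands for whichever register index the
  // caller wants to inspect:
  //
  //   uint64_t wide_value;
  //   if (GetVRegPair(GetMethod(), vreg, kLongLoVReg, kLongHiVReg, &wide_value)) {
  //     // wide_value now holds the long stored in vregs [vreg, vreg + 1].
  //   }
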
  // Values are set in debugger shadow frames; the debugger will make sure
  // deoptimization is triggered so that the new values take effect.
  bool SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values are set in debugger shadow frames; the debugger will make sure
  // deoptimization is triggered so that the new values take effect.
  bool SetVRegPair(ArtMethod* m,
                   uint16_t vreg,
                   uint64_t new_value,
                   VRegKind kind_lo,
                   VRegKind kind_hi)
      REQUIRES_SHARED(Locks::mutator_lock_);

  uintptr_t* GetGPRAddress(uint32_t reg) const;

  uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsInInlinedFrame() const {
    return current_inlining_depth_ != 0;
  }

  size_t GetCurrentInliningDepth() const {
    return current_inlining_depth_;
  }

  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }

  ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }

  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }

  HandleScope* GetCurrentHandleScope(size_t pointer_size) const {
    ArtMethod** sp = GetCurrentQuickFrame();
    // Skip the ArtMethod*; the handle scope comes next.
    return reinterpret_cast<HandleScope*>(reinterpret_cast<uintptr_t>(sp) + pointer_size);
  }

  std::string DescribeLocation() const REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeNumFrames(Thread* thread, StackWalkKind walk_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void DescribeStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  const OatQuickMethodHeader* GetCurrentOatQuickMethodHeader() const {
    return cur_oat_quick_method_header_;
  }

  QuickMethodFrameInfo GetCurrentQuickFrameInfo() const REQUIRES_SHARED(Locks::mutator_lock_);
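  // For example (see quick/quick_method_frame_info.h for the exact API),
  // GetCurrentQuickFrameInfo().FrameSizeInBytes() gives the size of the callee
  // frame drawn in the layout comment near the top of this file, and its spill
  // masks describe the core/fp callee-save regions.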

 private:
  // Private constructor used when num_frames_ has already been computed.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               size_t num_frames,
               bool check_suspended = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsAccessibleRegister(uint32_t reg, bool is_float) const {
    return is_float ? IsAccessibleFPR(reg) : IsAccessibleGPR(reg);
  }
  uintptr_t GetRegister(uint32_t reg, bool is_float) const {
    DCHECK(IsAccessibleRegister(reg, is_float));
    return is_float ? GetFPR(reg) : GetGPR(reg);
  }

  bool IsAccessibleGPR(uint32_t reg) const;
  uintptr_t GetGPR(uint32_t reg) const;

  bool IsAccessibleFPR(uint32_t reg) const;
  uintptr_t GetFPR(uint32_t reg) const;

  bool GetVRegFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                                          uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                    VRegKind kind_lo, VRegKind kind_hi,
                                    uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi, VRegKind kind_lo,
                                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SanityCheckFrame() const REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* const thread_;
  const StackWalkKind walk_kind_;
  ShadowFrame* cur_shadow_frame_;
  ArtMethod** cur_quick_frame_;
  uintptr_t cur_quick_frame_pc_;
  const OatQuickMethodHeader* cur_oat_quick_method_header_;
  // Lazily computed number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;
  // Inlining depth of the frame we are currently at; 0 if there is no inlined
  // frame.
  size_t current_inlining_depth_;

 protected:
  Context* const context_;
  const bool check_suspended_;
};

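// A minimal usage sketch (illustrative only; the visitor name and counting
// logic are hypothetical, not part of this header):
//
//   class FrameCounter : public StackVisitor {
//    public:
//     FrameCounter(Thread* thread, Context* context)
//         : StackVisitor(thread, context, StackWalkKind::kSkipInlinedFrames) {}
//     bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) override {
//       ++count_;
//       return true;  // Keep walking towards the oldest frame.
//     }
//     size_t count_ = 0;
//   };
//
//   // Later, with the mutator lock held for the thread being inspected:
//   FrameCounter counter(thread, /* context= */ nullptr);
//   counter.WalkStack();
//   // counter.count_ now holds the number of frames visited.
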
}  // namespace art

#endif  // ART_RUNTIME_STACK_H_