/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_H_
#define ART_RUNTIME_STACK_H_

#include <stdint.h>
#include <string>

#include "base/macros.h"
#include "base/mutex.h"
#include "quick/quick_method_frame_info.h"

namespace art {

namespace mirror {
class Object;
}  // namespace mirror

class ArtMethod;
class Context;
class HandleScope;
class OatQuickMethodHeader;
class ShadowFrame;
class Thread;
union JValue;

// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,
  kIntVReg,
  kFloatVReg,
  kLongLoVReg,
  kLongHiVReg,
  kDoubleLoVReg,
  kDoubleHiVReg,
  kConstant,
  kImpreciseConstant,
  kUndefined,
};
std::ostream& operator<<(std::ostream& os, const VRegKind& rhs);

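// A minimal illustration (not part of this header's API) of how a VRegKind is paired with
// StackVisitor::GetVReg/SetVReg below; the visitor, method, and vreg index are assumed to be
// supplied by the caller, typically from inside a VisitFrame() override:
//
//   uint32_t value;
//   if (visitor->GetVReg(method, /* vreg */ 0, kIntVReg, &value)) {
//     // 'value' holds the integer contents of v0 in the visited frame; pass kReferenceVReg,
//     // kFloatVReg, etc. to interpret the slot according to its actual kind.
//   }
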
// Size in bytes of the should_deoptimize flag on the stack.
// Four bytes are enough regardless of the architecture; the frame size
// calculation automatically aligns the final frame size.
static constexpr size_t kShouldDeoptimizeFlagSize = 4;

/*
 * Our current stack layout.
 * The Dalvik registers come first, followed by the
 * Method*, followed by other special temporaries if any, followed by
 * regular compiler temporaries. As of now we only have the Method*
 * as a special compiler temporary.
 * A compiler temporary can be thought of as a virtual register that
 * does not exist in the dex but holds intermediate values to help
 * optimizations and code generation. A special compiler temporary is
 * one whose location in the frame is well known, while non-special ones
 * have no requirement on their location in the frame as long as the code
 * generator itself knows how to access them.
 *
 * TODO: Update this documentation?
 *
 * +-------------------------------+
 * | IN[ins-1]                     |  {Note: resides in caller's frame}
 * |       .                       |
 * | IN[0]                         |
 * | caller's ArtMethod            |  ... ArtMethod*
 * +===============================+  {Note: start of callee's frame}
 * | core callee-save spill        |  {variable sized}
 * +-------------------------------+
 * | fp callee-save spill          |
 * +-------------------------------+
 * | filler word                   |  {For compatibility, if V[locals-1] is used as wide}
 * +-------------------------------+
 * | V[locals-1]                   |
 * | V[locals-2]                   |
 * |      .                        |
 * |      .                        |  ... (reg == 2)
 * | V[1]                          |  ... (reg == 1)
 * | V[0]                          |  ... (reg == 0) <---- "locals_start"
 * +-------------------------------+
 * | stack alignment padding       |  {0 to (kStackAlignWords-1) of padding}
 * +-------------------------------+
 * | Compiler temp region          |  ... (reg >= max_num_special_temps)
 * |      .                        |
 * |      .                        |
 * | V[max_num_special_temps + 1]  |
 * | V[max_num_special_temps + 0]  |
 * +-------------------------------+
 * | OUT[outs-1]                   |
 * | OUT[outs-2]                   |
 * |       .                       |
 * | OUT[0]                        |
 * | ArtMethod*                    |  ... (reg == num_total_code_regs == special_temp_value) <<== sp, 16-byte aligned
 * +===============================+
 */
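
/*
 * A rough sketch (illustration only, not runtime code) of how the layout above is navigated
 * when walking quick frames: 'sp' is the value returned by GetCurrentQuickFrame() below, and
 * 'frame_size' is assumed to come from the current method's QuickMethodFrameInfo.
 *
 *   ArtMethod** sp = visitor.GetCurrentQuickFrame();
 *   ArtMethod* callee = *sp;  // The ArtMethod* slot at the bottom of the callee's frame.
 *   uint8_t* next_frame = reinterpret_cast<uint8_t*>(sp) + frame_size;
 *   ArtMethod* caller = *reinterpret_cast<ArtMethod**>(next_frame);  // "caller's ArtMethod" above.
 */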

class StackVisitor {
 public:
  // This enum defines a flag to control whether inlined frames are included
  // when walking the stack.
  enum class StackWalkKind {
    kIncludeInlinedFrames,
    kSkipInlinedFrames,
  };

 protected:
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               bool check_suspended = true);

  bool GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

 public:
  virtual ~StackVisitor() {}
  StackVisitor(const StackVisitor&) = default;
  StackVisitor(StackVisitor&&) = default;

  // Return 'true' if we should continue to visit more frames, 'false' to stop.
  virtual bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  enum class CountTransitions {
    kYes,
    kNo,
  };

  template <CountTransitions kCount = CountTransitions::kYes>
  void WalkStack(bool include_transitions = false)
      REQUIRES_SHARED(Locks::mutator_lock_);

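  // A minimal usage sketch (illustrative only; the names CountingVisitor, self, and context are
  // made up for the example): concrete visitors subclass StackVisitor, override VisitFrame(),
  // and then call WalkStack() while holding the mutator lock.
  //
  //   class CountingVisitor : public StackVisitor {
  //    public:
  //     CountingVisitor(Thread* self, Context* context)
  //         : StackVisitor(self, context, StackWalkKind::kIncludeInlinedFrames), count_(0) {}
  //
  //     bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) override {
  //       ++count_;     // GetMethod(), GetDexPc(), GetVReg(), ... describe the current frame.
  //       return true;  // Keep walking; return false to stop early.
  //     }
  //
  //     size_t count_;
  //   };
  //
  //   CountingVisitor visitor(self, context);
  //   visitor.WalkStack();
  //   // visitor.count_ is now the number of frames visited.
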
  Thread* GetThread() const {
    return thread_;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Sets this stack frame's method pointer. This requires holding the mutator lock exclusively.
  // It does not work with inlined methods.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);

  ArtMethod* GetOuterMethod() const {
    return *GetCurrentQuickFrame();
  }

  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }

  uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the height of the stack in managed stack frames, including transitions.
  size_t GetFrameHeight() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }

  // Returns a frame ID for JDWP use, starting from 1.
  size_t GetFrameId() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFrameHeight() + 1;
  }

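  // A worked example of the arithmetic above (numbers are illustrative): with
  // GetNumFrames() == 4, the first frame visited (cur_depth_ == 0) has height
  // 4 - 0 - 1 == 3 and frame ID 4, while the last frame visited (cur_depth_ == 3)
  // has height 0 and frame ID 1.
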
  size_t GetNumFrames() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (num_frames_ == 0) {
      num_frames_ = ComputeNumFrames(thread_, walk_kind_);
    }
    return num_frames_;
  }

  size_t GetFrameDepth() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return cur_depth_;
  }

  // Get the method and dex pc immediately after the one that's currently being visited.
  bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure deoptimization
  // is triggered so the new values take effect.
  bool SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure deoptimization
  // is triggered so the new values take effect.
  bool SetVRegPair(ArtMethod* m,
                   uint16_t vreg,
                   uint64_t new_value,
                   VRegKind kind_lo,
                   VRegKind kind_hi)
      REQUIRES_SHARED(Locks::mutator_lock_);

  uintptr_t* GetGPRAddress(uint32_t reg) const;

  uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsInInlinedFrame() const {
    return current_inlining_depth_ != 0;
  }

  size_t GetCurrentInliningDepth() const {
    return current_inlining_depth_;
  }

  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }

  ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }

  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }

  HandleScope* GetCurrentHandleScope(size_t pointer_size) const {
    ArtMethod** sp = GetCurrentQuickFrame();
    // Skip the ArtMethod*; the handle scope comes next.
    return reinterpret_cast<HandleScope*>(reinterpret_cast<uintptr_t>(sp) + pointer_size);
  }

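  // For example (illustrative only, assuming the runtime's method pointer size equals
  // sizeof(void*)), the handle scope of the current quick frame starts one ArtMethod* slot
  // above the value of GetCurrentQuickFrame():
  //
  //   HandleScope* hs = visitor.GetCurrentHandleScope(sizeof(void*));
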
  std::string DescribeLocation() const REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeNumFrames(Thread* thread, StackWalkKind walk_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void DescribeStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  const OatQuickMethodHeader* GetCurrentOatQuickMethodHeader() const {
    return cur_oat_quick_method_header_;
  }

  QuickMethodFrameInfo GetCurrentQuickFrameInfo() const REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Private constructor, used when num_frames_ has already been computed.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               size_t num_frames,
               bool check_suspended = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsAccessibleRegister(uint32_t reg, bool is_float) const {
    return is_float ? IsAccessibleFPR(reg) : IsAccessibleGPR(reg);
  }
  uintptr_t GetRegister(uint32_t reg, bool is_float) const {
    DCHECK(IsAccessibleRegister(reg, is_float));
    return is_float ? GetFPR(reg) : GetGPR(reg);
  }

  bool IsAccessibleGPR(uint32_t reg) const;
  uintptr_t GetGPR(uint32_t reg) const;

  bool IsAccessibleFPR(uint32_t reg) const;
  uintptr_t GetFPR(uint32_t reg) const;

  bool GetVRegFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                                          uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                    VRegKind kind_lo, VRegKind kind_hi,
                                    uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi, VRegKind kind_lo,
                                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SanityCheckFrame() const REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* const thread_;
  const StackWalkKind walk_kind_;
  ShadowFrame* cur_shadow_frame_;
  ArtMethod** cur_quick_frame_;
  uintptr_t cur_quick_frame_pc_;
  const OatQuickMethodHeader* cur_oat_quick_method_header_;
  // Lazily computed number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;
  // Inlining depth of the frame we are currently at.
  // 0 if there is no inlined frame.
  size_t current_inlining_depth_;

 protected:
  Context* const context_;
  const bool check_suspended_;
};

}  // namespace art

#endif  // ART_RUNTIME_STACK_H_