/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_STACK_H_
#define ART_RUNTIME_STACK_H_

#include <stdint.h>
#include <string>

#include "base/locks.h"
#include "base/macros.h"
#include "obj_ptr.h"
#include "quick/quick_method_frame_info.h"
#include "stack_map.h"

namespace art {

namespace mirror {
class Object;
}  // namespace mirror

class ArtMethod;
class Context;
class HandleScope;
class OatQuickMethodHeader;
class ShadowFrame;
class Thread;
union JValue;

// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,
  kIntVReg,
  kFloatVReg,
  kLongLoVReg,
  kLongHiVReg,
  kDoubleLoVReg,
  kDoubleHiVReg,
  kConstant,
  kImpreciseConstant,
  kUndefined,
};
std::ostream& operator<<(std::ostream& os, const VRegKind& rhs);

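// Wide values span two adjacent vregs and are read as a lo/hi pair. A minimal
// sketch of reading a Java long from v4/v5 via StackVisitor::GetVRegPair
// (declared below); the visitor, method and vreg indices are hypothetical:
//
//   uint64_t value;
//   if (visitor->GetVRegPair(m, /*vreg=*/4, kLongLoVReg, kLongHiVReg, &value)) {
//     // 'value' now holds the raw 64 bits of the long.
//   }
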
// Size in bytes of the should_deoptimize flag on the stack.
// We only need 4 bytes regardless of the architecture; the frame size
// calculation will align the final frame size automatically.
static constexpr size_t kShouldDeoptimizeFlagSize = 4;

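// Worked example (illustrative arithmetic only, numbers hypothetical): with a
// 16-byte stack alignment, a frame needing 90 bytes plus this 4-byte flag
// (94 bytes) is rounded up to a 96-byte frame, so no extra padding logic is
// needed here.
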
/*
 * Our current stack layout.
 * The Dalvik registers come first, followed by the
 * Method*, followed by other special temporaries if any, followed by
 * regular compiler temporaries. As of now we only have the Method* as
 * a special compiler temporary.
 * A compiler temporary can be thought of as a virtual register that
 * does not exist in the dex but holds intermediate values to help
 * optimizations and code generation. A special compiler temporary is
 * one whose location in the frame is well known, while non-special ones
 * have no requirement on their location in the frame as long as the code
 * generator itself knows how to access them.
 *
 * TODO: Update this documentation?
 *
 *     +-------------------------------+
 *     | IN[ins-1]                     |  {Note: resides in caller's frame}
 *     |  .                            |
 *     | IN[0]                         |
 *     | caller's ArtMethod            |  ... ArtMethod*
 *     +===============================+  {Note: start of callee's frame}
 *     | core callee-save spill        |  {variable sized}
 *     +-------------------------------+
 *     | fp callee-save spill          |
 *     +-------------------------------+
 *     | filler word                   |  {For compatibility, if V[locals-1] used as wide}
 *     +-------------------------------+
 *     | V[locals-1]                   |
 *     | V[locals-2]                   |
 *     |  .                            |
 *     |  .                            |  ... (reg == 2)
 *     | V[1]                          |  ... (reg == 1)
 *     | V[0]                          |  ... (reg == 0) <---- "locals_start"
 *     +-------------------------------+
 *     | stack alignment padding       |  {0 to (kStackAlignWords-1) of padding}
 *     +-------------------------------+
 *     | Compiler temp region          |  ... (reg >= max_num_special_temps)
 *     |  .                            |
 *     |  .                            |
 *     | V[max_num_special_temps + 1]  |
 *     | V[max_num_special_temps + 0]  |
 *     +-------------------------------+
 *     | OUT[outs-1]                   |
 *     | OUT[outs-2]                   |
 *     |  .                            |
 *     | OUT[0]                        |
 *     | ArtMethod*                    |  ... (reg == num_total_code_regs == special_temp_value) <<== sp, 16-byte aligned
 *     +===============================+
 */

class StackVisitor {
 public:
  // This enum defines a flag to control whether inlined frames are included
  // when walking the stack.
  enum class StackWalkKind {
    kIncludeInlinedFrames,
    kSkipInlinedFrames,
  };

 protected:
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               bool check_suspended = true);

  bool GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

 public:
  virtual ~StackVisitor() {}
  StackVisitor(const StackVisitor&) = default;
  StackVisitor(StackVisitor&&) = default;

  // Return 'true' if we should continue to visit more frames, 'false' to stop.
  virtual bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) = 0;

  enum class CountTransitions {
    kYes,
    kNo,
  };

  template <CountTransitions kCount = CountTransitions::kYes>
  void WalkStack(bool include_transitions = false) REQUIRES_SHARED(Locks::mutator_lock_);

  // Convenience helper function to walk the stack with a lambda as a visitor.
  template <CountTransitions kCountTransitions = CountTransitions::kYes,
            typename T>
  ALWAYS_INLINE static void WalkStack(const T& fn,
                                      Thread* thread,
                                      Context* context,
                                      StackWalkKind walk_kind,
                                      bool check_suspended = true,
                                      bool include_transitions = false)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    class LambdaStackVisitor : public StackVisitor {
     public:
      LambdaStackVisitor(const T& fn,
                         Thread* thread,
                         Context* context,
                         StackWalkKind walk_kind,
                         bool check_suspended = true)
          : StackVisitor(thread, context, walk_kind, check_suspended), fn_(fn) {}

      bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
        return fn_(this);
      }

     private:
      T fn_;
    };
    LambdaStackVisitor visitor(fn, thread, context, walk_kind, check_suspended);
    visitor.template WalkStack<kCountTransitions>(include_transitions);
  }

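  // A minimal usage sketch (illustrative only; the lambda body, the use of
  // Thread::Current(), and passing a null Context are assumptions, not
  // requirements of this header):
  //
  //   StackVisitor::WalkStack(
  //       [](const StackVisitor* visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
  //         LOG(INFO) << visitor->GetMethod()->PrettyMethod();
  //         return true;  // Continue to the next frame.
  //       },
  //       Thread::Current(),
  //       /* context= */ nullptr,
  //       StackVisitor::StackWalkKind::kSkipInlinedFrames);
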
  Thread* GetThread() const {
    return thread_;
  }

  ArtMethod* GetMethod() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Sets this stack frame's method pointer. This requires holding the mutator
  // lock exclusively. This doesn't work with inlined methods.
  void SetMethod(ArtMethod* method) REQUIRES(Locks::mutator_lock_);

  ArtMethod* GetOuterMethod() const {
    return *GetCurrentQuickFrame();
  }

  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }

  uint32_t GetDexPc(bool abort_on_failure = true) const REQUIRES_SHARED(Locks::mutator_lock_);

  mirror::Object* GetThisObject() const REQUIRES_SHARED(Locks::mutator_lock_);

  size_t GetNativePcOffset() const REQUIRES_SHARED(Locks::mutator_lock_);

  // Returns the height of the stack in managed stack frames, including transitions.
  size_t GetFrameHeight() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }

  // Returns a frame ID for JDWP use, starting from 1.
  size_t GetFrameId() REQUIRES_SHARED(Locks::mutator_lock_) {
    return GetFrameHeight() + 1;
  }

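  // Worked example (hypothetical numbers): with GetNumFrames() == 5, the
  // innermost frame (cur_depth_ == 0) has height 4 and frame ID 5, while the
  // outermost frame (cur_depth_ == 4) has height 0 and frame ID 1.
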
  size_t GetNumFrames() REQUIRES_SHARED(Locks::mutator_lock_) {
    if (num_frames_ == 0) {
      num_frames_ = ComputeNumFrames(thread_, walk_kind_);
    }
    return num_frames_;
  }

  size_t GetFrameDepth() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return cur_depth_;
  }

  // Get the method and dex pc immediately after the one that's currently being visited.
  bool GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure
  // deoptimization is triggered to make the values effective.
  bool SetVReg(ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure
  // deoptimization is triggered to make the values effective.
  bool SetVRegReference(ArtMethod* m, uint16_t vreg, ObjPtr<mirror::Object> new_value)
      REQUIRES_SHARED(Locks::mutator_lock_);

  // Values will be set in debugger shadow frames. The debugger will make sure
  // deoptimization is triggered to make the values effective.
  bool SetVRegPair(ArtMethod* m,
                   uint16_t vreg,
                   uint64_t new_value,
                   VRegKind kind_lo,
                   VRegKind kind_hi)
      REQUIRES_SHARED(Locks::mutator_lock_);

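  // A minimal sketch of a debugger-style write (the method, vreg index and
  // value are hypothetical):
  //
  //   if (!visitor->SetVReg(m, /*vreg=*/3, /*new_value=*/42u, kIntVReg)) {
  //     // Handle the failure, e.g. report it back to the debugger.
  //   }
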
  uintptr_t* GetGPRAddress(uint32_t reg) const;

  uintptr_t GetReturnPc() const REQUIRES_SHARED(Locks::mutator_lock_);

  void SetReturnPc(uintptr_t new_ret_pc) REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsInInlinedFrame() const {
    return !current_inline_frames_.empty();
  }

  InlineInfo GetCurrentInlinedFrame() const {
    return current_inline_frames_.back();
  }

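  // When walking with StackWalkKind::kIncludeInlinedFrames, a sketch of
  // querying the innermost inline frame (illustrative; the InlineInfo
  // accessors come from stack_map.h):
  //
  //   if (visitor->IsInInlinedFrame()) {
  //     InlineInfo inline_info = visitor->GetCurrentInlinedFrame();
  //     // Inspect inline_info, e.g. its dex pc, via the stack_map.h API.
  //   }
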
  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }

  ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }

  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }

  HandleScope* GetCurrentHandleScope(size_t pointer_size) const {
    ArtMethod** sp = GetCurrentQuickFrame();
    // Skip the ArtMethod*; the handle scope comes next.
    return reinterpret_cast<HandleScope*>(reinterpret_cast<uintptr_t>(sp) + pointer_size);
  }

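  // For example (hypothetical 64-bit frame): with pointer_size == 8 and the
  // quick frame at sp, the handle scope starts at sp + 8, immediately after
  // the ArtMethod* slot shown in the stack layout diagram above.
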
  std::string DescribeLocation() const REQUIRES_SHARED(Locks::mutator_lock_);

  static size_t ComputeNumFrames(Thread* thread, StackWalkKind walk_kind)
      REQUIRES_SHARED(Locks::mutator_lock_);

  static void DescribeStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_);

  const OatQuickMethodHeader* GetCurrentOatQuickMethodHeader() const {
    return cur_oat_quick_method_header_;
  }

  QuickMethodFrameInfo GetCurrentQuickFrameInfo() const REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  // Private constructor, used when num_frames_ has already been computed.
  StackVisitor(Thread* thread,
               Context* context,
               StackWalkKind walk_kind,
               size_t num_frames,
               bool check_suspended = true)
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool IsAccessibleRegister(uint32_t reg, bool is_float) const {
    return is_float ? IsAccessibleFPR(reg) : IsAccessibleGPR(reg);
  }
  uintptr_t GetRegister(uint32_t reg, bool is_float) const {
    DCHECK(IsAccessibleRegister(reg, is_float));
    return is_float ? GetFPR(reg) : GetGPR(reg);
  }

  bool IsAccessibleGPR(uint32_t reg) const;
  uintptr_t GetGPR(uint32_t reg) const;

  bool IsAccessibleFPR(uint32_t reg) const;
  uintptr_t GetFPR(uint32_t reg) const;

  bool GetVRegFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind, uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                uint32_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  bool GetVRegPairFromDebuggerShadowFrame(uint16_t vreg, VRegKind kind_lo, VRegKind kind_hi,
                                          uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                    VRegKind kind_lo, VRegKind kind_hi,
                                    uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi, VRegKind kind_lo,
                                   uint64_t* val) const
      REQUIRES_SHARED(Locks::mutator_lock_);

  ShadowFrame* PrepareSetVReg(ArtMethod* m, uint16_t vreg, bool wide)
      REQUIRES_SHARED(Locks::mutator_lock_);

  void SanityCheckFrame() const REQUIRES_SHARED(Locks::mutator_lock_);

  Thread* const thread_;
  const StackWalkKind walk_kind_;
  ShadowFrame* cur_shadow_frame_;
  ArtMethod** cur_quick_frame_;
  uintptr_t cur_quick_frame_pc_;
  const OatQuickMethodHeader* cur_oat_quick_method_header_;
  // Lazily computed number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;
  // Inlined frames of the method we are currently at. We keep popping frames
  // from the end as we visit them.
  CodeInfo current_code_info_;
  BitTableRange<InlineInfo> current_inline_frames_;

 protected:
  Context* const context_;
  const bool check_suspended_;
};

}  // namespace art

#endif  // ART_RUNTIME_STACK_H_