blob: 73a823aac7e0c62bca32f0f81f3922aa9b20172c [file] [log] [blame]
Elliott Hughes68e76522011-10-05 13:22:16 -07001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_STACK_H_
18#define ART_RUNTIME_STACK_H_
Elliott Hughes68e76522011-10-05 13:22:16 -070019
Ian Rogers6d4d9fc2011-11-30 16:24:48 -080020#include "dex_file.h"
jeffhao725a9572012-11-13 18:20:12 -080021#include "instrumentation.h"
Nicolas Geoffray42fcd982014-04-22 11:03:52 +000022#include "arch/context.h"
Ian Rogersef7d42f2014-01-06 12:55:46 -080023#include "base/casts.h"
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080024#include "base/macros.h"
Nicolas Geoffray42fcd982014-04-22 11:03:52 +000025#include "instruction_set.h"
Mathieu Chartier4e305412014-02-19 10:54:44 -080026#include "mirror/object.h"
Ian Rogersef7d42f2014-01-06 12:55:46 -080027#include "mirror/object_reference.h"
Vladimir Marko81949632014-05-02 11:53:22 +010028#include "utils.h"
Mathieu Chartier4e305412014-02-19 10:54:44 -080029#include "verify_object.h"
Elliott Hughes68e76522011-10-05 13:22:16 -070030
31#include <stdint.h>
Ian Rogers40e3bac2012-11-20 00:09:14 -080032#include <string>
Elliott Hughes68e76522011-10-05 13:22:16 -070033
34namespace art {
35
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080036namespace mirror {
Brian Carlstromea46f952013-07-30 01:26:50 -070037 class ArtMethod;
38 class Object;
Ian Rogers2dd0e2c2013-01-24 12:42:14 -080039} // namespace mirror
40
41class Context;
Ian Rogers0399dde2012-06-06 17:09:28 -070042class ShadowFrame;
Elliott Hughes08fc03a2012-06-26 17:34:00 -070043class StackIndirectReferenceTable;
Ian Rogers00f7d0e2012-07-19 15:28:27 -070044class ScopedObjectAccess;
Elliott Hughes68e76522011-10-05 13:22:16 -070045class Thread;
46
// The kind of vreg being accessed in calls to Set/GetVReg.
enum VRegKind {
  kReferenceVReg,      // Slot holds an object reference.
  kIntVReg,            // 32-bit integer value.
  kFloatVReg,          // 32-bit floating point value.
  kLongLoVReg,         // Low half of a 64-bit long pair.
  kLongHiVReg,         // High half of a 64-bit long pair.
  kDoubleLoVReg,       // Low half of a 64-bit double pair.
  kDoubleHiVReg,       // High half of a 64-bit double pair.
  kConstant,           // Known constant value.
  kImpreciseConstant,  // Constant whose precise type is not known.
  kUndefined,          // No value/kind information available.
};
60
/**
 * @brief Represents the virtual register numbers that denote special meaning.
 * @details This is used to make some virtual register numbers to have specific
 * semantic meaning. This is done so that the compiler can treat all virtual
 * registers the same way and only special case when needed. For example,
 * calculating SSA does not care whether a virtual register is a normal one or
 * a compiler temporary, so it can deal with them in a consistent manner. But,
 * for example if backend cares about temporaries because it has custom spill
 * location, then it can special case them only then.
 */
enum VRegBaseRegNum : int {
  /**
   * @brief Virtual registers originating from dex have number >= 0.
   */
  kVRegBaseReg = 0,

  /**
   * @brief Invalid virtual register number.
   */
  kVRegInvalid = -1,

  /**
   * @brief Used to denote the base register for compiler temporaries.
   * @details Compiler temporaries are virtual registers not originating
   * from dex but that are created by compiler. All virtual register numbers
   * that are <= kVRegTempBaseReg are categorized as compiler temporaries.
   */
  kVRegTempBaseReg = -2,

  /**
   * @brief Base register of temporary that holds the method pointer.
   * @details This is a special compiler temporary because it has a specific
   * location on stack.
   */
  kVRegMethodPtrBaseReg = kVRegTempBaseReg,

  /**
   * @brief Base register of non-special compiler temporary.
   * @details A non-special compiler temporary is one whose spill location
   * is flexible.
   */
  kVRegNonSpecialTempBaseReg = -3,
};
104
Dave Allisonf9439142014-03-27 15:10:22 -0700105// Special object used to mark the gap in the stack placed when a stack
106// overflow fault occurs during implicit stack checking. This is not
107// a real object - it is used simply as a valid address to which a
108// mirror::ArtMethod* can be compared during a stack walk. It is inserted
109// into the stack during the stack overflow signal handling to mark the gap
110// in which the memory is protected against read and write.
111extern void* stack_overflow_gap_marker;
112
// A reference from the shadow stack to a MirrorType object within the Java heap.
// Thin wrapper over mirror::ObjectReference<false, MirrorType>; construction from
// a raw pointer is funneled through FromMirrorPtr() to keep conversions explicit.
template<class MirrorType>
class MANAGED StackReference : public mirror::ObjectReference<false, MirrorType> {
 public:
  // Default-constructs a null reference.
  StackReference<MirrorType>() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : mirror::ObjectReference<false, MirrorType>(nullptr) {}

  // Wraps a raw mirror pointer (may be null) in a StackReference.
  static StackReference<MirrorType> FromMirrorPtr(MirrorType* p)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return StackReference<MirrorType>(p);
  }

 private:
  // Private: callers must use FromMirrorPtr() for pointer -> reference conversion.
  StackReference<MirrorType>(MirrorType* p) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      : mirror::ObjectReference<false, MirrorType>(p) {}
};
129
// ShadowFrame has 3 possible layouts:
//  - portable - a unified array of VRegs and references. Precise references need GC maps.
//  - interpreter - separate VRegs and reference arrays. References are in the reference array.
//  - JNI - just VRegs, but where every VReg holds a reference.
// The frame is variable-sized: vregs_ is a trailing array of NumberOfVRegs() uint32_t
// slots, optionally followed by an equally-sized array of StackReferences (see
// References()). Instances are created with placement new into memory sized by
// ComputeSize().
class ShadowFrame {
 public:
  // Compute size of ShadowFrame in bytes assuming it has a reference array.
  static size_t ComputeSize(uint32_t num_vregs) {
    return sizeof(ShadowFrame) + (sizeof(uint32_t) * num_vregs) +
           (sizeof(StackReference<mirror::Object>) * num_vregs);
  }

  // Create ShadowFrame in heap for deoptimization.
  // NOTE(review): the uint8_t[] buffer is owned by the returned frame's user;
  // no matching delete[] is visible here - confirm ownership at call sites.
  static ShadowFrame* Create(uint32_t num_vregs, ShadowFrame* link,
                             mirror::ArtMethod* method, uint32_t dex_pc) {
    uint8_t* memory = new uint8_t[ComputeSize(num_vregs)];
    return Create(num_vregs, link, method, dex_pc, memory);
  }

  // Create ShadowFrame for interpreter using provided memory.
  // |memory| must be at least ComputeSize(num_vregs) bytes since the frame is
  // constructed with a reference array (has_reference_array == true).
  static ShadowFrame* Create(uint32_t num_vregs, ShadowFrame* link,
                             mirror::ArtMethod* method, uint32_t dex_pc, void* memory) {
    ShadowFrame* sf = new (memory) ShadowFrame(num_vregs, link, method, dex_pc, true);
    return sf;
  }
  ~ShadowFrame() {}

  // Whether this frame carries the trailing StackReference array. Always true
  // unless built for the portable compiler, where the top bit of
  // number_of_vregs_ encodes the flag.
  bool HasReferenceArray() const {
#if defined(ART_USE_PORTABLE_COMPILER)
    return (number_of_vregs_ & kHasReferenceArray) != 0;
#else
    return true;
#endif
  }

  // Number of virtual register slots (excluding the flag bit in portable builds).
  uint32_t NumberOfVRegs() const {
#if defined(ART_USE_PORTABLE_COMPILER)
    return number_of_vregs_ & ~kHasReferenceArray;
#else
    return number_of_vregs_;
#endif
  }

  // Portable-only mutator; preserves the kHasReferenceArray flag bit.
  void SetNumberOfVRegs(uint32_t number_of_vregs) {
#if defined(ART_USE_PORTABLE_COMPILER)
    number_of_vregs_ = number_of_vregs | (number_of_vregs_ & kHasReferenceArray);
#else
    UNUSED(number_of_vregs);
    UNIMPLEMENTED(FATAL) << "Should only be called when portable is enabled";
#endif
  }

  uint32_t GetDexPC() const {
    return dex_pc_;
  }

  void SetDexPC(uint32_t dex_pc) {
    dex_pc_ = dex_pc;
  }

  // Previous shadow frame in the chain, or NULL at the bottom.
  ShadowFrame* GetLink() const {
    return link_;
  }

  void SetLink(ShadowFrame* frame) {
    DCHECK_NE(this, frame);  // A frame must never link to itself.
    link_ = frame;
  }

  // Reads vreg |i| as a 32-bit int (bit-pattern reinterpretation of the slot).
  int32_t GetVReg(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const int32_t*>(vreg);
  }

  // Reads vreg |i| as a float.
  float GetVRegFloat(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    // NOTE: Strict-aliasing?
    const uint32_t* vreg = &vregs_[i];
    return *reinterpret_cast<const float*>(vreg);
  }

  // Reads a 64-bit long spanning vreg slots |i| and |i|+1.
  int64_t GetVRegLong(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    // Alignment attribute required for GCC 4.8
    typedef const int64_t unaligned_int64 __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_int64*>(vreg);
  }

  // Reads a 64-bit double spanning vreg slots |i| and |i|+1.
  double GetVRegDouble(size_t i) const {
    DCHECK_LT(i, NumberOfVRegs());
    const uint32_t* vreg = &vregs_[i];
    // Alignment attribute required for GCC 4.8
    typedef const double unaligned_double __attribute__ ((aligned (4)));
    return *reinterpret_cast<unaligned_double*>(vreg);
  }

  // Reads vreg |i| as an object reference; prefers the reference array when
  // present, otherwise reinterprets the vreg slot. Optionally verifies reads.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  mirror::Object* GetVRegReference(size_t i) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_LT(i, NumberOfVRegs());
    mirror::Object* ref;
    if (HasReferenceArray()) {
      ref = References()[i].AsMirrorPtr();
    } else {
      const uint32_t* vreg_ptr = &vregs_[i];
      ref = reinterpret_cast<const StackReference<mirror::Object>*>(vreg_ptr)->AsMirrorPtr();
    }
    if (kVerifyFlags & kVerifyReads) {
      VerifyObject(ref);
    }
    return ref;
  }

  // Get view of vregs as range of consecutive arguments starting at i.
  uint32_t* GetVRegArgs(size_t i) {
    return &vregs_[i];
  }

  // Writes a 32-bit int to vreg |i|, clearing any stale reference for slot |i|.
  void SetVReg(size_t i, int32_t val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<int32_t*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  // Writes a float to vreg |i|, clearing any stale reference for slot |i|.
  void SetVRegFloat(size_t i, float val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    *reinterpret_cast<float*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
    }
  }

  // Writes a 64-bit long across slots |i| and |i|+1, clearing stale references
  // for both slots.
  void SetVRegLong(size_t i, int64_t val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    // Alignment attribute required for GCC 4.8
    typedef int64_t unaligned_int64 __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_int64*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();
    }
  }

  // Writes a 64-bit double across slots |i| and |i|+1, clearing stale references
  // for both slots.
  void SetVRegDouble(size_t i, double val) {
    DCHECK_LT(i, NumberOfVRegs());
    uint32_t* vreg = &vregs_[i];
    // Alignment attribute required for GCC 4.8
    typedef double unaligned_double __attribute__ ((aligned (4)));
    *reinterpret_cast<unaligned_double*>(vreg) = val;
    // This is needed for moving collectors since these can update the vreg references if they
    // happen to agree with references in the reference array.
    if (kMovingCollector && HasReferenceArray()) {
      References()[i].Clear();
      References()[i + 1].Clear();
    }
  }

  // Writes a reference into vreg |i|, mirroring it into the reference array
  // when present so both views stay consistent. Optionally verifies writes.
  template<VerifyObjectFlags kVerifyFlags = kDefaultVerifyFlags>
  void SetVRegReference(size_t i, mirror::Object* val) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_LT(i, NumberOfVRegs());
    if (kVerifyFlags & kVerifyWrites) {
      VerifyObject(val);
    }
    uint32_t* vreg = &vregs_[i];
    reinterpret_cast<StackReference<mirror::Object>*>(vreg)->Assign(val);
    if (HasReferenceArray()) {
      References()[i].Assign(val);
    }
  }

  // Method this frame executes; never null (DCHECKed).
  mirror::ArtMethod* GetMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(method_ != nullptr);
    return method_;
  }

  mirror::Object* GetThisObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  mirror::Object* GetThisObject(uint16_t num_ins) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  ThrowLocation GetCurrentLocationForThrow() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Portable-only mutator; fatal elsewhere because method_ is const then.
  void SetMethod(mirror::ArtMethod* method) {
#if defined(ART_USE_PORTABLE_COMPILER)
    DCHECK(method != nullptr);
    method_ = method;
#else
    UNUSED(method);
    UNIMPLEMENTED(FATAL) << "Should only be called when portable is enabled";
#endif
  }

  // True if |shadow_frame_entry_obj| points into this frame's storage (the
  // reference array when present, else the raw vreg array).
  bool Contains(StackReference<mirror::Object>* shadow_frame_entry_obj) const {
    if (HasReferenceArray()) {
      return ((&References()[0] <= shadow_frame_entry_obj) &&
              (shadow_frame_entry_obj <= (&References()[NumberOfVRegs() - 1])));
    } else {
      uint32_t* shadow_frame_entry = reinterpret_cast<uint32_t*>(shadow_frame_entry_obj);
      return ((&vregs_[0] <= shadow_frame_entry) &&
              (shadow_frame_entry <= (&vregs_[NumberOfVRegs() - 1])));
    }
  }

  // Member offsets below are for use by generated/assembly code.
  static size_t LinkOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, link_);
  }

  static size_t MethodOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, method_);
  }

  static size_t DexPCOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, dex_pc_);
  }

  static size_t NumberOfVRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, number_of_vregs_);
  }

  static size_t VRegsOffset() {
    return OFFSETOF_MEMBER(ShadowFrame, vregs_);
  }

 private:
  // Zeroes the vreg slots (and the reference array when requested); the caller
  // must have supplied a buffer large enough for the chosen layout.
  ShadowFrame(uint32_t num_vregs, ShadowFrame* link, mirror::ArtMethod* method,
              uint32_t dex_pc, bool has_reference_array)
      : number_of_vregs_(num_vregs), link_(link), method_(method), dex_pc_(dex_pc) {
    if (has_reference_array) {
#if defined(ART_USE_PORTABLE_COMPILER)
      CHECK_LT(num_vregs, static_cast<uint32_t>(kHasReferenceArray));
      number_of_vregs_ |= kHasReferenceArray;
#endif
      memset(vregs_, 0, num_vregs * (sizeof(uint32_t) + sizeof(StackReference<mirror::Object>)));
    } else {
      memset(vregs_, 0, num_vregs * sizeof(uint32_t));
    }
  }

  // The reference array lives immediately after the vreg slots.
  const StackReference<mirror::Object>* References() const {
    DCHECK(HasReferenceArray());
    const uint32_t* vreg_end = &vregs_[NumberOfVRegs()];
    return reinterpret_cast<const StackReference<mirror::Object>*>(vreg_end);
  }

  // Non-const accessor implemented in terms of the const one.
  StackReference<mirror::Object>* References() {
    return const_cast<StackReference<mirror::Object>*>(const_cast<const ShadowFrame*>(this)->References());
  }

#if defined(ART_USE_PORTABLE_COMPILER)
  enum ShadowFrameFlag {
    kHasReferenceArray = 1ul << 31  // Stored in the top bit of number_of_vregs_.
  };
  // TODO: make const in the portable case.
  uint32_t number_of_vregs_;
#else
  const uint32_t number_of_vregs_;
#endif
  // Link to previous shadow frame or NULL.
  ShadowFrame* link_;
#if defined(ART_USE_PORTABLE_COMPILER)
  // TODO: make const in the portable case.
  mirror::ArtMethod* method_;
#else
  mirror::ArtMethod* const method_;
#endif
  uint32_t dex_pc_;
  // Trailing flexible array: vreg slots, optionally followed by references.
  uint32_t vregs_[0];

  DISALLOW_IMPLICIT_CONSTRUCTORS(ShadowFrame);
};
411
// The managed stack is used to record fragments of managed code stacks. Managed code stacks
// may either be shadow frames or lists of frames using fixed frame sizes. Transition records are
// necessary for transitions between code using different frame layouts and transitions into native
// code.
// Note: a fragment holds EITHER a top quick frame OR a top shadow frame, never
// both (enforced by DCHECKs in the setters below).
class PACKED(4) ManagedStack {
 public:
  ManagedStack()
      : link_(NULL), top_shadow_frame_(NULL), top_quick_frame_(NULL), top_quick_frame_pc_(0) {}

  // Saves this (top) fragment's state into |fragment|, clears this fragment,
  // and links it to the saved one.
  void PushManagedStackFragment(ManagedStack* fragment) {
    // Copy this top fragment into given fragment.
    memcpy(fragment, this, sizeof(ManagedStack));
    // Clear this fragment, which has become the top.
    memset(this, 0, sizeof(ManagedStack));
    // Link our top fragment onto the given fragment.
    link_ = fragment;
  }

  // Restores the state saved in |fragment| (which must be the current link)
  // back into this fragment.
  void PopManagedStackFragment(const ManagedStack& fragment) {
    DCHECK(&fragment == link_);
    // Copy this given fragment back to the top.
    memcpy(this, &fragment, sizeof(ManagedStack));
  }

  ManagedStack* GetLink() const {
    return link_;
  }

  mirror::ArtMethod** GetTopQuickFrame() const {
    return top_quick_frame_;
  }

  // Only legal while no shadow frame is active in this fragment.
  void SetTopQuickFrame(mirror::ArtMethod** top) {
    DCHECK(top_shadow_frame_ == NULL);
    top_quick_frame_ = top;
  }

  uintptr_t GetTopQuickFramePc() const {
    return top_quick_frame_pc_;
  }

  // Only legal while no shadow frame is active in this fragment.
  void SetTopQuickFramePc(uintptr_t pc) {
    DCHECK(top_shadow_frame_ == NULL);
    top_quick_frame_pc_ = pc;
  }

  // Member offsets below are for use by generated/assembly code.
  static size_t TopQuickFrameOffset() {
    return OFFSETOF_MEMBER(ManagedStack, top_quick_frame_);
  }

  static size_t TopQuickFramePcOffset() {
    return OFFSETOF_MEMBER(ManagedStack, top_quick_frame_pc_);
  }

  // Pushes |new_top_frame| onto the shadow-frame chain; returns the previous top.
  ShadowFrame* PushShadowFrame(ShadowFrame* new_top_frame) {
    DCHECK(top_quick_frame_ == NULL);
    ShadowFrame* old_frame = top_shadow_frame_;
    top_shadow_frame_ = new_top_frame;
    new_top_frame->SetLink(old_frame);
    return old_frame;
  }

  // Pops and returns the current top shadow frame; CHECK-fails if none exists.
  ShadowFrame* PopShadowFrame() {
    DCHECK(top_quick_frame_ == NULL);
    CHECK(top_shadow_frame_ != NULL);
    ShadowFrame* frame = top_shadow_frame_;
    top_shadow_frame_ = frame->GetLink();
    return frame;
  }

  ShadowFrame* GetTopShadowFrame() const {
    return top_shadow_frame_;
  }

  // Only legal while no quick frame is active in this fragment.
  void SetTopShadowFrame(ShadowFrame* top) {
    DCHECK(top_quick_frame_ == NULL);
    top_shadow_frame_ = top;
  }

  static size_t TopShadowFrameOffset() {
    return OFFSETOF_MEMBER(ManagedStack, top_shadow_frame_);
  }

  size_t NumJniShadowFrameReferences() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool ShadowFramesContain(StackReference<mirror::Object>* shadow_frame_entry) const;

 private:
  ManagedStack* link_;             // Next (older) fragment, or NULL.
  ShadowFrame* top_shadow_frame_;  // Top interpreter frame, or NULL.
  mirror::ArtMethod** top_quick_frame_;  // Top compiled-code frame, or NULL.
  uintptr_t top_quick_frame_pc_;
};
505
506class StackVisitor {
507 protected:
Ian Rogers7a22fa62013-01-23 12:16:16 -0800508 StackVisitor(Thread* thread, Context* context) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700509
510 public:
511 virtual ~StackVisitor() {}
512
513 // Return 'true' if we should continue to visit more frames, 'false' to stop.
Ian Rogersb726dcb2012-09-05 08:57:23 -0700514 virtual bool VisitFrame() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) = 0;
Ian Rogers0399dde2012-06-06 17:09:28 -0700515
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700516 void WalkStack(bool include_transitions = false)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700517 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700518
Ian Rogersef7d42f2014-01-06 12:55:46 -0800519 mirror::ArtMethod* GetMethod() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
520 if (cur_shadow_frame_ != nullptr) {
Ian Rogers0399dde2012-06-06 17:09:28 -0700521 return cur_shadow_frame_->GetMethod();
Ian Rogersef7d42f2014-01-06 12:55:46 -0800522 } else if (cur_quick_frame_ != nullptr) {
Ian Rogers0399dde2012-06-06 17:09:28 -0700523 return *cur_quick_frame_;
524 } else {
Ian Rogersef7d42f2014-01-06 12:55:46 -0800525 return nullptr;
Ian Rogers0399dde2012-06-06 17:09:28 -0700526 }
527 }
528
  // True when the current frame is an interpreter shadow frame (as opposed to
  // a compiled quick frame).
  bool IsShadowFrame() const {
    return cur_shadow_frame_ != nullptr;
  }
532
Dave Allisonb373e092014-02-20 16:06:36 -0800533 uint32_t GetDexPc(bool abort_on_failure = true) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0c7abda2012-09-19 13:33:42 -0700534
Ian Rogers62d6c772013-02-27 08:32:07 -0800535 mirror::Object* GetThisObject() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
536
Ian Rogers0c7abda2012-09-19 13:33:42 -0700537 size_t GetNativePcOffset() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
538
  // Returns the address of callee-save slot |num| within the current quick
  // frame of size |frame_size|. Requires a valid current method.
  uintptr_t* CalleeSaveAddress(int num, size_t frame_size) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // Callee saves are held at the top of the frame
    DCHECK(GetMethod() != nullptr);
    byte* save_addr =
        reinterpret_cast<byte*>(cur_quick_frame_) + frame_size - ((num + 1) * kPointerSize);
#if defined(__i386__) || defined(__x86_64__)
    save_addr -= kPointerSize;  // account for return address
#endif
    return reinterpret_cast<uintptr_t*>(save_addr);
  }
550
  // Returns the height of the stack in the managed stack frames, including transitions.
  // Height 0 corresponds to the deepest frame (cur_depth_ == GetNumFrames() - 1).
  size_t GetFrameHeight() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return GetNumFrames() - cur_depth_ - 1;
  }
555
Elliott Hughes08fc03a2012-06-26 17:34:00 -0700556 // Returns a frame ID for JDWP use, starting from 1.
Ian Rogersb726dcb2012-09-05 08:57:23 -0700557 size_t GetFrameId() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers0399dde2012-06-06 17:09:28 -0700558 return GetFrameHeight() + 1;
559 }
560
Ian Rogersb726dcb2012-09-05 08:57:23 -0700561 size_t GetNumFrames() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Ian Rogers0399dde2012-06-06 17:09:28 -0700562 if (num_frames_ == 0) {
Ian Rogers7a22fa62013-01-23 12:16:16 -0800563 num_frames_ = ComputeNumFrames(thread_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700564 }
565 return num_frames_;
566 }
567
Brian Carlstromea46f952013-07-30 01:26:50 -0700568 uint32_t GetVReg(mirror::ArtMethod* m, uint16_t vreg, VRegKind kind) const
Ian Rogers2bcb4a42012-11-08 10:39:18 -0800569 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700570
Brian Carlstromea46f952013-07-30 01:26:50 -0700571 void SetVReg(mirror::ArtMethod* m, uint16_t vreg, uint32_t new_value, VRegKind kind)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700572 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700573
Mathieu Chartier815873e2014-02-13 18:02:13 -0800574 uintptr_t* GetGPRAddress(uint32_t reg) const;
Ian Rogers0399dde2012-06-06 17:09:28 -0700575 uintptr_t GetGPR(uint32_t reg) const;
Mathieu Chartier67022432012-11-29 18:04:50 -0800576 void SetGPR(uint32_t reg, uintptr_t value);
Ian Rogers0399dde2012-06-06 17:09:28 -0700577
  // This is a fast-path for getting/setting values in a quick frame.
  // Computes the in-frame address of virtual register |vreg| relative to
  // |cur_quick_frame| (which must be the visitor's current quick frame),
  // using the method's spill masks and frame size via GetVRegOffset.
  uint32_t* GetVRegAddr(mirror::ArtMethod** cur_quick_frame, const DexFile::CodeItem* code_item,
                        uint32_t core_spills, uint32_t fp_spills, size_t frame_size,
                        uint16_t vreg) const {
    int offset = GetVRegOffset(code_item, core_spills, fp_spills, frame_size, vreg, kRuntimeISA);
    DCHECK_EQ(cur_quick_frame, GetCurrentQuickFrame());
    byte* vreg_addr = reinterpret_cast<byte*>(cur_quick_frame) + offset;
    return reinterpret_cast<uint32_t*>(vreg_addr);
  }
587
Ian Rogersef7d42f2014-01-06 12:55:46 -0800588 uintptr_t GetReturnPc() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700589
Ian Rogersef7d42f2014-01-06 12:55:46 -0800590 void SetReturnPc(uintptr_t new_ret_pc) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers0399dde2012-06-06 17:09:28 -0700591
  /*
   * Return sp-relative offset for a Dalvik virtual register, compiler
   * spill or Method* in bytes using Method*.
   * Note that (reg >= 0) refers to a Dalvik register, (reg == -1)
   * denotes an invalid Dalvik register, (reg == -2) denotes Method*
   * and (reg <= -3) denotes a compiler temporary. A compiler temporary
   * can be thought of as a virtual register that does not exist in the
   * dex but holds intermediate values to help optimizations and code
   * generation. A special compiler temporary is one whose location
   * in frame is well known while non-special ones do not have a requirement
   * on location in frame as long as code generator itself knows how
   * to access them.
   *
   * +------------------------+
   * | IN[ins-1]              |  {Note: resides in caller's frame}
   * |       .                |
   * | IN[0]                  |
   * | caller's Method*       |
   * +========================+  {Note: start of callee's frame}
   * | core callee-save spill |  {variable sized}
   * +------------------------+
   * | fp callee-save spill   |
   * +------------------------+
   * | filler word            |  {For compatibility: keeps the locals area
   * +------------------------+   sized so V[locals-1] can be used as wide}
   * | V[locals-1]            |
   * | V[locals-2]            |
   * |       .                |
   * |       .                |  ... (reg == 2)
   * | V[1]                   |  ... (reg == 1)
   * | V[0]                   |  ... (reg == 0) <---- "locals_start"
   * +------------------------+
   * | Compiler temp region   |  ... (reg <= -3)
   * |                        |
   * |                        |
   * +------------------------+
   * | stack alignment padding|  {0 to (kStackAlignWords-1) of padding}
   * +------------------------+
   * | OUT[outs-1]            |
   * | OUT[outs-2]            |
   * |       .                |
   * | OUT[0]                 |
   * | curMethod*             |  ... (reg == -2) <<== sp, 16-byte aligned
   * +========================+
   */
  static int GetVRegOffset(const DexFile::CodeItem* code_item,
                           uint32_t core_spills, uint32_t fp_spills,
                           size_t frame_size, int reg, InstructionSet isa) {
    // Quick frames are always a multiple of the stack alignment.
    DCHECK_EQ(frame_size & (kStackAlignment - 1), 0U);
    DCHECK_NE(reg, static_cast<int>(kVRegInvalid));
    // Space occupied by the callee-save spill areas plus the filler word
    // (see the diagram above); spill slot sizes are ISA-dependent.
    int spill_size = POPCOUNT(core_spills) * GetBytesPerGprSpillLocation(isa)
        + POPCOUNT(fp_spills) * GetBytesPerFprSpillLocation(isa)
        + sizeof(uint32_t);  // Filler.
    int num_ins = code_item->ins_size_;
    int num_regs = code_item->registers_size_ - num_ins;
    // Offset of V[0] from the frame base, per the diagram above.
    int locals_start = frame_size - spill_size - num_regs * sizeof(uint32_t);
    if (reg == static_cast<int>(kVRegMethodPtrBaseReg)) {
      // The current method pointer corresponds to special location on stack.
      return 0;
    } else if (reg <= static_cast<int>(kVRegNonSpecialTempBaseReg)) {
      /*
       * Special temporaries may have custom locations and the logic above deals with that.
       * However, non-special temporaries are placed relative to the locals. Since the
       * virtual register numbers for temporaries "grow" in negative direction, reg number
       * will always be <= to the temp base reg. Thus, the logic ensures that the first
       * temp is at offset -4 bytes from locals, the second is at -8 bytes from locals,
       * and so on.
       */
      int relative_offset =
          (reg + std::abs(static_cast<int>(kVRegNonSpecialTempBaseReg)) - 1) * sizeof(uint32_t);
      return locals_start + relative_offset;
    } else if (reg < num_regs) {
      // A local: V[0] .. V[num_regs-1], laid out contiguously from locals_start.
      return locals_start + (reg * sizeof(uint32_t));
    } else {
      // Handle ins: they live in the caller's frame, above this frame's
      // spilled Method* (one GPR spill slot past frame_size).
      return frame_size + ((reg - num_regs) * sizeof(uint32_t)) + GetBytesPerGprSpillLocation(isa);
    }
  }
670
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000671 static int GetOutVROffset(uint16_t out_num, InstructionSet isa) {
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800672 // According to stack model, the first out is above the Method ptr.
Nicolas Geoffray42fcd982014-04-22 11:03:52 +0000673 return GetBytesPerGprSpillLocation(isa) + (out_num * sizeof(uint32_t));
Razvan A Lupusoru3bc01742014-02-06 13:18:43 -0800674 }
675
  // Accessor: pc recorded for the quick frame currently being visited.
  uintptr_t GetCurrentQuickFramePc() const {
    return cur_quick_frame_pc_;
  }
679
  // Accessor: current quick frame, expressed as a pointer to its Method* slot.
  mirror::ArtMethod** GetCurrentQuickFrame() const {
    return cur_quick_frame_;
  }
683
  // Accessor: current interpreter (shadow) frame.
  ShadowFrame* GetCurrentShadowFrame() const {
    return cur_shadow_frame_;
  }
687
Elliott Hughes08fc03a2012-06-26 17:34:00 -0700688 StackIndirectReferenceTable* GetCurrentSirt() const {
Brian Carlstromea46f952013-07-30 01:26:50 -0700689 mirror::ArtMethod** sp = GetCurrentQuickFrame();
Brian Carlstrom7934ac22013-07-26 10:54:15 -0700690 ++sp; // Skip Method*; SIRT comes next;
Elliott Hughes08fc03a2012-06-26 17:34:00 -0700691 return reinterpret_cast<StackIndirectReferenceTable*>(sp);
692 }
693
  // Human-readable description of the visitor's current position (for logging).
  std::string DescribeLocation() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Count the number of frames on 'thread's stack.
  static size_t ComputeNumFrames(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Dump a description of 'thread's stack frames (debugging aid).
  static void DescribeStack(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Ian Rogers306057f2012-11-26 12:45:53 -0800699
 private:
  // Instrumentation frame at 'depth' within the thread's instrumentation stack.
  instrumentation::InstrumentationStackFrame& GetInstrumentationStackFrame(uint32_t depth) const;

  // Debug-check invariants of the frame currently being visited.
  void SanityCheckFrame() const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Thread whose stack is walked; fixed for the visitor's lifetime.
  Thread* const thread_;
  // Current interpreter (shadow) frame.
  ShadowFrame* cur_shadow_frame_;
  // Current quick frame, as a pointer to its Method* slot (see GetVRegAddr).
  mirror::ArtMethod** cur_quick_frame_;
  // Pc within the current quick frame (see GetCurrentQuickFramePc).
  uintptr_t cur_quick_frame_pc_;
  // Lazily computed, number of frames in the stack.
  size_t num_frames_;
  // Depth of the frame we're currently at.
  size_t cur_depth_;

 protected:
  // Machine-register context; const pointer held for the visitor's lifetime.
  Context* const context_;
716};
717
Elliott Hughes68e76522011-10-05 13:22:16 -0700718} // namespace art
719
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700720#endif // ART_RUNTIME_STACK_H_