/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "stack.h"

#include "android-base/stringprintf.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "base/hex_dump.h"
#include "dex/dex_file_types.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/space/image_space.h"
#include "gc/space/space-inl.h"
#include "interpreter/shadow_frame.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "linear_alloc.h"
#include "managed_stack.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "oat_quick_method_header.h"
#include "quick/quick_method_frame_info.h"
#include "runtime.h"
#include "thread.h"
#include "thread_list.h"

namespace art {

using android::base::StringPrintf;

static constexpr bool kDebugStackWalk = false;

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           bool check_suspended)
    : StackVisitor(thread, context, walk_kind, 0, check_suspended) {}

StackVisitor::StackVisitor(Thread* thread,
                           Context* context,
                           StackWalkKind walk_kind,
                           size_t num_frames,
                           bool check_suspended)
    : thread_(thread),
      walk_kind_(walk_kind),
      cur_shadow_frame_(nullptr),
      cur_quick_frame_(nullptr),
      cur_quick_frame_pc_(0),
      cur_oat_quick_method_header_(nullptr),
      num_frames_(num_frames),
      cur_depth_(0),
      current_inlining_depth_(0),
      context_(context),
      check_suspended_(check_suspended) {
  if (check_suspended_) {
    DCHECK(thread == Thread::Current() || thread->IsSuspended()) << *thread;
  }
}

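// Returns the inline info encoded in the stack map that covers `cur_quick_frame_pc`
// within the given method header.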
static InlineInfo GetCurrentInlineInfo(CodeInfo& code_info,
                                       const OatQuickMethodHeader* method_header,
                                       uintptr_t cur_quick_frame_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc);
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  DCHECK(stack_map.IsValid());
  return code_info.GetInlineInfoOf(stack_map);
}

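// Returns the method of the frame the visitor currently points at. For an inlined frame the
// callee is resolved from the inline info rather than read from the quick frame itself.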
ArtMethod* StackVisitor::GetMethod() const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetMethod();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      size_t depth_in_stack_map = current_inlining_depth_ - 1;
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      CodeInfo code_info(method_header);
      InlineInfo inline_info = GetCurrentInlineInfo(code_info,
                                                    method_header,
                                                    cur_quick_frame_pc_);
      MethodInfo method_info = method_header->GetOptimizedMethodInfo();
      DCHECK(walk_kind_ != StackWalkKind::kSkipInlinedFrames);
      return GetResolvedMethod(*GetCurrentQuickFrame(),
                               method_info,
                               inline_info,
                               depth_in_stack_map);
    } else {
      return *cur_quick_frame_;
    }
  }
  return nullptr;
}

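// Maps the current frame back to a dex pc: shadow frames store it directly, frames with a
// method header translate the native pc, and frames without a header (such as generic JNI)
// report dex::kDexNoIndex.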
uint32_t StackVisitor::GetDexPc(bool abort_on_failure) const {
  if (cur_shadow_frame_ != nullptr) {
    return cur_shadow_frame_->GetDexPC();
  } else if (cur_quick_frame_ != nullptr) {
    if (IsInInlinedFrame()) {
      const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
      CodeInfo code_info(method_header);
      size_t depth_in_stack_map = current_inlining_depth_ - 1;
      return GetCurrentInlineInfo(code_info, method_header, cur_quick_frame_pc_).
          GetDexPcAtDepth(depth_in_stack_map);
    } else if (cur_oat_quick_method_header_ == nullptr) {
      return dex::kDexNoIndex;
    } else {
      return cur_oat_quick_method_header_->ToDexPc(
          GetMethod(), cur_quick_frame_pc_, abort_on_failure);
    }
  } else {
    return 0;
  }
}

extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_);

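// Returns the 'this' reference of the current frame: from the handle scope for native methods,
// via the proxy entrypoint for proxy methods, and from the first argument vreg otherwise.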
mirror::Object* StackVisitor::GetThisObject() const {
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  ArtMethod* m = GetMethod();
  if (m->IsStatic()) {
    return nullptr;
  } else if (m->IsNative()) {
    if (cur_quick_frame_ != nullptr) {
      HandleScope* hs = reinterpret_cast<HandleScope*>(
          reinterpret_cast<char*>(cur_quick_frame_) + sizeof(ArtMethod*));
      return hs->GetReference(0);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else if (m->IsProxyMethod()) {
    if (cur_quick_frame_ != nullptr) {
      return artQuickGetProxyThisObject(cur_quick_frame_);
    } else {
      return cur_shadow_frame_->GetVRegReference(0);
    }
  } else {
    CodeItemDataAccessor accessor(m->DexInstructionData());
    if (!accessor.HasCodeItem()) {
      UNIMPLEMENTED(ERROR) << "Failed to determine this object of abstract or proxy method: "
          << ArtMethod::PrettyMethod(m);
      return nullptr;
    } else {
      uint16_t reg = accessor.RegistersSize() - accessor.InsSize();
      uint32_t value = 0;
      bool success = GetVReg(m, reg, kReferenceVReg, &value);
      // We currently always guarantee the `this` object is live throughout the method.
      CHECK(success) << "Failed to read the this object in " << ArtMethod::PrettyMethod(m);
      return reinterpret_cast<mirror::Object*>(value);
    }
  }
}

size_t StackVisitor::GetNativePcOffset() const {
  DCHECK(!IsShadowFrame());
  return GetCurrentOatQuickMethodHeader()->NativeQuickPcOffset(cur_quick_frame_pc_);
}

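// Reads a vreg from a debugger-installed shadow frame, if the debugger has overridden that vreg.
// Returns false when there is no override, so callers fall back to the compiled-code value.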
bool StackVisitor::GetVRegFromDebuggerShadowFrame(uint16_t vreg,
                                                  VRegKind kind,
                                                  uint32_t* val) const {
  size_t frame_id = const_cast<StackVisitor*>(this)->GetFrameId();
  ShadowFrame* shadow_frame = thread_->FindDebuggerShadowFrame(frame_id);
  if (shadow_frame != nullptr) {
    bool* updated_vreg_flags = thread_->GetUpdatedVRegFlags(frame_id);
    DCHECK(updated_vreg_flags != nullptr);
    if (updated_vreg_flags[vreg]) {
      // Value is set by the debugger.
      if (kind == kReferenceVReg) {
        *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
            shadow_frame->GetVRegReference(vreg)));
      } else {
        *val = shadow_frame->GetVReg(vreg);
      }
      return true;
    }
  }
  // No value is set by the debugger.
  return false;
}

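// Reads a 32-bit dex register from the current frame: debugger overrides take precedence, then
// the stack maps of optimized code, or the shadow frame for interpreted code.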
bool StackVisitor::GetVReg(ArtMethod* m, uint16_t vreg, VRegKind kind, uint32_t* val) const {
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    // Check if there is value set by the debugger.
    if (GetVRegFromDebuggerShadowFrame(vreg, kind, val)) {
      return true;
    }
    DCHECK(cur_oat_quick_method_header_->IsOptimized());
    return GetVRegFromOptimizedCode(m, vreg, kind, val);
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    if (kind == kReferenceVReg) {
      *val = static_cast<uint32_t>(reinterpret_cast<uintptr_t>(
          cur_shadow_frame_->GetVRegReference(vreg)));
    } else {
      *val = cur_shadow_frame_->GetVReg(vreg);
    }
    return true;
  }
}

bool StackVisitor::GetVRegFromOptimizedCode(ArtMethod* m, uint16_t vreg, VRegKind kind,
                                            uint32_t* val) const {
  DCHECK_EQ(m, GetMethod());
  // Can't be null or how would we compile its instructions?
  DCHECK(m->GetCodeItem() != nullptr) << m->PrettyMethod();
  CodeItemDataAccessor accessor(m->DexInstructionData());
  uint16_t number_of_dex_registers = accessor.RegistersSize();
  DCHECK_LT(vreg, number_of_dex_registers);
  const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
  CodeInfo code_info(method_header);

  uint32_t native_pc_offset = method_header->NativeQuickPcOffset(cur_quick_frame_pc_);
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  DCHECK(stack_map.IsValid());
  size_t depth_in_stack_map = current_inlining_depth_ - 1;

  DexRegisterMap dex_register_map = IsInInlinedFrame()
      ? code_info.GetDexRegisterMapAtDepth(depth_in_stack_map,
                                           code_info.GetInlineInfoOf(stack_map),
                                           number_of_dex_registers)
      : code_info.GetDexRegisterMapOf(stack_map, number_of_dex_registers);

  if (!dex_register_map.IsValid()) {
    return false;
  }
  DexRegisterLocation::Kind location_kind =
      dex_register_map.GetLocationKind(vreg, number_of_dex_registers, code_info);
  switch (location_kind) {
    case DexRegisterLocation::Kind::kInStack: {
      const int32_t offset = dex_register_map.GetStackOffsetInBytes(vreg,
                                                                    number_of_dex_registers,
                                                                    code_info);
      const uint8_t* addr = reinterpret_cast<const uint8_t*>(cur_quick_frame_) + offset;
      *val = *reinterpret_cast<const uint32_t*>(addr);
      return true;
    }
    case DexRegisterLocation::Kind::kInRegister:
    case DexRegisterLocation::Kind::kInRegisterHigh:
    case DexRegisterLocation::Kind::kInFpuRegister:
    case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
      uint32_t reg =
          dex_register_map.GetMachineRegister(vreg, number_of_dex_registers, code_info);
      return GetRegisterIfAccessible(reg, kind, val);
    }
    case DexRegisterLocation::Kind::kConstant:
      *val = dex_register_map.GetConstant(vreg, number_of_dex_registers, code_info);
      return true;
    case DexRegisterLocation::Kind::kNone:
      return false;
    default:
      LOG(FATAL)
          << "Unexpected location kind "
          << dex_register_map.GetLocationInternalKind(vreg,
                                                      number_of_dex_registers,
                                                      code_info);
      UNREACHABLE();
  }
}

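// Reads a physical register out of the saved context, remapping the register number for
// ISA-specific quirks and selecting the requested 32-bit half on 64-bit targets.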
bool StackVisitor::GetRegisterIfAccessible(uint32_t reg, VRegKind kind, uint32_t* val) const {
  const bool is_float = (kind == kFloatVReg) || (kind == kDoubleLoVReg) || (kind == kDoubleHiVReg);

  if (kRuntimeISA == InstructionSet::kX86 && is_float) {
    // X86 float registers are 64-bit and each XMM register is provided as two separate
    // 32-bit registers by the context.
    reg = (kind == kDoubleHiVReg) ? (2 * reg + 1) : (2 * reg);
  }

  // MIPS32 float registers are used as 64-bit (for MIPS32r2 it is pair
  // F(2n)-F(2n+1), and for MIPS32r6 it is 64-bit register F(2n)). When
  // accessing upper 32-bits from double, reg + 1 should be used.
  if ((kRuntimeISA == InstructionSet::kMips) && (kind == kDoubleHiVReg)) {
    DCHECK_ALIGNED(reg, 2);
    reg++;
  }

  if (!IsAccessibleRegister(reg, is_float)) {
    return false;
  }
  uintptr_t ptr_val = GetRegister(reg, is_float);
  const bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    const bool wide_lo = (kind == kLongLoVReg) || (kind == kDoubleLoVReg);
    const bool wide_hi = (kind == kLongHiVReg) || (kind == kDoubleHiVReg);
    int64_t value_long = static_cast<int64_t>(ptr_val);
    if (wide_lo) {
      ptr_val = static_cast<uintptr_t>(Low32Bits(value_long));
    } else if (wide_hi) {
      ptr_val = static_cast<uintptr_t>(High32Bits(value_long));
    }
  }
  *val = ptr_val;
  return true;
}

bool StackVisitor::GetVRegPairFromDebuggerShadowFrame(uint16_t vreg,
                                                      VRegKind kind_lo,
                                                      VRegKind kind_hi,
                                                      uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromDebuggerShadowFrame(vreg, kind_lo, &low_32bits);
  success &= GetVRegFromDebuggerShadowFrame(vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

bool StackVisitor::GetVRegPair(ArtMethod* m, uint16_t vreg, VRegKind kind_lo,
                               VRegKind kind_hi, uint64_t* val) const {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  // Check if there is value set by the debugger.
  if (GetVRegPairFromDebuggerShadowFrame(vreg, kind_lo, kind_hi, val)) {
    return true;
  }
  if (cur_quick_frame_ != nullptr) {
    DCHECK(context_ != nullptr);  // You can't reliably read registers without a context.
    DCHECK(m == GetMethod());
    DCHECK(cur_oat_quick_method_header_->IsOptimized());
    return GetVRegPairFromOptimizedCode(m, vreg, kind_lo, kind_hi, val);
  } else {
    DCHECK(cur_shadow_frame_ != nullptr);
    *val = cur_shadow_frame_->GetVRegLong(vreg);
    return true;
  }
}

bool StackVisitor::GetVRegPairFromOptimizedCode(ArtMethod* m, uint16_t vreg,
                                                VRegKind kind_lo, VRegKind kind_hi,
                                                uint64_t* val) const {
  uint32_t low_32bits;
  uint32_t high_32bits;
  bool success = GetVRegFromOptimizedCode(m, vreg, kind_lo, &low_32bits);
  success &= GetVRegFromOptimizedCode(m, vreg + 1, kind_hi, &high_32bits);
  if (success) {
    *val = (static_cast<uint64_t>(high_32bits) << 32) | static_cast<uint64_t>(low_32bits);
  }
  return success;
}

bool StackVisitor::GetRegisterPairIfAccessible(uint32_t reg_lo, uint32_t reg_hi,
                                               VRegKind kind_lo, uint64_t* val) const {
  const bool is_float = (kind_lo == kDoubleLoVReg);
  if (!IsAccessibleRegister(reg_lo, is_float) || !IsAccessibleRegister(reg_hi, is_float)) {
    return false;
  }
  uintptr_t ptr_val_lo = GetRegister(reg_lo, is_float);
  uintptr_t ptr_val_hi = GetRegister(reg_hi, is_float);
  bool target64 = Is64BitInstructionSet(kRuntimeISA);
  if (target64) {
    int64_t value_long_lo = static_cast<int64_t>(ptr_val_lo);
    int64_t value_long_hi = static_cast<int64_t>(ptr_val_hi);
    ptr_val_lo = static_cast<uintptr_t>(Low32Bits(value_long_lo));
    ptr_val_hi = static_cast<uintptr_t>(High32Bits(value_long_hi));
  }
  *val = (static_cast<uint64_t>(ptr_val_hi) << 32) | static_cast<uint32_t>(ptr_val_lo);
  return true;
}

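// Writes a dex register on behalf of the debugger. For compiled frames the value is stored in a
// debugger shadow frame and takes effect once the frame is deoptimized.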
bool StackVisitor::SetVReg(ArtMethod* m,
                           uint16_t vreg,
                           uint32_t new_value,
                           VRegKind kind) {
  CodeItemDataAccessor accessor(m->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return false;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare and update a shadow frame that will
    // be executed by the interpreter after deoptimization of the stack.
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = accessor.RegistersSize();
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember the vreg has been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
  }
  if (kind == kReferenceVReg) {
    shadow_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(new_value));
  } else {
    shadow_frame->SetVReg(vreg, new_value);
  }
  return true;
}

bool StackVisitor::SetVRegPair(ArtMethod* m,
                               uint16_t vreg,
                               uint64_t new_value,
                               VRegKind kind_lo,
                               VRegKind kind_hi) {
  if (kind_lo == kLongLoVReg) {
    DCHECK_EQ(kind_hi, kLongHiVReg);
  } else if (kind_lo == kDoubleLoVReg) {
    DCHECK_EQ(kind_hi, kDoubleHiVReg);
  } else {
    LOG(FATAL) << "Expected long or double: kind_lo=" << kind_lo << ", kind_hi=" << kind_hi;
    UNREACHABLE();
  }
  CodeItemDataAccessor accessor(m->DexInstructionData());
  if (!accessor.HasCodeItem()) {
    return false;
  }
  ShadowFrame* shadow_frame = GetCurrentShadowFrame();
  if (shadow_frame == nullptr) {
    // This is a compiled frame: we must prepare for deoptimization (see SetVRegFromDebugger).
    const size_t frame_id = GetFrameId();
    const uint16_t num_regs = accessor.RegistersSize();
    shadow_frame = thread_->FindOrCreateDebuggerShadowFrame(frame_id, num_regs, m, GetDexPc());
    CHECK(shadow_frame != nullptr);
    // Remember the vreg pair has been set for debugging and must not be overwritten by the
    // original value during deoptimization of the stack.
    thread_->GetUpdatedVRegFlags(frame_id)[vreg] = true;
    thread_->GetUpdatedVRegFlags(frame_id)[vreg + 1] = true;
  }
  shadow_frame->SetVRegLong(vreg, new_value);
  return true;
}

bool StackVisitor::IsAccessibleGPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleGPR(reg);
}

uintptr_t* StackVisitor::GetGPRAddress(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPRAddress(reg);
}

uintptr_t StackVisitor::GetGPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetGPR(reg);
}

bool StackVisitor::IsAccessibleFPR(uint32_t reg) const {
  DCHECK(context_ != nullptr);
  return context_->IsAccessibleFPR(reg);
}

uintptr_t StackVisitor::GetFPR(uint32_t reg) const {
  DCHECK(cur_quick_frame_ != nullptr) << "This is a quick frame routine";
  DCHECK(context_ != nullptr);
  return context_->GetFPR(reg);
}

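// The return pc is stored at a fixed offset from the quick frame's stack pointer, given by the
// frame info of the current method.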
uintptr_t StackVisitor::GetReturnPc() const {
  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
  DCHECK(sp != nullptr);
  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
  return *reinterpret_cast<uintptr_t*>(pc_addr);
}

void StackVisitor::SetReturnPc(uintptr_t new_ret_pc) {
  uint8_t* sp = reinterpret_cast<uint8_t*>(GetCurrentQuickFrame());
  CHECK(sp != nullptr);
  uint8_t* pc_addr = sp + GetCurrentQuickFrameInfo().GetReturnPcOffset();
  *reinterpret_cast<uintptr_t*>(pc_addr) = new_ret_pc;
}

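// Walks the thread's stack once with a counting visitor to determine the total number of frames.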
size_t StackVisitor::ComputeNumFrames(Thread* thread, StackWalkKind walk_kind) {
  struct NumFramesVisitor : public StackVisitor {
    NumFramesVisitor(Thread* thread_in, StackWalkKind walk_kind_in)
        : StackVisitor(thread_in, nullptr, walk_kind_in), frames(0) {}

    bool VisitFrame() OVERRIDE {
      frames++;
      return true;
    }

    size_t frames;
  };
  NumFramesVisitor visitor(thread, walk_kind);
  visitor.WalkStack(true);
  return visitor.frames;
}

bool StackVisitor::GetNextMethodAndDexPc(ArtMethod** next_method, uint32_t* next_dex_pc) {
  struct HasMoreFramesVisitor : public StackVisitor {
    HasMoreFramesVisitor(Thread* thread,
                         StackWalkKind walk_kind,
                         size_t num_frames,
                         size_t frame_height)
        : StackVisitor(thread, nullptr, walk_kind, num_frames),
          frame_height_(frame_height),
          found_frame_(false),
          has_more_frames_(false),
          next_method_(nullptr),
          next_dex_pc_(0) {
    }

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      if (found_frame_) {
        ArtMethod* method = GetMethod();
        if (method != nullptr && !method->IsRuntimeMethod()) {
          has_more_frames_ = true;
          next_method_ = method;
          next_dex_pc_ = GetDexPc();
          return false;  // End stack walk once next method is found.
        }
      } else if (GetFrameHeight() == frame_height_) {
        found_frame_ = true;
      }
      return true;
    }

    size_t frame_height_;
    bool found_frame_;
    bool has_more_frames_;
    ArtMethod* next_method_;
    uint32_t next_dex_pc_;
  };
  HasMoreFramesVisitor visitor(thread_, walk_kind_, GetNumFrames(), GetFrameHeight());
  visitor.WalkStack(true);
  *next_method = visitor.next_method_;
  *next_dex_pc = visitor.next_dex_pc_;
  return visitor.has_more_frames_;
}

void StackVisitor::DescribeStack(Thread* thread) {
  struct DescribeStackVisitor : public StackVisitor {
    explicit DescribeStackVisitor(Thread* thread_in)
        : StackVisitor(thread_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

    bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
      LOG(INFO) << "Frame Id=" << GetFrameId() << " " << DescribeLocation();
      return true;
    }
  };
  DescribeStackVisitor visitor(thread);
  visitor.WalkStack(true);
}

std::string StackVisitor::DescribeLocation() const {
  std::string result("Visiting method '");
  ArtMethod* m = GetMethod();
  if (m == nullptr) {
    return "upcall";
  }
  result += m->PrettyMethod();
  result += StringPrintf("' at dex PC 0x%04x", GetDexPc());
  if (!IsShadowFrame()) {
    result += StringPrintf(" (native PC %p)", reinterpret_cast<void*>(GetCurrentQuickFramePc()));
  }
  return result;
}

void StackVisitor::SetMethod(ArtMethod* method) {
  DCHECK(GetMethod() != nullptr);
  if (cur_shadow_frame_ != nullptr) {
    cur_shadow_frame_->SetMethod(method);
  } else {
    DCHECK(cur_quick_frame_ != nullptr);
    CHECK(!IsInInlinedFrame()) << "We do not support setting inlined method's ArtMethod!";
    *cur_quick_frame_ = method;
  }
}

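// Checks that `pc` lies within the method's compiled code, unless the method's entrypoint is one
// of the runtime stubs or trampolines handled below.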
static void AssertPcIsWithinQuickCode(ArtMethod* method, uintptr_t pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (method->IsNative() || method->IsRuntimeMethod() || method->IsProxyMethod()) {
    return;
  }

  if (pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc())) {
    return;
  }

  Runtime* runtime = Runtime::Current();
  if (runtime->UseJitCompilation() &&
      runtime->GetJit()->GetCodeCache()->ContainsPc(reinterpret_cast<const void*>(pc))) {
    return;
  }

  const void* code = method->GetEntryPointFromQuickCompiledCode();
  if (code == GetQuickInstrumentationEntryPoint() || code == GetInvokeObsoleteMethodStub()) {
    return;
  }

  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  if (class_linker->IsQuickToInterpreterBridge(code) ||
      class_linker->IsQuickResolutionStub(code)) {
    return;
  }

  if (runtime->UseJitCompilation() && runtime->GetJit()->GetCodeCache()->ContainsPc(code)) {
    return;
  }

  uint32_t code_size = OatQuickMethodHeader::FromEntryPoint(code)->GetCodeSize();
  uintptr_t code_start = reinterpret_cast<uintptr_t>(code);
  CHECK(code_start <= pc && pc <= (code_start + code_size))
      << method->PrettyMethod()
      << " pc=" << std::hex << pc
      << " code_start=" << code_start
      << " code_size=" << code_size;
}

void StackVisitor::SanityCheckFrame() const {
  if (kIsDebugBuild) {
    ArtMethod* method = GetMethod();
    mirror::Class* declaring_class = method->GetDeclaringClass();
    // Runtime methods have null declaring class.
    if (!method->IsRuntimeMethod()) {
      CHECK(declaring_class != nullptr);
      CHECK_EQ(declaring_class->GetClass(), declaring_class->GetClass()->GetClass())
          << declaring_class;
    } else {
      CHECK(declaring_class == nullptr);
    }
    Runtime* const runtime = Runtime::Current();
    LinearAlloc* const linear_alloc = runtime->GetLinearAlloc();
    if (!linear_alloc->Contains(method)) {
      // Check class linker linear allocs.
      // We get the canonical method as copied methods may have their declaring
      // class from another class loader.
      ArtMethod* canonical = method->GetCanonicalMethod();
      mirror::Class* klass = canonical->GetDeclaringClass();
      LinearAlloc* const class_linear_alloc = (klass != nullptr)
          ? runtime->GetClassLinker()->GetAllocatorForClassLoader(klass->GetClassLoader())
          : linear_alloc;
      if (!class_linear_alloc->Contains(canonical)) {
        // Check image space.
        bool in_image = false;
        for (auto& space : runtime->GetHeap()->GetContinuousSpaces()) {
          if (space->IsImageSpace()) {
            auto* image_space = space->AsImageSpace();
            const auto& header = image_space->GetImageHeader();
            const ImageSection& methods = header.GetMethodsSection();
            const ImageSection& runtime_methods = header.GetRuntimeMethodsSection();
            const size_t offset = reinterpret_cast<const uint8_t*>(canonical) - image_space->Begin();
            if (methods.Contains(offset) || runtime_methods.Contains(offset)) {
              in_image = true;
              break;
            }
          }
        }
        CHECK(in_image) << canonical->PrettyMethod() << " not in linear alloc or image";
      }
    }
    if (cur_quick_frame_ != nullptr) {
      AssertPcIsWithinQuickCode(method, cur_quick_frame_pc_);
      // Frame sanity.
      size_t frame_size = GetCurrentQuickFrameInfo().FrameSizeInBytes();
      CHECK_NE(frame_size, 0u);
      // A rough guess at an upper size we expect to see for a frame.
      // 256 registers
      // 2 words HandleScope overhead
      // 3+3 register spills
      // TODO: this seems architecture specific for the case of JNI frames.
      // TODO: 083-compiler-regressions ManyFloatArgs shows this estimate is wrong.
      // const size_t kMaxExpectedFrameSize = (256 + 2 + 3 + 3) * sizeof(word);
      const size_t kMaxExpectedFrameSize = 2 * KB;
      CHECK_LE(frame_size, kMaxExpectedFrameSize) << method->PrettyMethod();
      size_t return_pc_offset = GetCurrentQuickFrameInfo().GetReturnPcOffset();
      CHECK_LT(return_pc_offset, frame_size);
    }
  }
}

// Counts the number of references in the parameter list of the corresponding method.
// Note: This does _not_ include "this" for non-static methods.
static uint32_t GetNumberOfReferenceArgsWithoutReceiver(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  uint32_t shorty_len;
  const char* shorty = method->GetShorty(&shorty_len);
  uint32_t refs = 0;
  for (uint32_t i = 1; i < shorty_len ; ++i) {
    if (shorty[i] == 'L') {
      refs++;
    }
  }
  return refs;
}

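// Returns the frame layout of the current quick frame. When there is no method header to consult
// (abstract, runtime, proxy or generic JNI frames), the layout is reconstructed from the
// callee-save frame conventions.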
QuickMethodFrameInfo StackVisitor::GetCurrentQuickFrameInfo() const {
  if (cur_oat_quick_method_header_ != nullptr) {
    return cur_oat_quick_method_header_->GetFrameInfo();
  }

  ArtMethod* method = GetMethod();
  Runtime* runtime = Runtime::Current();

  if (method->IsAbstract()) {
    return RuntimeCalleeSaveFrame::GetMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // This goes before IsProxyMethod since runtime methods have a null declaring class.
  if (method->IsRuntimeMethod()) {
    return runtime->GetRuntimeMethodFrameInfo(method);
  }

  if (method->IsProxyMethod()) {
    // There is only one direct method of a proxy class: the constructor. A direct method is
    // cloned from the original java.lang.reflect.Proxy and is executed as usual quick
    // compiled method without any stubs. Therefore the method must have a OatQuickMethodHeader.
    DCHECK(!method->IsDirect() && !method->IsConstructor())
        << "Constructors of proxy classes must have a OatQuickMethodHeader";
    return RuntimeCalleeSaveFrame::GetMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);
  }

  // The only remaining case is if the method is native and uses the generic JNI stub,
  // called either directly or through some (resolution, instrumentation) trampoline.
  DCHECK(method->IsNative());
  if (kIsDebugBuild) {
    ClassLinker* class_linker = runtime->GetClassLinker();
    const void* entry_point = runtime->GetInstrumentation()->GetQuickCodeFor(method,
                                                                             kRuntimePointerSize);
    CHECK(class_linker->IsQuickGenericJniStub(entry_point) ||
          // The current entrypoint (after filtering out trampolines) may have changed
          // from GenericJNI to JIT-compiled stub since we have entered this frame.
          (runtime->GetJit() != nullptr &&
           runtime->GetJit()->GetCodeCache()->ContainsPc(entry_point))) << method->PrettyMethod();
  }
  // Generic JNI frame.
  uint32_t handle_refs = GetNumberOfReferenceArgsWithoutReceiver(method) + 1;
  size_t scope_size = HandleScope::SizeOf(handle_refs);
  constexpr QuickMethodFrameInfo callee_info =
      RuntimeCalleeSaveFrame::GetMethodFrameInfo(CalleeSaveType::kSaveRefsAndArgs);

  // Callee saves + handle scope + method ref + alignment
  // Note: -sizeof(void*) since callee-save frame stores a whole method pointer.
  size_t frame_size = RoundUp(
      callee_info.FrameSizeInBytes() - sizeof(void*) + sizeof(ArtMethod*) + scope_size,
      kStackAlignment);
  return QuickMethodFrameInfo(frame_size, callee_info.CoreSpillMask(), callee_info.FpSpillMask());
}

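// The core stack walk: iterates over the thread's managed stack fragments, visiting quick frames
// (expanding inlined frames when requested) and shadow frames, and unwinds quick frames through
// their return pc while accounting for instrumentation exit stubs.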
template <StackVisitor::CountTransitions kCount>
void StackVisitor::WalkStack(bool include_transitions) {
  if (check_suspended_) {
    DCHECK(thread_ == Thread::Current() || thread_->IsSuspended());
  }
  CHECK_EQ(cur_depth_, 0U);
  bool exit_stubs_installed = Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled();
  uint32_t instrumentation_stack_depth = 0;
  size_t inlined_frames_count = 0;

  for (const ManagedStack* current_fragment = thread_->GetManagedStack();
       current_fragment != nullptr; current_fragment = current_fragment->GetLink()) {
    cur_shadow_frame_ = current_fragment->GetTopShadowFrame();
    cur_quick_frame_ = current_fragment->GetTopQuickFrame();
    cur_quick_frame_pc_ = 0;
    cur_oat_quick_method_header_ = nullptr;

    if (cur_quick_frame_ != nullptr) {  // Handle quick stack frames.
      // Can't be both a shadow and a quick fragment.
      DCHECK(current_fragment->GetTopShadowFrame() == nullptr);
      ArtMethod* method = *cur_quick_frame_;
      DCHECK(method != nullptr);
      bool header_retrieved = false;
      if (method->IsNative()) {
        // We do not have a PC for the first frame, so we cannot simply use
        // ArtMethod::GetOatQuickMethodHeader() as we're unable to distinguish there
        // between GenericJNI frame and JIT-compiled JNI stub; the entrypoint may have
        // changed since the frame was entered. The top quick frame tag indicates
        // GenericJNI here, otherwise it's either AOT-compiled or JNI-compiled JNI stub.
        if (UNLIKELY(current_fragment->GetTopQuickFrameTag())) {
          // The generic JNI does not have any method header.
          cur_oat_quick_method_header_ = nullptr;
        } else {
          const void* existing_entry_point = method->GetEntryPointFromQuickCompiledCode();
          CHECK(existing_entry_point != nullptr);
          Runtime* runtime = Runtime::Current();
          ClassLinker* class_linker = runtime->GetClassLinker();
          // Check whether we can quickly get the header from the current entrypoint.
          if (!class_linker->IsQuickGenericJniStub(existing_entry_point) &&
              !class_linker->IsQuickResolutionStub(existing_entry_point) &&
              existing_entry_point != GetQuickInstrumentationEntryPoint()) {
            cur_oat_quick_method_header_ =
                OatQuickMethodHeader::FromEntryPoint(existing_entry_point);
          } else {
            const void* code = method->GetOatMethodQuickCode(class_linker->GetImagePointerSize());
            if (code != nullptr) {
              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromEntryPoint(code);
            } else {
              // This must be a JITted JNI stub frame.
              CHECK(runtime->GetJit() != nullptr);
              code = runtime->GetJit()->GetCodeCache()->GetJniStubCode(method);
              CHECK(code != nullptr) << method->PrettyMethod();
              cur_oat_quick_method_header_ = OatQuickMethodHeader::FromCodePointer(code);
            }
          }
        }
        header_retrieved = true;
      }
      while (method != nullptr) {
        if (!header_retrieved) {
          cur_oat_quick_method_header_ = method->GetOatQuickMethodHeader(cur_quick_frame_pc_);
        }
        header_retrieved = false;  // Force header retrieval in next iteration.
        SanityCheckFrame();

        if ((walk_kind_ == StackWalkKind::kIncludeInlinedFrames)
            && (cur_oat_quick_method_header_ != nullptr)
            && cur_oat_quick_method_header_->IsOptimized()) {
          CodeInfo code_info(cur_oat_quick_method_header_);
          uint32_t native_pc_offset =
              cur_oat_quick_method_header_->NativeQuickPcOffset(cur_quick_frame_pc_);
          StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
          if (stack_map.IsValid() && stack_map.HasInlineInfo()) {
            InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
            DCHECK_EQ(current_inlining_depth_, 0u);
            for (current_inlining_depth_ = inline_info.GetDepth();
                 current_inlining_depth_ != 0;
                 --current_inlining_depth_) {
              bool should_continue = VisitFrame();
              if (UNLIKELY(!should_continue)) {
                return;
              }
              cur_depth_++;
              inlined_frames_count++;
            }
          }
        }

        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }

        QuickMethodFrameInfo frame_info = GetCurrentQuickFrameInfo();
        if (context_ != nullptr) {
          context_->FillCalleeSaves(reinterpret_cast<uint8_t*>(cur_quick_frame_), frame_info);
        }
        // Compute PC for next stack frame from return PC.
        size_t frame_size = frame_info.FrameSizeInBytes();
        size_t return_pc_offset = frame_size - sizeof(void*);
        uint8_t* return_pc_addr = reinterpret_cast<uint8_t*>(cur_quick_frame_) + return_pc_offset;
        uintptr_t return_pc = *reinterpret_cast<uintptr_t*>(return_pc_addr);

        if (UNLIKELY(exit_stubs_installed)) {
          // While profiling, the return pc is restored from the side stack, except when walking
          // the stack for an exception where the side stack will be unwound in VisitFrame.
          if (reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == return_pc) {
            CHECK_LT(instrumentation_stack_depth, thread_->GetInstrumentationStack()->size());
            const instrumentation::InstrumentationStackFrame& instrumentation_frame =
                thread_->GetInstrumentationStack()->at(instrumentation_stack_depth);
            instrumentation_stack_depth++;
            if (GetMethod() ==
                Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveAllCalleeSaves)) {
              // Skip runtime save all callee frames which are used to deliver exceptions.
            } else if (instrumentation_frame.interpreter_entry_) {
              ArtMethod* callee =
                  Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs);
              CHECK_EQ(GetMethod(), callee) << "Expected: " << ArtMethod::PrettyMethod(callee)
                                            << " Found: " << ArtMethod::PrettyMethod(GetMethod());
            } else {
              // Instrumentation generally doesn't distinguish between a method's obsolete and
              // non-obsolete version.
              CHECK_EQ(instrumentation_frame.method_->GetNonObsoleteMethod(),
                       GetMethod()->GetNonObsoleteMethod())
                  << "Expected: "
                  << ArtMethod::PrettyMethod(instrumentation_frame.method_->GetNonObsoleteMethod())
                  << " Found: " << ArtMethod::PrettyMethod(GetMethod()->GetNonObsoleteMethod());
            }
            if (num_frames_ != 0) {
              // Check agreement of frame Ids only if num_frames_ is computed to avoid infinite
              // recursion.
              size_t frame_id = instrumentation::Instrumentation::ComputeFrameId(
                  thread_,
                  cur_depth_,
                  inlined_frames_count);
              CHECK_EQ(instrumentation_frame.frame_id_, frame_id);
            }
            return_pc = instrumentation_frame.return_pc_;
          }
        }

        cur_quick_frame_pc_ = return_pc;
        uint8_t* next_frame = reinterpret_cast<uint8_t*>(cur_quick_frame_) + frame_size;
        cur_quick_frame_ = reinterpret_cast<ArtMethod**>(next_frame);

        if (kDebugStackWalk) {
          LOG(INFO) << ArtMethod::PrettyMethod(method) << "@" << method << " size=" << frame_size
              << std::boolalpha
              << " optimized=" << (cur_oat_quick_method_header_ != nullptr &&
                                   cur_oat_quick_method_header_->IsOptimized())
              << " native=" << method->IsNative()
              << std::noboolalpha
              << " entrypoints=" << method->GetEntryPointFromQuickCompiledCode()
              << "," << (method->IsNative() ? method->GetEntryPointFromJni() : nullptr)
              << " next=" << *cur_quick_frame_;
        }

        if (kCount == CountTransitions::kYes || !method->IsRuntimeMethod()) {
          cur_depth_++;
        }
        method = *cur_quick_frame_;
      }
    } else if (cur_shadow_frame_ != nullptr) {
      do {
        SanityCheckFrame();
        bool should_continue = VisitFrame();
        if (UNLIKELY(!should_continue)) {
          return;
        }
        cur_depth_++;
        cur_shadow_frame_ = cur_shadow_frame_->GetLink();
      } while (cur_shadow_frame_ != nullptr);
    }
    if (include_transitions) {
      bool should_continue = VisitFrame();
      if (!should_continue) {
        return;
      }
    }
    if (kCount == CountTransitions::kYes) {
      cur_depth_++;
    }
  }
  if (num_frames_ != 0) {
    CHECK_EQ(cur_depth_, num_frames_);
  }
}

template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kYes>(bool);
template void StackVisitor::WalkStack<StackVisitor::CountTransitions::kNo>(bool);

}  // namespace art