blob: 3bc718b05813c40fd1878a381747d3d5e7c6d90a [file] [log] [blame]
Sebastien Hertzd45a1f52014-01-09 14:56:54 +01001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020017#include "quick_exception_handler.h"
18
Ian Rogerse63db272014-07-15 15:36:11 -070019#include "arch/context.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Andreas Gampe170331f2017-12-07 18:41:03 -080022#include "base/logging.h" // For VLOG_IS_ON.
Nicolas Geoffray62e7c092019-01-08 09:43:01 +000023#include "base/systrace.h"
David Sehr9e734c72018-01-04 17:56:19 -080024#include "dex/dex_file_types.h"
25#include "dex/dex_instruction.h"
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020026#include "entrypoints/entrypoint_utils.h"
Andreas Gampe639bdd12015-06-03 11:22:45 -070027#include "entrypoints/quick/quick_entrypoints_enum.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070028#include "entrypoints/runtime_asm_entrypoints.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070029#include "handle_scope-inl.h"
Vladimir Marko6ec2a1b2018-05-22 15:33:48 +010030#include "interpreter/shadow_frame-inl.h"
Nicolas Geoffrayb88d59e2016-02-17 11:31:49 +000031#include "jit/jit.h"
32#include "jit/jit_code_cache.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070033#include "mirror/class-inl.h"
34#include "mirror/class_loader.h"
35#include "mirror/throwable.h"
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010036#include "oat_quick_method_header.h"
Vladimir Marko3a21e382016-09-02 12:38:38 +010037#include "stack.h"
Nicolas Geoffray6bc43742015-10-12 18:11:10 +010038#include "stack_map.h"
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010039
40namespace art {
41
// When true, dumps the stack and exception details to the log at each stage of
// exception delivery / deoptimization (see uses below). Debug aid only.
static constexpr bool kDebugExceptionDelivery = false;
// Sentinel for handler_frame_depth_ before any stack walk has run.
static constexpr size_t kInvalidFrameDepth = 0xffffffff;
Ian Rogers5cf98192014-05-29 21:31:50 -070044
// Sets up an exception-delivery (or deoptimization) session for `self`.
// Borrows the thread's long-jump context; all handler_* fields start empty and
// are filled in by the stack walks below. Method tracing is considered active
// for any deoptimization, or when instrumentation exit stubs are installed.
QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010060
// Finds catch handler.
//
// Walks the quick stack looking for a catch block that handles `*exception`.
// On success, records the handler method / dex pc / quick frame / method
// header into `exception_handler`. If the walk reaches the upcall (a frame
// with no ArtMethod) without finding a handler, it records the upcall frame
// instead so delivery can long-jump out of managed code.
class CatchBlockStackVisitor final : public StackVisitor {
 public:
  // `exception` is a handle so the thrown object stays visible to the GC while
  // we walk. `skip_frames` frames are passed over without searching — used by
  // FindCatch when re-walking after instrumentation callbacks (see below).
  CatchBlockStackVisitor(Thread* self,
                         Context* context,
                         Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler,
                         uint32_t skip_frames)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler),
        skip_frames_(skip_frames) {
  }

  // Called once per frame; returns false to stop the walk.
  bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      DCHECK_EQ(skip_frames_, 0u)
          << "We tried to skip an upcall! We should have returned to the upcall to finish delivery";
      // This is the upcall, we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check exception handler is set up for the unhandled exception handler
        // case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (skip_frames_ != 0) {
      // Frame already searched on a previous walk; pass over it.
      skip_frames_--;
      return true;
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  // Searches `method`'s try items for a catch covering the current dex pc.
  // Returns false (stop walking) when a handler is found; true to keep
  // unwinding. Also frees any debugger shadow frame for frames being unwound.
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = dex::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != dex::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != dex::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_for_catch_handler= */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;
  // The number of frames to skip searching for catches in.
  uint32_t skip_frames_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};
157
// Counts how many instrumentation stack frames lie above `frame_depth` on
// `self`'s stack, i.e. how many entries must be popped off the
// instrumentation stack before control can transfer to the handler frame.
// Frames are recognized by their return pc pointing at the instrumentation
// exit stub; inlined frames are walked but never counted (they are not
// instrumented).
static size_t GetInstrumentationFramesToPop(Thread* self, size_t frame_depth)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  CHECK_NE(frame_depth, kInvalidFrameDepth);
  size_t instrumentation_frames_to_pop = 0;
  StackVisitor::WalkStack(
      [&](art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        size_t current_frame_depth = stack_visitor->GetFrameDepth();
        if (current_frame_depth < frame_depth) {
          CHECK(stack_visitor->GetMethod() != nullptr);
          if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) ==
                           stack_visitor->GetReturnPc())) {
            if (!stack_visitor->IsInInlinedFrame()) {
              // We do not count inlined frames, because we do not instrument them. The reason we
              // include them in the stack walking is the check against `frame_depth_`, which is
              // given to us by a visitor that visits inlined frames.
              ++instrumentation_frames_to_pop;
            }
          }
          return true;
        }
        // We reached the frame of the catch handler or the upcall.
        return false;
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames,
      /* check_suspended */ true,
      /* include_transitions */ true);
  return instrumentation_frames_to_pop;
}
Alex Light2c8206f2018-06-08 14:51:09 -0700188
// Finds the appropriate exception catch after calling all method exit instrumentation functions.
// Note that this might change the exception being thrown.
//
// Drives the delivery loop: walk for a catch handler, count instrumentation
// frames above it, then let InstrumentationStackPopper pop them (running exit
// callbacks). If a callback throws, the loop restarts the search with the
// already-searched frames skipped. Unless the handler asked for the exception
// to be cleared, the (possibly replaced) exception is re-installed at the end.
void QuickExceptionHandler::FindCatch(ObjPtr<mirror::Throwable> exception) {
  DCHECK(!is_deoptimization_);
  instrumentation::InstrumentationStackPopper popper(self_);
  // The number of total frames we have so far popped.
  uint32_t already_popped = 0;
  bool popped_to_top = true;
  StackHandleScope<1> hs(self_);
  // Handle keeps the throwable alive and visible to the GC across callbacks.
  MutableHandle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
  // Sending the instrumentation events (done by the InstrumentationStackPopper) can cause new
  // exceptions to be thrown which will override the current exception. Therefore we need to perform
  // the search for a catch in a loop until we have successfully popped all the way to a catch or
  // the top of the stack.
  do {
    if (kDebugExceptionDelivery) {
      ObjPtr<mirror::String> msg = exception_ref->GetDetailMessage();
      std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
      self_->DumpStack(LOG_STREAM(INFO) << "Delivering exception: " << exception_ref->PrettyTypeOf()
                       << ": " << str_msg << "\n");
    }

    // Walk the stack to find catch handler.
    CatchBlockStackVisitor visitor(self_, context_,
                                   &exception_ref,
                                   this,
                                   /*skip_frames=*/already_popped);
    visitor.WalkStack(true);
    uint32_t new_pop_count = handler_frame_depth_;
    DCHECK_GE(new_pop_count, already_popped);
    already_popped = new_pop_count;

    // Figure out how many of those frames have instrumentation we need to remove (Should be the
    // exact same as number of new_pop_count if there aren't inlined frames).
    size_t instrumentation_frames_to_pop =
        GetInstrumentationFramesToPop(self_, handler_frame_depth_);

    if (kDebugExceptionDelivery) {
      if (*handler_quick_frame_ == nullptr) {
        LOG(INFO) << "Handler is upcall";
      }
      if (handler_method_ != nullptr) {
        const DexFile* dex_file = handler_method_->GetDexFile();
        int line_number = annotations::GetLineNumFromPC(dex_file, handler_method_, handler_dex_pc_);
        LOG(INFO) << "Handler: " << handler_method_->PrettyMethod() << " (line: "
                  << line_number << ")";
      }
      LOG(INFO) << "Will attempt to pop " << instrumentation_frames_to_pop
                << " off of the instrumentation stack";
    }
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
    // If the handler is in optimized code, we need to set the catch environment.
    if (*handler_quick_frame_ != nullptr &&
        handler_method_header_ != nullptr &&
        handler_method_header_->IsOptimized()) {
      SetCatchEnvironmentForOptimizedHandler(&visitor);
    }
    // May run exit callbacks which can throw, forcing another loop iteration.
    popped_to_top = popper.PopFramesTo(instrumentation_frames_to_pop, exception_ref);
  } while (!popped_to_top);
  if (!clear_exception_) {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
}
254
255static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
256 // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
257 // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
258 // distinguish between core/FPU registers and low/high bits on 64-bit.
259 switch (kind) {
260 case DexRegisterLocation::Kind::kConstant:
261 case DexRegisterLocation::Kind::kInStack:
262 // VRegKind is ignored.
263 return VRegKind::kUndefined;
264
265 case DexRegisterLocation::Kind::kInRegister:
266 // Selects core register. For 64-bit registers, selects low 32 bits.
267 return VRegKind::kLongLoVReg;
268
269 case DexRegisterLocation::Kind::kInRegisterHigh:
270 // Selects core register. For 64-bit registers, selects high 32 bits.
271 return VRegKind::kLongHiVReg;
272
273 case DexRegisterLocation::Kind::kInFpuRegister:
274 // Selects FPU register. For 64-bit registers, selects low 32 bits.
275 return VRegKind::kDoubleLoVReg;
276
277 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
278 // Selects FPU register. For 64-bit registers, selects high 32 bits.
279 return VRegKind::kDoubleHiVReg;
280
281 default:
David Srbecky7dc11782016-02-25 13:23:56 +0000282 LOG(FATAL) << "Unexpected vreg location " << kind;
David Brazdil77a48ae2015-09-15 12:34:04 +0000283 UNREACHABLE();
284 }
285}
286
// Populates the catch block's environment in an optimized-code handler frame.
// For every vreg the catch stack map declares live (all of which are expected
// to live on the stack — see the DCHECK below), reads the value from its
// location at the throwing instruction and writes it into the catch phi's
// stack slot in the handler's quick frame.
// `stack_visitor` must be positioned at the handler frame.
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG_STREAM(INFO) << "Setting catch phis: ");
  }

  CodeItemDataAccessor accessor(handler_method_->DexInstructionData());
  const size_t number_of_vregs = accessor.RegistersSize();
  CodeInfo code_info(handler_method_header_);

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc());
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map = code_info.GetDexRegisterMapOf(catch_stack_map);
  if (!catch_vreg_map.HasAnyLiveDexRegisters()) {
    // Nothing live at the catch entry; no phi slots to fill.
    return;
  }
  DCHECK_EQ(catch_vreg_map.size(), number_of_vregs);

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset());
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map = code_info.GetDexRegisterMapOf(throw_stack_map);
  DCHECK_EQ(throw_vreg_map.size(), number_of_vregs);

  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location = catch_vreg_map[vreg].GetKind();
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      // Vreg not live in the catch block; skip.
      continue;
    }
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map[vreg].GetKind());
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << ArtMethod::PrettyMethod(stack_visitor->GetMethod())
                            << ", dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map[vreg].GetStackOffsetInBytes();
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}
344
Ian Rogers5cf98192014-05-29 21:31:50 -0700345// Prepares deoptimization.
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100346class DeoptimizeStackVisitor final : public StackVisitor {
Ian Rogers5cf98192014-05-29 21:31:50 -0700347 public:
Andreas Gampe639bdd12015-06-03 11:22:45 -0700348 DeoptimizeStackVisitor(Thread* self,
349 Context* context,
350 QuickExceptionHandler* exception_handler,
351 bool single_frame)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700352 REQUIRES_SHARED(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100353 : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100354 exception_handler_(exception_handler),
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700355 prev_shadow_frame_(nullptr),
Andreas Gampe639bdd12015-06-03 11:22:45 -0700356 stacked_shadow_frame_pushed_(false),
357 single_frame_deopt_(single_frame),
Nicolas Geoffray73be1e82015-09-17 15:22:56 +0100358 single_frame_done_(false),
Nicolas Geoffrayb52de242016-02-19 12:43:12 +0000359 single_frame_deopt_method_(nullptr),
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700360 single_frame_deopt_quick_method_header_(nullptr),
361 callee_method_(nullptr) {
Nicolas Geoffray73be1e82015-09-17 15:22:56 +0100362 }
363
364 ArtMethod* GetSingleFrameDeoptMethod() const {
365 return single_frame_deopt_method_;
Ian Rogers5cf98192014-05-29 21:31:50 -0700366 }
367
Nicolas Geoffrayb52de242016-02-19 12:43:12 +0000368 const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
369 return single_frame_deopt_quick_method_header_;
370 }
371
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700372 void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700373 // This is the upcall, or the next full frame in single-frame deopt, or the
374 // code isn't deoptimizeable. We remember the frame and last pc so that we
375 // may long jump to them.
376 exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
377 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
378 exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
379 if (!stacked_shadow_frame_pushed_) {
380 // In case there is no deoptimized shadow frame for this upcall, we still
381 // need to push a nullptr to the stack since there is always a matching pop after
382 // the long jump.
383 GetThread()->PushStackedShadowFrame(nullptr,
384 StackedShadowFrameType::kDeoptimizationShadowFrame);
385 stacked_shadow_frame_pushed_ = true;
386 }
387 if (GetMethod() == nullptr) {
388 exception_handler_->SetFullFragmentDone(true);
389 } else {
David Sehr709b0702016-10-13 09:12:37 -0700390 CHECK(callee_method_ != nullptr) << GetMethod()->PrettyMethod(false);
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700391 exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
392 }
393 }
394
Roland Levillainbbc6e7e2018-08-24 16:58:47 +0100395 bool VisitFrame() override REQUIRES_SHARED(Locks::mutator_lock_) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700396 exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700397 ArtMethod* method = GetMethod();
Alex Light0aa7a5a2018-10-10 15:58:14 +0000398 VLOG(deopt) << "Deoptimizing stack: depth: " << GetFrameDepth()
399 << " at method " << ArtMethod::PrettyMethod(method);
Andreas Gampe639bdd12015-06-03 11:22:45 -0700400 if (method == nullptr || single_frame_done_) {
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700401 FinishStackWalk();
Ian Rogers5cf98192014-05-29 21:31:50 -0700402 return false; // End stack walk.
403 } else if (method->IsRuntimeMethod()) {
404 // Ignore callee save method.
405 DCHECK(method->IsCalleeSaveMethod());
406 return true;
Sebastien Hertz520633b2015-09-08 17:03:36 +0200407 } else if (method->IsNative()) {
408 // If we return from JNI with a pending exception and want to deoptimize, we need to skip
409 // the native method.
410 // The top method is a runtime method, the native method comes next.
411 CHECK_EQ(GetFrameDepth(), 1U);
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700412 callee_method_ = method;
Sebastien Hertz520633b2015-09-08 17:03:36 +0200413 return true;
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700414 } else if (!single_frame_deopt_ &&
Nicolas Geoffray433b79a2017-01-30 20:54:45 +0000415 !Runtime::Current()->IsAsyncDeoptimizeable(GetCurrentQuickFramePc())) {
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700416 // We hit some code that's not deoptimizeable. However, Single-frame deoptimization triggered
417 // from compiled code is always allowed since HDeoptimize always saves the full environment.
Nicolas Geoffray433b79a2017-01-30 20:54:45 +0000418 LOG(WARNING) << "Got request to deoptimize un-deoptimizable method "
419 << method->PrettyMethod();
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700420 FinishStackWalk();
421 return false; // End stack walk.
Ian Rogers5cf98192014-05-29 21:31:50 -0700422 } else {
Nicolas Geoffray33856502015-10-20 15:52:58 +0100423 // Check if a shadow frame already exists for debugger's set-local-value purpose.
424 const size_t frame_id = GetFrameId();
425 ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
426 const bool* updated_vregs;
David Sehr0225f8e2018-01-31 08:52:24 +0000427 CodeItemDataAccessor accessor(method->DexInstructionData());
Mathieu Chartier808c7a52017-12-15 11:19:33 -0800428 const size_t num_regs = accessor.RegistersSize();
Nicolas Geoffray33856502015-10-20 15:52:58 +0100429 if (new_frame == nullptr) {
430 new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
431 updated_vregs = nullptr;
432 } else {
433 updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
434 DCHECK(updated_vregs != nullptr);
435 }
Andreas Gampebf9611f2016-03-25 16:58:00 -0700436 HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
Nicolas Geoffray33856502015-10-20 15:52:58 +0100437 if (updated_vregs != nullptr) {
438 // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
439 // array so this must come after we processed the frame.
440 GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
441 DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
442 }
443 if (prev_shadow_frame_ != nullptr) {
444 prev_shadow_frame_->SetLink(new_frame);
445 } else {
446 // Will be popped after the long jump after DeoptimizeStack(),
447 // right before interpreter::EnterInterpreterFromDeoptimize().
448 stacked_shadow_frame_pushed_ = true;
449 GetThread()->PushStackedShadowFrame(
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700450 new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
Nicolas Geoffray33856502015-10-20 15:52:58 +0100451 }
452 prev_shadow_frame_ = new_frame;
453
Andreas Gampe639bdd12015-06-03 11:22:45 -0700454 if (single_frame_deopt_ && !IsInInlinedFrame()) {
455 // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
Andreas Gampe639bdd12015-06-03 11:22:45 -0700456 single_frame_done_ = true;
Nicolas Geoffray73be1e82015-09-17 15:22:56 +0100457 single_frame_deopt_method_ = method;
Nicolas Geoffrayb52de242016-02-19 12:43:12 +0000458 single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
Andreas Gampe639bdd12015-06-03 11:22:45 -0700459 }
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700460 callee_method_ = method;
Andreas Gampe639bdd12015-06-03 11:22:45 -0700461 return true;
Ian Rogers5cf98192014-05-29 21:31:50 -0700462 }
463 }
464
465 private:
Nicolas Geoffray33856502015-10-20 15:52:58 +0100466 void HandleOptimizingDeoptimization(ArtMethod* m,
467 ShadowFrame* new_frame,
468 const bool* updated_vregs)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700469 REQUIRES_SHARED(Locks::mutator_lock_) {
Nicolas Geoffray33856502015-10-20 15:52:58 +0100470 const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
David Srbecky052f8ca2018-04-26 15:42:54 +0100471 CodeInfo code_info(method_header);
Nicolas Geoffray33856502015-10-20 15:52:58 +0100472 uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
David Srbecky052f8ca2018-04-26 15:42:54 +0100473 StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
David Sehr0225f8e2018-01-31 08:52:24 +0000474 CodeItemDataAccessor accessor(m->DexInstructionData());
Mathieu Chartier808c7a52017-12-15 11:19:33 -0800475 const size_t number_of_vregs = accessor.RegistersSize();
David Srbecky052f8ca2018-04-26 15:42:54 +0100476 uint32_t register_mask = code_info.GetRegisterMaskOf(stack_map);
477 BitMemoryRegion stack_mask = code_info.GetStackMaskOf(stack_map);
David Brazdilefc3f022015-10-28 12:19:06 -0500478 DexRegisterMap vreg_map = IsInInlinedFrame()
David Srbecky93bd3612018-07-02 19:30:18 +0100479 ? code_info.GetInlineDexRegisterMapOf(stack_map, GetCurrentInlinedFrame())
David Srbeckyfd89b072018-06-03 12:00:22 +0100480 : code_info.GetDexRegisterMapOf(stack_map);
481 if (vreg_map.empty()) {
Nicolas Geoffray012fc4e2016-01-08 15:58:19 +0000482 return;
483 }
David Srbeckyfd89b072018-06-03 12:00:22 +0100484 DCHECK_EQ(vreg_map.size(), number_of_vregs);
Nicolas Geoffray012fc4e2016-01-08 15:58:19 +0000485
Nicolas Geoffray33856502015-10-20 15:52:58 +0100486 for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
487 if (updated_vregs != nullptr && updated_vregs[vreg]) {
488 // Keep the value set by debugger.
489 continue;
490 }
491
David Srbeckye1402122018-06-13 18:20:45 +0100492 DexRegisterLocation::Kind location = vreg_map[vreg].GetKind();
Nicolas Geoffray33856502015-10-20 15:52:58 +0100493 static constexpr uint32_t kDeadValue = 0xEBADDE09;
494 uint32_t value = kDeadValue;
495 bool is_reference = false;
496
497 switch (location) {
498 case DexRegisterLocation::Kind::kInStack: {
David Srbeckye1402122018-06-13 18:20:45 +0100499 const int32_t offset = vreg_map[vreg].GetStackOffsetInBytes();
Nicolas Geoffray33856502015-10-20 15:52:58 +0100500 const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
501 value = *reinterpret_cast<const uint32_t*>(addr);
502 uint32_t bit = (offset >> 2);
David Srbecky4b59d102018-05-29 21:46:10 +0000503 if (bit < stack_mask.size_in_bits() && stack_mask.LoadBit(bit)) {
Nicolas Geoffray33856502015-10-20 15:52:58 +0100504 is_reference = true;
505 }
506 break;
507 }
508 case DexRegisterLocation::Kind::kInRegister:
509 case DexRegisterLocation::Kind::kInRegisterHigh:
510 case DexRegisterLocation::Kind::kInFpuRegister:
511 case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
David Srbeckye1402122018-06-13 18:20:45 +0100512 uint32_t reg = vreg_map[vreg].GetMachineRegister();
Nicolas Geoffray33856502015-10-20 15:52:58 +0100513 bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
514 CHECK(result);
515 if (location == DexRegisterLocation::Kind::kInRegister) {
516 if (((1u << reg) & register_mask) != 0) {
517 is_reference = true;
518 }
519 }
520 break;
521 }
522 case DexRegisterLocation::Kind::kConstant: {
David Srbeckye1402122018-06-13 18:20:45 +0100523 value = vreg_map[vreg].GetConstant();
Nicolas Geoffray33856502015-10-20 15:52:58 +0100524 if (value == 0) {
525 // Make it a reference for extra safety.
526 is_reference = true;
527 }
528 break;
529 }
530 case DexRegisterLocation::Kind::kNone: {
531 break;
532 }
533 default: {
David Srbeckye1402122018-06-13 18:20:45 +0100534 LOG(FATAL) << "Unexpected location kind " << vreg_map[vreg].GetKind();
Nicolas Geoffray33856502015-10-20 15:52:58 +0100535 UNREACHABLE();
536 }
537 }
538 if (is_reference) {
539 new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
540 } else {
541 new_frame->SetVReg(vreg, value);
542 }
543 }
544 }
545
Sebastien Hertzc901dd72014-07-16 11:56:07 +0200546 static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
Vladimir Marko35d5b8a2018-07-03 09:18:32 +0100547 return static_cast<VRegKind>(kinds[reg * 2]);
Sebastien Hertzc901dd72014-07-16 11:56:07 +0200548 }
549
  // Exception handler driving this deoptimization; not owned by the visitor.
  QuickExceptionHandler* const exception_handler_;
  // Most recently created shadow frame during the walk.
  ShadowFrame* prev_shadow_frame_;
  // NOTE(review): presumably tracks whether the shadow frame was pushed on the
  // thread's stacked-shadow-frame record — usage is earlier in the file; confirm.
  bool stacked_shadow_frame_pushed_;
  // True when only the top frame is being deoptimized (see DeoptimizeSingleFrame).
  const bool single_frame_deopt_;
  // Set once the single frame has been handled, to stop the walk early.
  bool single_frame_done_;
  // Method deoptimized in single-frame mode; its compiled code gets invalidated
  // (see GetSingleFrameDeoptMethod() use in DeoptimizeSingleFrame).
  ArtMethod* single_frame_deopt_method_;
  // Method header of the compiled code being invalidated in single-frame mode.
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  // NOTE(review): appears to record the callee of the frame last visited; the
  // assignments are earlier in the file — confirm before relying on this.
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};
561
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700562void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
563 if (full_fragment_done_) {
564 // Restore deoptimization exception. When returning from the invoke stub,
565 // ArtMethod::Invoke() will see the special exception to know deoptimization
566 // is needed.
567 self_->SetException(Thread::GetDeoptimizationException());
568 } else {
569 // PC needs to be of the quick-to-interpreter bridge.
570 int32_t offset;
Andreas Gampe542451c2016-07-26 09:02:02 -0700571 offset = GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700572 handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
573 reinterpret_cast<uint8_t*>(self_) + offset);
574 }
575}
576
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200577void QuickExceptionHandler::DeoptimizeStack() {
578 DCHECK(is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700579 if (kDebugExceptionDelivery) {
Andreas Gampe3fec9ac2016-09-13 10:47:28 -0700580 self_->DumpStack(LOG_STREAM(INFO) << "Deoptimizing: ");
Ian Rogers5cf98192014-05-29 21:31:50 -0700581 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200582
Andreas Gampe639bdd12015-06-03 11:22:45 -0700583 DeoptimizeStackVisitor visitor(self_, context_, this, false);
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200584 visitor.WalkStack(true);
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700585 PrepareForLongJumpToInvokeStubOrInterpreterBridge();
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100586}
587
Nicolas Geoffray4e92c3c2017-05-08 09:34:26 +0100588void QuickExceptionHandler::DeoptimizeSingleFrame(DeoptimizationKind kind) {
Andreas Gampe639bdd12015-06-03 11:22:45 -0700589 DCHECK(is_deoptimization_);
590
Andreas Gampe639bdd12015-06-03 11:22:45 -0700591 DeoptimizeStackVisitor visitor(self_, context_, this, true);
592 visitor.WalkStack(true);
593
Nicolas Geoffrayb88d59e2016-02-17 11:31:49 +0000594 // Compiled code made an explicit deoptimization.
Nicolas Geoffray73be1e82015-09-17 15:22:56 +0100595 ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
Nicolas Geoffray62e7c092019-01-08 09:43:01 +0000596 SCOPED_TRACE << "Deoptimizing "
597 << deopt_method->PrettyMethod()
598 << ": " << GetDeoptimizationKindName(kind);
599
Nicolas Geoffray73be1e82015-09-17 15:22:56 +0100600 DCHECK(deopt_method != nullptr);
Nicolas Geoffray646d6382017-08-09 10:50:00 +0100601 if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
602 LOG(INFO) << "Single-frame deopting: "
603 << deopt_method->PrettyMethod()
604 << " due to "
605 << GetDeoptimizationKindName(kind);
Andreas Gampe98ea9d92018-10-19 14:06:15 -0700606 DumpFramesWithType(self_, /* details= */ true);
Nicolas Geoffray646d6382017-08-09 10:50:00 +0100607 }
Calin Juravleffc87072016-04-20 14:22:09 +0100608 if (Runtime::Current()->UseJitCompilation()) {
Nicolas Geoffrayb88d59e2016-02-17 11:31:49 +0000609 Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
Nicolas Geoffrayb52de242016-02-19 12:43:12 +0000610 deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
Nicolas Geoffrayb88d59e2016-02-17 11:31:49 +0000611 } else {
612 // Transfer the code to interpreter.
613 Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
614 deopt_method, GetQuickToInterpreterBridge());
615 }
Nicolas Geoffray73be1e82015-09-17 15:22:56 +0100616
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700617 PrepareForLongJumpToInvokeStubOrInterpreterBridge();
Andreas Gampe639bdd12015-06-03 11:22:45 -0700618}
619
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700620void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
621 // At this point, the instrumentation stack has been updated. We need to install
622 // the real return pc on stack, in case instrumentation stub is stored there,
623 // so that the interpreter bridge code can return to the right place.
624 if (return_pc != 0) {
625 uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
626 CHECK(pc_addr != nullptr);
627 pc_addr--;
628 *reinterpret_cast<uintptr_t*>(pc_addr) = return_pc;
629 }
Andreas Gampe639bdd12015-06-03 11:22:45 -0700630
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700631 // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
Andreas Gampe639bdd12015-06-03 11:22:45 -0700632 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
633 // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
634 // change how longjump works.
635 handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
636 reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
637 }
638}
639
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700640uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
Alex Light2c8206f2018-06-08 14:51:09 -0700641 DCHECK(is_deoptimization_) << "Non-deoptimization handlers should use FindCatch";
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700642 uintptr_t return_pc = 0;
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100643 if (method_tracing_active_) {
Andreas Gampec7d878d2018-11-19 18:42:06 +0000644 size_t instrumentation_frames_to_pop =
645 GetInstrumentationFramesToPop(self_, handler_frame_depth_);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100646 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
Alex Light2c8206f2018-06-08 14:51:09 -0700647 return_pc = instrumentation->PopFramesForDeoptimization(self_, instrumentation_frames_to_pop);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100648 }
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700649 return return_pc;
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100650}
651
// Performs the actual control transfer to the handler frame: loads the saved
// SP/PC/arg0 into the context and long-jumps. Never returns.
void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  // A zero handler pc would jump to nowhere; fail loudly instead.
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    // Clobber caller-save registers so stale values cannot be used after the jump.
    context_->SmashCallerSaves();
  }
  context_->DoLongJump();
  UNREACHABLE();
}
665
// Debug helper: walks `self`'s stack (including inlined frames) and logs one
// line per frame, tagged by frame type. With `details`, also logs raw pc,
// frame address, and return pc for each quick frame.
void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  StackVisitor::WalkStack(
      [&](const art::StackVisitor* stack_visitor) REQUIRES_SHARED(Locks::mutator_lock_) {
        ArtMethod* method = stack_visitor->GetMethod();
        if (details) {
          LOG(INFO) << "|> pc = " << std::hex << stack_visitor->GetCurrentQuickFramePc();
          LOG(INFO) << "|> addr = " << std::hex
                    << reinterpret_cast<uintptr_t>(stack_visitor->GetCurrentQuickFrame());
          // Return pc is only meaningful for a real quick frame with a method.
          if (stack_visitor->GetCurrentQuickFrame() != nullptr && method != nullptr) {
            LOG(INFO) << "|> ret = " << std::hex << stack_visitor->GetReturnPc();
          }
        }
        if (method == nullptr) {
          // Transition, do go on, we want to unwind over bridges, all the way.
          if (details) {
            LOG(INFO) << "N <transition>";
          }
          return true;
        } else if (method->IsRuntimeMethod()) {
          // "R": runtime (trampoline/stub) method.
          if (details) {
            LOG(INFO) << "R " << method->PrettyMethod(true);
          }
          return true;
        } else {
          // "S": shadow (interpreter) frame; "Q": quick (compiled) frame;
          // the "i" suffix marks an inlined quick frame.
          bool is_shadow = stack_visitor->GetCurrentShadowFrame() != nullptr;
          LOG(INFO) << (is_shadow ? "S" : "Q")
                    << ((!is_shadow && stack_visitor->IsInInlinedFrame()) ? "i" : " ")
                    << " "
                    << method->PrettyMethod(true);
          return true;  // Go on.
        }
      },
      self,
      /* context= */ nullptr,
      art::StackVisitor::StackWalkKind::kIncludeInlinedFrames);
}
702
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100703} // namespace art