/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_code.h"
#include "art_method-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "stack_map.h"
#include "verifier/method_verifier.h"

namespace art {

// Verbose logging of exception delivery/deoptimization; compile-time switch.
static constexpr bool kDebugExceptionDelivery = false;
// Sentinel for "frame depth not yet computed by a stack walk".
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

Sebastien Hertzfd3077e2014-04-23 10:32:43 +020038QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
39 : self_(self), context_(self->GetLongJumpContext()), is_deoptimization_(is_deoptimization),
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010040 method_tracing_active_(is_deoptimization ||
41 Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
Andreas Gampe639bdd12015-06-03 11:22:45 -070042 handler_quick_frame_(nullptr), handler_quick_frame_pc_(0), handler_quick_arg0_(0),
43 handler_method_(nullptr), handler_dex_pc_(0), clear_exception_(false),
44 handler_frame_depth_(kInvalidFrameDepth) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010045}
46
Sebastien Hertz520633b2015-09-08 17:03:36 +020047// Finds catch handler.
Ian Rogers5cf98192014-05-29 21:31:50 -070048class CatchBlockStackVisitor FINAL : public StackVisitor {
49 public:
50 CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
51 QuickExceptionHandler* exception_handler)
Mathieu Chartier90443472015-07-16 20:32:27 -070052 SHARED_REQUIRES(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +010053 : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +010054 exception_(exception),
Ian Rogers5cf98192014-05-29 21:31:50 -070055 exception_handler_(exception_handler) {
56 }
57
Mathieu Chartier90443472015-07-16 20:32:27 -070058 bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -070059 ArtMethod* method = GetMethod();
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -070060 exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
Ian Rogers5cf98192014-05-29 21:31:50 -070061 if (method == nullptr) {
62 // This is the upcall, we remember the frame and last pc so that we may long jump to them.
63 exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
64 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
65 uint32_t next_dex_pc;
Mathieu Chartiere401d142015-04-22 13:56:20 -070066 ArtMethod* next_art_method;
Ian Rogers5cf98192014-05-29 21:31:50 -070067 bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
68 // Report the method that did the down call as the handler.
69 exception_handler_->SetHandlerDexPc(next_dex_pc);
70 exception_handler_->SetHandlerMethod(next_art_method);
71 if (!has_next) {
72 // No next method? Check exception handler is set up for the unhandled exception handler
73 // case.
74 DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
75 DCHECK(nullptr == exception_handler_->GetHandlerMethod());
76 }
77 return false; // End stack walk.
78 }
79 if (method->IsRuntimeMethod()) {
80 // Ignore callee save method.
81 DCHECK(method->IsCalleeSaveMethod());
82 return true;
83 }
Mathieu Chartiere401d142015-04-22 13:56:20 -070084 return HandleTryItems(method);
Ian Rogers5cf98192014-05-29 21:31:50 -070085 }
86
87 private:
Mathieu Chartiere401d142015-04-22 13:56:20 -070088 bool HandleTryItems(ArtMethod* method)
Mathieu Chartier90443472015-07-16 20:32:27 -070089 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers5cf98192014-05-29 21:31:50 -070090 uint32_t dex_pc = DexFile::kDexNoIndex;
91 if (!method->IsNative()) {
92 dex_pc = GetDexPc();
93 }
94 if (dex_pc != DexFile::kDexNoIndex) {
95 bool clear_exception = false;
Sebastien Hertz26f72862015-09-15 09:52:07 +020096 StackHandleScope<1> hs(GetThread());
Ian Rogers5cf98192014-05-29 21:31:50 -070097 Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
Mathieu Chartiere401d142015-04-22 13:56:20 -070098 uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
Ian Rogers5cf98192014-05-29 21:31:50 -070099 exception_handler_->SetClearException(clear_exception);
100 if (found_dex_pc != DexFile::kDexNoIndex) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700101 exception_handler_->SetHandlerMethod(method);
Ian Rogers5cf98192014-05-29 21:31:50 -0700102 exception_handler_->SetHandlerDexPc(found_dex_pc);
David Brazdil72f7b882015-09-15 17:00:52 +0100103 exception_handler_->SetHandlerQuickFramePc(
Nicolas Geoffray6bc43742015-10-12 18:11:10 +0100104 GetCurrentCode().ToNativeQuickPc(found_dex_pc, /* is_catch_handler */ true));
Ian Rogers5cf98192014-05-29 21:31:50 -0700105 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
106 return false; // End stack walk.
Mingyao Yang99170c62015-07-06 11:10:37 -0700107 } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
108 // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
109 size_t frame_id = GetFrameId();
110 ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
111 if (frame != nullptr) {
112 // We will not execute this shadow frame so we can safely deallocate it.
113 GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
114 ShadowFrame::DeleteDeoptimizedFrame(frame);
115 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700116 }
117 }
118 return true; // Continue stack walk.
119 }
120
Ian Rogers5cf98192014-05-29 21:31:50 -0700121 // The exception we're looking for the catch block of.
122 Handle<mirror::Throwable>* exception_;
123 // The quick exception handler we're visiting for.
124 QuickExceptionHandler* const exception_handler_;
125
126 DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
127};
128
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000129void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200130 DCHECK(!is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700131 if (kDebugExceptionDelivery) {
132 mirror::String* msg = exception->GetDetailMessage();
133 std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
134 self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
135 << ": " << str_msg << "\n");
136 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700137 StackHandleScope<1> hs(self_);
138 Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200139
Sebastien Hertz520633b2015-09-08 17:03:36 +0200140 // Walk the stack to find catch handler.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700141 CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100142 visitor.WalkStack(true);
143
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200144 if (kDebugExceptionDelivery) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700145 if (*handler_quick_frame_ == nullptr) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100146 LOG(INFO) << "Handler is upcall";
Ian Rogers5cf98192014-05-29 21:31:50 -0700147 }
148 if (handler_method_ != nullptr) {
149 const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
150 int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
151 LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100152 }
153 }
154 if (clear_exception_) {
155 // Exception was cleared as part of delivery.
156 DCHECK(!self_->IsExceptionPending());
157 } else {
158 // Put exception back in root set with clear throw location.
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000159 self_->SetException(exception_ref.Get());
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100160 }
David Brazdil77a48ae2015-09-15 12:34:04 +0000161 // If the handler is in optimized code, we need to set the catch environment.
162 if (*handler_quick_frame_ != nullptr &&
163 handler_method_ != nullptr &&
Nicolas Geoffray6bc43742015-10-12 18:11:10 +0100164 ArtCode(handler_quick_frame_).IsOptimized(sizeof(void*))) {
David Brazdil77a48ae2015-09-15 12:34:04 +0000165 SetCatchEnvironmentForOptimizedHandler(&visitor);
166 }
167}
168
169static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
170 // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
171 // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
172 // distinguish between core/FPU registers and low/high bits on 64-bit.
173 switch (kind) {
174 case DexRegisterLocation::Kind::kConstant:
175 case DexRegisterLocation::Kind::kInStack:
176 // VRegKind is ignored.
177 return VRegKind::kUndefined;
178
179 case DexRegisterLocation::Kind::kInRegister:
180 // Selects core register. For 64-bit registers, selects low 32 bits.
181 return VRegKind::kLongLoVReg;
182
183 case DexRegisterLocation::Kind::kInRegisterHigh:
184 // Selects core register. For 64-bit registers, selects high 32 bits.
185 return VRegKind::kLongHiVReg;
186
187 case DexRegisterLocation::Kind::kInFpuRegister:
188 // Selects FPU register. For 64-bit registers, selects low 32 bits.
189 return VRegKind::kDoubleLoVReg;
190
191 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
192 // Selects FPU register. For 64-bit registers, selects high 32 bits.
193 return VRegKind::kDoubleHiVReg;
194
195 default:
196 LOG(FATAL) << "Unexpected vreg location "
197 << DexRegisterLocation::PrettyDescriptor(kind);
198 UNREACHABLE();
199 }
200}
201
202void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
203 DCHECK(!is_deoptimization_);
204 DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
Nicolas Geoffray6bc43742015-10-12 18:11:10 +0100205 DCHECK(handler_method_ != nullptr && ArtCode(handler_quick_frame_).IsOptimized(sizeof(void*)));
David Brazdil77a48ae2015-09-15 12:34:04 +0000206
207 if (kDebugExceptionDelivery) {
208 self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
209 }
210
211 const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
Nicolas Geoffray6bc43742015-10-12 18:11:10 +0100212 CodeInfo code_info = ArtCode(handler_quick_frame_).GetOptimizedCodeInfo();
David Brazdil77a48ae2015-09-15 12:34:04 +0000213 StackMapEncoding encoding = code_info.ExtractEncoding();
214
215 // Find stack map of the throwing instruction.
216 StackMap throw_stack_map =
217 code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
218 DCHECK(throw_stack_map.IsValid());
219 DexRegisterMap throw_vreg_map =
220 code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
221
222 // Find stack map of the catch block.
223 StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
224 DCHECK(catch_stack_map.IsValid());
225 DexRegisterMap catch_vreg_map =
226 code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
227
228 // Copy values between them.
229 for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
230 DexRegisterLocation::Kind catch_location =
231 catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
232 if (catch_location == DexRegisterLocation::Kind::kNone) {
233 continue;
234 }
235 DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);
236
237 // Get vreg value from its current location.
238 uint32_t vreg_value;
239 VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
240 number_of_vregs,
241 code_info,
242 encoding));
243 bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
244 vreg,
245 vreg_kind,
246 &vreg_value);
247 CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
248 << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
249 << "dex_pc=" << stack_visitor->GetDexPc() << ", "
250 << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";
251
252 // Copy value to the catch phi's stack slot.
253 int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
254 number_of_vregs,
255 code_info,
256 encoding);
257 ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
258 uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
259 uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
260 *slot_ptr = vreg_value;
261 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200262}
263
Ian Rogers5cf98192014-05-29 21:31:50 -0700264// Prepares deoptimization.
265class DeoptimizeStackVisitor FINAL : public StackVisitor {
266 public:
Andreas Gampe639bdd12015-06-03 11:22:45 -0700267 DeoptimizeStackVisitor(Thread* self,
268 Context* context,
269 QuickExceptionHandler* exception_handler,
270 bool single_frame)
Mathieu Chartier90443472015-07-16 20:32:27 -0700271 SHARED_REQUIRES(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100272 : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100273 exception_handler_(exception_handler),
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700274 prev_shadow_frame_(nullptr),
Andreas Gampe639bdd12015-06-03 11:22:45 -0700275 stacked_shadow_frame_pushed_(false),
276 single_frame_deopt_(single_frame),
277 single_frame_done_(false) {
Ian Rogers5cf98192014-05-29 21:31:50 -0700278 }
279
Mathieu Chartier90443472015-07-16 20:32:27 -0700280 bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700281 exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700282 ArtMethod* method = GetMethod();
Andreas Gampe639bdd12015-06-03 11:22:45 -0700283 if (method == nullptr || single_frame_done_) {
284 // This is the upcall (or the next full frame in single-frame deopt), we remember the frame
285 // and last pc so that we may long jump to them.
Ian Rogers5cf98192014-05-29 21:31:50 -0700286 exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
287 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700288 if (!stacked_shadow_frame_pushed_) {
289 // In case there is no deoptimized shadow frame for this upcall, we still
290 // need to push a nullptr to the stack since there is always a matching pop after
291 // the long jump.
Sebastien Hertz26f72862015-09-15 09:52:07 +0200292 GetThread()->PushStackedShadowFrame(nullptr,
293 StackedShadowFrameType::kDeoptimizationShadowFrame);
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700294 stacked_shadow_frame_pushed_ = true;
295 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700296 return false; // End stack walk.
297 } else if (method->IsRuntimeMethod()) {
298 // Ignore callee save method.
299 DCHECK(method->IsCalleeSaveMethod());
300 return true;
Sebastien Hertz520633b2015-09-08 17:03:36 +0200301 } else if (method->IsNative()) {
302 // If we return from JNI with a pending exception and want to deoptimize, we need to skip
303 // the native method.
304 // The top method is a runtime method, the native method comes next.
305 CHECK_EQ(GetFrameDepth(), 1U);
306 return true;
Ian Rogers5cf98192014-05-29 21:31:50 -0700307 } else {
Andreas Gampe639bdd12015-06-03 11:22:45 -0700308 HandleDeoptimization(method);
309 if (single_frame_deopt_ && !IsInInlinedFrame()) {
310 // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
311 exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(method));
312 single_frame_done_ = true;
313 }
314 return true;
Ian Rogers5cf98192014-05-29 21:31:50 -0700315 }
316 }
317
318 private:
Sebastien Hertzc901dd72014-07-16 11:56:07 +0200319 static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
320 return static_cast<VRegKind>(kinds.at(reg * 2));
321 }
322
Andreas Gampe639bdd12015-06-03 11:22:45 -0700323 void HandleDeoptimization(ArtMethod* m) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700324 const DexFile::CodeItem* code_item = m->GetCodeItem();
Sebastien Hertz520633b2015-09-08 17:03:36 +0200325 CHECK(code_item != nullptr) << "No code item for " << PrettyMethod(m);
Ian Rogers5cf98192014-05-29 21:31:50 -0700326 uint16_t num_regs = code_item->registers_size_;
327 uint32_t dex_pc = GetDexPc();
Sebastien Hertz26f72862015-09-15 09:52:07 +0200328 StackHandleScope<2> hs(GetThread()); // Dex cache, class loader and method.
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700329 mirror::Class* declaring_class = m->GetDeclaringClass();
330 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
331 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
Sebastien Hertz26f72862015-09-15 09:52:07 +0200332 verifier::MethodVerifier verifier(GetThread(), h_dex_cache->GetDexFile(), h_dex_cache,
333 h_class_loader, &m->GetClassDef(), code_item,
334 m->GetDexMethodIndex(), m, m->GetAccessFlags(), true, true,
335 true, true);
Andreas Gampe2e04bb22015-02-10 15:37:27 -0800336 bool verifier_success = verifier.Verify();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700337 CHECK(verifier_success) << PrettyMethod(m);
Mingyao Yang99170c62015-07-06 11:10:37 -0700338 // Check if a shadow frame already exists for debugger's set-local-value purpose.
339 const size_t frame_id = GetFrameId();
340 ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
341 const bool* updated_vregs;
342 if (new_frame == nullptr) {
343 new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, m, dex_pc);
344 updated_vregs = nullptr;
345 } else {
346 updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
347 DCHECK(updated_vregs != nullptr);
348 }
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700349 {
Sebastien Hertz26f72862015-09-15 09:52:07 +0200350 ScopedStackedShadowFramePusher pusher(GetThread(), new_frame,
Sebastien Hertzf7958692015-06-09 14:09:14 +0200351 StackedShadowFrameType::kShadowFrameUnderConstruction);
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700352 const std::vector<int32_t> kinds(verifier.DescribeVRegs(dex_pc));
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000353
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700354 // Markers for dead values, used when the verifier knows a Dex register is undefined,
355 // or when the compiler knows the register has not been initialized, or is not used
356 // anymore in the method.
357 static constexpr uint32_t kDeadValue = 0xEBADDE09;
358 static constexpr uint64_t kLongDeadValue = 0xEBADDE09EBADDE09;
359 for (uint16_t reg = 0; reg < num_regs; ++reg) {
Mingyao Yang99170c62015-07-06 11:10:37 -0700360 if (updated_vregs != nullptr && updated_vregs[reg]) {
361 // Keep the value set by debugger.
362 continue;
363 }
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700364 VRegKind kind = GetVRegKind(reg, kinds);
365 switch (kind) {
366 case kUndefined:
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000367 new_frame->SetVReg(reg, kDeadValue);
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700368 break;
369 case kConstant:
370 new_frame->SetVReg(reg, kinds.at((reg * 2) + 1));
371 break;
372 case kReferenceVReg: {
373 uint32_t value = 0;
374 // Check IsReferenceVReg in case the compiled GC map doesn't agree with the verifier.
375 // We don't want to copy a stale reference into the shadow frame as a reference.
376 // b/20736048
377 if (GetVReg(m, reg, kind, &value) && IsReferenceVReg(m, reg)) {
378 new_frame->SetVRegReference(reg, reinterpret_cast<mirror::Object*>(value));
379 } else {
380 new_frame->SetVReg(reg, kDeadValue);
381 }
382 break;
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000383 }
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700384 case kLongLoVReg:
385 if (GetVRegKind(reg + 1, kinds) == kLongHiVReg) {
386 // Treat it as a "long" register pair.
387 uint64_t value = 0;
388 if (GetVRegPair(m, reg, kLongLoVReg, kLongHiVReg, &value)) {
389 new_frame->SetVRegLong(reg, value);
390 } else {
391 new_frame->SetVRegLong(reg, kLongDeadValue);
392 }
393 } else {
394 uint32_t value = 0;
395 if (GetVReg(m, reg, kind, &value)) {
396 new_frame->SetVReg(reg, value);
397 } else {
398 new_frame->SetVReg(reg, kDeadValue);
399 }
400 }
401 break;
402 case kLongHiVReg:
403 if (GetVRegKind(reg - 1, kinds) == kLongLoVReg) {
404 // Nothing to do: we treated it as a "long" register pair.
405 } else {
406 uint32_t value = 0;
407 if (GetVReg(m, reg, kind, &value)) {
408 new_frame->SetVReg(reg, value);
409 } else {
410 new_frame->SetVReg(reg, kDeadValue);
411 }
412 }
413 break;
414 case kDoubleLoVReg:
415 if (GetVRegKind(reg + 1, kinds) == kDoubleHiVReg) {
416 uint64_t value = 0;
417 if (GetVRegPair(m, reg, kDoubleLoVReg, kDoubleHiVReg, &value)) {
418 // Treat it as a "double" register pair.
419 new_frame->SetVRegLong(reg, value);
420 } else {
421 new_frame->SetVRegLong(reg, kLongDeadValue);
422 }
423 } else {
424 uint32_t value = 0;
425 if (GetVReg(m, reg, kind, &value)) {
426 new_frame->SetVReg(reg, value);
427 } else {
428 new_frame->SetVReg(reg, kDeadValue);
429 }
430 }
431 break;
432 case kDoubleHiVReg:
433 if (GetVRegKind(reg - 1, kinds) == kDoubleLoVReg) {
434 // Nothing to do: we treated it as a "double" register pair.
435 } else {
436 uint32_t value = 0;
437 if (GetVReg(m, reg, kind, &value)) {
438 new_frame->SetVReg(reg, value);
439 } else {
440 new_frame->SetVReg(reg, kDeadValue);
441 }
442 }
443 break;
444 default:
445 uint32_t value = 0;
446 if (GetVReg(m, reg, kind, &value)) {
447 new_frame->SetVReg(reg, value);
448 } else {
449 new_frame->SetVReg(reg, kDeadValue);
450 }
451 break;
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000452 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700453 }
454 }
Mingyao Yang99170c62015-07-06 11:10:37 -0700455 if (updated_vregs != nullptr) {
456 // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
457 // array so this must come after we processed the frame.
458 GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
459 DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
460 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700461 if (prev_shadow_frame_ != nullptr) {
462 prev_shadow_frame_->SetLink(new_frame);
463 } else {
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700464 // Will be popped after the long jump after DeoptimizeStack(),
465 // right before interpreter::EnterInterpreterFromDeoptimize().
466 stacked_shadow_frame_pushed_ = true;
Andreas Gampe639bdd12015-06-03 11:22:45 -0700467 GetThread()->PushStackedShadowFrame(
468 new_frame,
469 single_frame_deopt_
470 ? StackedShadowFrameType::kSingleFrameDeoptimizationShadowFrame
471 : StackedShadowFrameType::kDeoptimizationShadowFrame);
Ian Rogers5cf98192014-05-29 21:31:50 -0700472 }
473 prev_shadow_frame_ = new_frame;
Ian Rogers5cf98192014-05-29 21:31:50 -0700474 }
475
Ian Rogers5cf98192014-05-29 21:31:50 -0700476 QuickExceptionHandler* const exception_handler_;
477 ShadowFrame* prev_shadow_frame_;
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700478 bool stacked_shadow_frame_pushed_;
Andreas Gampe639bdd12015-06-03 11:22:45 -0700479 const bool single_frame_deopt_;
480 bool single_frame_done_;
Ian Rogers5cf98192014-05-29 21:31:50 -0700481
482 DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
483};
484
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200485void QuickExceptionHandler::DeoptimizeStack() {
486 DCHECK(is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700487 if (kDebugExceptionDelivery) {
488 self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
489 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200490
Andreas Gampe639bdd12015-06-03 11:22:45 -0700491 DeoptimizeStackVisitor visitor(self_, context_, this, false);
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200492 visitor.WalkStack(true);
493
494 // Restore deoptimization exception
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000495 self_->SetException(Thread::GetDeoptimizationException());
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100496}
497
Andreas Gampe639bdd12015-06-03 11:22:45 -0700498void QuickExceptionHandler::DeoptimizeSingleFrame() {
499 DCHECK(is_deoptimization_);
500
501 if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
502 LOG(INFO) << "Single-frame deopting:";
503 DumpFramesWithType(self_, true);
504 }
505
506 DeoptimizeStackVisitor visitor(self_, context_, this, true);
507 visitor.WalkStack(true);
508
509 // PC needs to be of the quick-to-interpreter bridge.
510 int32_t offset;
511 #ifdef __LP64__
512 offset = GetThreadOffset<8>(kQuickQuickToInterpreterBridge).Int32Value();
513 #else
514 offset = GetThreadOffset<4>(kQuickQuickToInterpreterBridge).Int32Value();
515 #endif
516 handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
517 reinterpret_cast<uint8_t*>(self_) + offset);
518}
519
520void QuickExceptionHandler::DeoptimizeSingleFrameArchDependentFixup() {
521 // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
522
523 if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
524 // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
525 // change how longjump works.
526 handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
527 reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
528 }
529}
530
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100531// Unwinds all instrumentation stack frame prior to catch handler or upcall.
532class InstrumentationStackVisitor : public StackVisitor {
533 public:
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700534 InstrumentationStackVisitor(Thread* self, size_t frame_depth)
Mathieu Chartier90443472015-07-16 20:32:27 -0700535 SHARED_REQUIRES(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100536 : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Ian Rogerscf7f1912014-10-22 22:06:39 -0700537 frame_depth_(frame_depth),
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100538 instrumentation_frames_to_pop_(0) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700539 CHECK_NE(frame_depth_, kInvalidFrameDepth);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100540 }
541
Mathieu Chartier90443472015-07-16 20:32:27 -0700542 bool VisitFrame() SHARED_REQUIRES(Locks::mutator_lock_) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700543 size_t current_frame_depth = GetFrameDepth();
544 if (current_frame_depth < frame_depth_) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100545 CHECK(GetMethod() != nullptr);
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700546 if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100547 if (!IsInInlinedFrame()) {
548 // We do not count inlined frames, because we do not instrument them. The reason we
549 // include them in the stack walking is the check against `frame_depth_`, which is
550 // given to us by a visitor that visits inlined frames.
551 ++instrumentation_frames_to_pop_;
552 }
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100553 }
554 return true;
555 } else {
556 // We reached the frame of the catch handler or the upcall.
557 return false;
558 }
559 }
560
561 size_t GetInstrumentationFramesToPop() const {
562 return instrumentation_frames_to_pop_;
563 }
564
565 private:
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700566 const size_t frame_depth_;
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100567 size_t instrumentation_frames_to_pop_;
568
569 DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
570};
571
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200572void QuickExceptionHandler::UpdateInstrumentationStack() {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100573 if (method_tracing_active_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700574 InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100575 visitor.WalkStack(true);
576
577 size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
578 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
579 for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
580 instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
581 }
582 }
583}
584
Andreas Gampe639bdd12015-06-03 11:22:45 -0700585void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100586 // Place context back on thread so it will be available when we continue.
587 self_->ReleaseLongJumpContext(context_);
588 context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
589 CHECK_NE(handler_quick_frame_pc_, 0u);
590 context_->SetPC(handler_quick_frame_pc_);
Andreas Gampe639bdd12015-06-03 11:22:45 -0700591 context_->SetArg0(handler_quick_arg0_);
592 if (smash_caller_saves) {
593 context_->SmashCallerSaves();
594 }
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100595 context_->DoLongJump();
Andreas Gampe794ad762015-02-23 08:12:24 -0800596 UNREACHABLE();
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100597}
598
Andreas Gampe639bdd12015-06-03 11:22:45 -0700599// Prints out methods with their type of frame.
600class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
601 public:
602 DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
603 SHARED_REQUIRES(Locks::mutator_lock_)
604 : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
605 show_details_(show_details) {}
606
607 bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
608 ArtMethod* method = GetMethod();
609 if (show_details_) {
610 LOG(INFO) << "|> pc = " << std::hex << GetCurrentQuickFramePc();
611 LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
612 if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
613 LOG(INFO) << "|> ret = " << std::hex << GetReturnPc();
614 }
615 }
616 if (method == nullptr) {
617 // Transition, do go on, we want to unwind over bridges, all the way.
618 if (show_details_) {
619 LOG(INFO) << "N <transition>";
620 }
621 return true;
622 } else if (method->IsRuntimeMethod()) {
623 if (show_details_) {
624 LOG(INFO) << "R " << PrettyMethod(method, true);
625 }
626 return true;
627 } else {
628 bool is_shadow = GetCurrentShadowFrame() != nullptr;
629 LOG(INFO) << (is_shadow ? "S" : "Q")
630 << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
631 << " "
632 << PrettyMethod(method, true);
633 return true; // Go on.
634 }
635 }
636
637 private:
638 bool show_details_;
639
640 DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
641};
642
643void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
644 DumpFramesWithTypeStackVisitor visitor(self, details);
645 visitor.WalkStack(true);
646}
647
}  // namespace art