blob: 9e1109ea8587e531572a3c73c582e69938150a86 [file] [log] [blame]
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020017#include "quick_exception_handler.h"
18
Ian Rogerse63db272014-07-15 15:36:11 -070019#include "arch/context.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Andreas Gampe542451c2016-07-26 09:02:02 -070021#include "base/enums.h"
Ian Rogers5cf98192014-05-29 21:31:50 -070022#include "dex_instruction.h"
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020023#include "entrypoints/entrypoint_utils.h"
Andreas Gampe639bdd12015-06-03 11:22:45 -070024#include "entrypoints/quick/quick_entrypoints_enum.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070025#include "entrypoints/runtime_asm_entrypoints.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070026#include "handle_scope-inl.h"
Nicolas Geoffrayb88d59e2016-02-17 11:31:49 +000027#include "jit/jit.h"
28#include "jit/jit_code_cache.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070029#include "mirror/class-inl.h"
30#include "mirror/class_loader.h"
31#include "mirror/throwable.h"
Nicolas Geoffray524e7ea2015-10-16 17:13:34 +010032#include "oat_quick_method_header.h"
Nicolas Geoffray6bc43742015-10-12 18:11:10 +010033#include "stack_map.h"
Ian Rogers5cf98192014-05-29 21:31:50 -070034#include "verifier/method_verifier.h"
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010035
36namespace art {
37
// When true, exception delivery and deoptimization dump extra diagnostics
// (stack dumps, resolved handler method/line) via LOG(INFO).
static constexpr bool kDebugExceptionDelivery = false;
// Sentinel for handler_frame_depth_ before any stack walk has recorded a
// real depth; InstrumentationStackVisitor CHECKs against it.
static constexpr size_t kInvalidFrameDepth = 0xffffffff;
// Sets up an exception/deoptimization handler for `self`. All handler_* state
// is filled in later by the stack-walk visitors; method tracing is considered
// active for any deoptimization or when instrumentation exit stubs are
// installed, so the instrumentation stack can be unwound accordingly.
QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010056
// Finds catch handler: walks quick frames from the throw site outwards,
// recording the frame/pc/method of the first matching catch block (or of the
// upcall if none is found) into the QuickExceptionHandler.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

  // Called once per frame; returns false to stop the walk.
  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall, we remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check exception handler is set up for the unhandled exception handler
        // case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  // Looks up a catch block for the exception's class in `method` at the
  // current dex pc. Native frames have no dex pc and thus no try items.
  bool HandleTryItems(ArtMethod* method)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        // Found a handler in this frame: record method, dex pc and the native
        // pc of the catch entry point, then stop walking.
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_catch_handler */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception we're looking for the catch block of.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler we're visiting for.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};
141
// Walks the stack looking for a catch block for `exception`, recording the
// target frame/pc in this handler. If the handler lives in optimized code,
// also copies the throwing frame's vreg values into the catch block's
// expected stack slots (catch phis).
void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  // Keep the exception alive in a handle across the stack walk (which may
  // suspend for GC).
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find catch handler.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // If the handler is in optimized code, we need to set the catch environment.
  if (*handler_quick_frame_ != nullptr &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsOptimized()) {
    SetCatchEnvironmentForOptimizedHandler(&visitor);
  }
}
181
182static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
183 // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
184 // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
185 // distinguish between core/FPU registers and low/high bits on 64-bit.
186 switch (kind) {
187 case DexRegisterLocation::Kind::kConstant:
188 case DexRegisterLocation::Kind::kInStack:
189 // VRegKind is ignored.
190 return VRegKind::kUndefined;
191
192 case DexRegisterLocation::Kind::kInRegister:
193 // Selects core register. For 64-bit registers, selects low 32 bits.
194 return VRegKind::kLongLoVReg;
195
196 case DexRegisterLocation::Kind::kInRegisterHigh:
197 // Selects core register. For 64-bit registers, selects high 32 bits.
198 return VRegKind::kLongHiVReg;
199
200 case DexRegisterLocation::Kind::kInFpuRegister:
201 // Selects FPU register. For 64-bit registers, selects low 32 bits.
202 return VRegKind::kDoubleLoVReg;
203
204 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
205 // Selects FPU register. For 64-bit registers, selects high 32 bits.
206 return VRegKind::kDoubleHiVReg;
207
208 default:
David Srbecky7dc11782016-02-25 13:23:56 +0000209 LOG(FATAL) << "Unexpected vreg location " << kind;
David Brazdil77a48ae2015-09-15 12:34:04 +0000210 UNREACHABLE();
211 }
212}
213
// Copies live dex-register values from the throwing stack map into the stack
// slots that the optimized catch handler's phis read from. Must only be
// called when the handler frame is optimized code (see FindCatch).
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
  }

  const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
  CodeInfo code_info = handler_method_header_->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
  if (!catch_vreg_map.IsValid()) {
    // No live dex registers at the catch entry; nothing to copy.
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
  DCHECK(throw_vreg_map.IsValid());

  // Copy values between them.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location =
        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      // Register is dead at the catch entry; skip it.
      continue;
    }
    // Catch phis only ever live in stack slots.
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
                                                                   number_of_vregs,
                                                                   code_info,
                                                                   encoding));
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
                            << "dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
                                                               number_of_vregs,
                                                               code_info,
                                                               encoding);
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}
279
// Prepares deoptimization: walks quick frames, materializing a ShadowFrame
// (interpreter frame) for each compiled Java frame so execution can resume in
// the interpreter. In single-frame mode the walk stops after the first
// non-inlined compiled frame.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  // Method of the single deoptimized frame; only set in single-frame mode.
  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  // Quick method header of the single deoptimized frame (single-frame mode).
  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() REQUIRES_SHARED(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << art::PrettyMethod(GetMethod(), false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  // Called once per frame; returns false to stop the walk.
  bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method.
      // The top method is a runtime method, the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsDeoptimizeable(GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, Single-frame deoptimization triggered
      // from compiled code is always allowed since HDeoptimize always saves the full environment.
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      const size_t num_regs = method->GetCodeItem()->registers_size_;
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array so this must come after we processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  // Fills `new_frame` with the vreg values of the current optimized frame,
  // decoded from its stack map. Vregs flagged in `updated_vregs` keep the
  // value the debugger already wrote into the shadow frame.
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info = method_header->GetOptimizedCodeInfo();
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
    const size_t number_of_vregs = m->GetCodeItem()->registers_size_;
    uint32_t register_mask = stack_map.GetRegisterMask(encoding.stack_map_encoding);
    // For an inlined frame, the vregs live in the inline info of the caller's
    // stack map rather than the stack map's own dex register map.
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetDexRegisterMapAtDepth(GetCurrentInliningDepth() - 1,
                                             code_info.GetInlineInfoOf(stack_map, encoding),
                                             encoding,
                                             number_of_vregs)
        : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_vregs);

    if (!vreg_map.IsValid()) {
      // No live dex registers at this point; nothing to transfer.
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location =
          vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map.GetStackOffsetInBytes(vreg,
                                                                number_of_vregs,
                                                                code_info,
                                                                encoding);
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
          // The stack mask flags which slots hold object references.
          uint32_t bit = (offset >> 2);
          if (stack_map.GetNumberOfStackMaskBits(encoding.stack_map_encoding) > bit &&
              stack_map.GetStackMaskBit(encoding.stack_map_encoding, bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map.GetMachineRegister(vreg, number_of_vregs, code_info, encoding);
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          // The register mask flags which core registers hold references.
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map.GetConstant(vreg, number_of_vregs, code_info, encoding);
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL)
              << "Unexpected location kind "
              << vreg_map.GetLocationInternalKind(vreg,
                                                  number_of_vregs,
                                                  code_info,
                                                  encoding);
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  // NOTE(review): appears unused within this file — likely a leftover from
  // the removed quickened-deopt path; verify no other callers before removal.
  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  QuickExceptionHandler* const exception_handler_;
  // Most recently created shadow frame, linked to its caller's frame.
  ShadowFrame* prev_shadow_frame_;
  // Whether a shadow frame (possibly nullptr) was pushed for the long-jump pop.
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  // Last visited Java/native method; becomes the handler's quick arg0.
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};
503
// Chooses the long-jump continuation after deoptimization: back through the
// invoke stub when the whole fragment was unwound, otherwise into the
// quick-to-interpreter bridge.
void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // PC needs to be of the quick-to-interpreter bridge.
    int32_t offset;
    offset = GetThreadOffset<kRuntimePointerSize>(kQuickQuickToInterpreterBridge).Int32Value();
    // Read the bridge entrypoint out of the thread-local entrypoint table.
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}
518
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200519void QuickExceptionHandler::DeoptimizeStack() {
520 DCHECK(is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700521 if (kDebugExceptionDelivery) {
522 self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
523 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200524
Andreas Gampe639bdd12015-06-03 11:22:45 -0700525 DeoptimizeStackVisitor visitor(self_, context_, this, false);
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200526 visitor.WalkStack(true);
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700527 PrepareForLongJumpToInvokeStubOrInterpreterBridge();
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100528}
529
// Deoptimizes only the top compiled frame (explicit HDeoptimize from compiled
// code), then redirects the deoptimized method away from its compiled code.
void QuickExceptionHandler::DeoptimizeSingleFrame() {
  DCHECK(is_deoptimization_);

  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting:";
    DumpFramesWithType(self_, true);
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  DCHECK(deopt_method != nullptr);
  if (Runtime::Current()->UseJitCompilation()) {
    // Drop the JIT-compiled code so the method is recompiled or interpreted.
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Transfer the code to interpreter.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}
555
// Fixes up the handler frame after a partial-fragment deoptimization:
// reinstalls the real return pc over any instrumentation stub slot and
// adjusts the frame pointer for ISAs that keep the return address on stack.
void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return pc on stack, in case instrumentation stub is stored there,
  // so that the interpreter bridge code can return to the right place.
  if (return_pc != 0) {
    uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
    CHECK(pc_addr != nullptr);
    // The return pc slot sits one word below the frame's method pointer.
    pc_addr--;
    *reinterpret_cast<uintptr_t*>(pc_addr) = return_pc;
  }

  // Architecture-dependent work. This is to get the LR right for x86 and x86-64.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}
575
// Unwinds all instrumentation stack frame prior to catch handler or upcall.
// Counts how many instrumentation exit-stub frames sit above `frame_depth`
// (the depth of the catch handler/upcall found earlier) so the caller can pop
// exactly that many entries off the instrumentation stack.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      REQUIRES_SHARED(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() REQUIRES_SHARED(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      // A return pc pointing at the instrumentation exit stub marks an
      // instrumented frame.
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  // Depth of the catch handler/upcall frame; frames above it are counted.
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};
616
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700617uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
618 uintptr_t return_pc = 0;
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100619 if (method_tracing_active_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700620 InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100621 visitor.WalkStack(true);
622
623 size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
624 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
625 for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700626 return_pc = instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100627 }
628 }
Mingyao Yangf711f2c2016-05-23 12:29:39 -0700629 return return_pc;
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100630}
631
// Transfers control to the previously computed handler frame/pc by restoring
// the saved register context and long-jumping into it. Never returns.
void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  // A zero pc would mean no handler was found; that must not happen here.
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    // NOTE(review): presumably clobbers caller-saved registers so stale values
    // cannot be observed at the jump target — confirm in Context::SmashCallerSaves.
    context_->SmashCallerSaves();
  }
  context_->DoLongJump();
  UNREACHABLE();
}
645
Andreas Gampe639bdd12015-06-03 11:22:45 -0700646// Prints out methods with their type of frame.
647class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
648 public:
Chih-Hung Hsieh471118e2016-04-29 14:27:41 -0700649 explicit DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700650 REQUIRES_SHARED(Locks::mutator_lock_)
Andreas Gampe639bdd12015-06-03 11:22:45 -0700651 : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
652 show_details_(show_details) {}
653
Andreas Gampebdf7f1c2016-08-30 16:38:47 -0700654 bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
Andreas Gampe639bdd12015-06-03 11:22:45 -0700655 ArtMethod* method = GetMethod();
656 if (show_details_) {
657 LOG(INFO) << "|> pc = " << std::hex << GetCurrentQuickFramePc();
658 LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
659 if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
660 LOG(INFO) << "|> ret = " << std::hex << GetReturnPc();
661 }
662 }
663 if (method == nullptr) {
664 // Transition, do go on, we want to unwind over bridges, all the way.
665 if (show_details_) {
666 LOG(INFO) << "N <transition>";
667 }
668 return true;
669 } else if (method->IsRuntimeMethod()) {
670 if (show_details_) {
671 LOG(INFO) << "R " << PrettyMethod(method, true);
672 }
673 return true;
674 } else {
675 bool is_shadow = GetCurrentShadowFrame() != nullptr;
676 LOG(INFO) << (is_shadow ? "S" : "Q")
677 << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
678 << " "
679 << PrettyMethod(method, true);
680 return true; // Go on.
681 }
682 }
683
684 private:
685 bool show_details_;
686
687 DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
688};
689
690void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
691 DumpFramesWithTypeStackVisitor visitor(self, details);
692 visitor.WalkStack(true);
693}
694
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100695} // namespace art