/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "quick_exception_handler.h"

#include "arch/context.h"
#include "art_method-inl.h"
#include "dex_instruction.h"
#include "entrypoints/entrypoint_utils.h"
#include "entrypoints/quick/quick_entrypoints_enum.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "handle_scope-inl.h"
#include "jit/jit.h"
#include "jit/jit_code_cache.h"
#include "mirror/class-inl.h"
#include "mirror/class_loader.h"
#include "mirror/throwable.h"
#include "oat_quick_method_header.h"
#include "stack_map.h"
#include "verifier/method_verifier.h"

namespace art {

static constexpr bool kDebugExceptionDelivery = false;
static constexpr size_t kInvalidFrameDepth = 0xffffffff;

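// A QuickExceptionHandler is constructed on the throwing thread for one of
// two purposes, selected by |is_deoptimization|: delivering an exception to
// a catch handler, or deoptimizing compiled frames back into the
// interpreter. Either way it walks the stack to find the frame to transfer
// control to, then long jumps there via DoLongJump().
//
// A rough sketch of the exception-delivery sequence (the actual call sites
// live elsewhere in the runtime):
//
//   QuickExceptionHandler handler(self, /* is_deoptimization */ false);
//   handler.FindCatch(exception);          // Walk the stack, record target.
//   handler.UpdateInstrumentationStack();  // Pop instrumentation frames.
//   handler.DoLongJump();                  // Does not return.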
QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
    : self_(self),
      context_(self->GetLongJumpContext()),
      is_deoptimization_(is_deoptimization),
      method_tracing_active_(is_deoptimization ||
                             Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
      handler_quick_frame_(nullptr),
      handler_quick_frame_pc_(0),
      handler_method_header_(nullptr),
      handler_quick_arg0_(0),
      handler_method_(nullptr),
      handler_dex_pc_(0),
      clear_exception_(false),
      handler_frame_depth_(kInvalidFrameDepth),
      full_fragment_done_(false) {}

// Finds the catch handler for a thrown exception.
class CatchBlockStackVisitor FINAL : public StackVisitor {
 public:
  CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
                         QuickExceptionHandler* exception_handler)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_(exception),
        exception_handler_(exception_handler) {
  }

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    if (method == nullptr) {
      // This is the upcall; remember the frame and last pc so that we may long jump to them.
      exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
      exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
      exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
      uint32_t next_dex_pc;
      ArtMethod* next_art_method;
      bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
      // Report the method that did the down call as the handler.
      exception_handler_->SetHandlerDexPc(next_dex_pc);
      exception_handler_->SetHandlerMethod(next_art_method);
      if (!has_next) {
        // No next method? Check that the exception handler is set up for the
        // unhandled-exception case.
        DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
        DCHECK(nullptr == exception_handler_->GetHandlerMethod());
      }
      return false;  // End stack walk.
    }
    if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    }
    return HandleTryItems(method);
  }

 private:
  bool HandleTryItems(ArtMethod* method)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    uint32_t dex_pc = DexFile::kDexNoIndex;
    if (!method->IsNative()) {
      dex_pc = GetDexPc();
    }
    if (dex_pc != DexFile::kDexNoIndex) {
      bool clear_exception = false;
      StackHandleScope<1> hs(GetThread());
      Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
      uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
      exception_handler_->SetClearException(clear_exception);
      if (found_dex_pc != DexFile::kDexNoIndex) {
        exception_handler_->SetHandlerMethod(method);
        exception_handler_->SetHandlerDexPc(found_dex_pc);
        exception_handler_->SetHandlerQuickFramePc(
            GetCurrentOatQuickMethodHeader()->ToNativeQuickPc(
                method, found_dex_pc, /* is_catch_handler */ true));
        exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
        exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
        return false;  // End stack walk.
      } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
        // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
        size_t frame_id = GetFrameId();
        ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
        if (frame != nullptr) {
          // We will not execute this shadow frame so we can safely deallocate it.
          GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
          ShadowFrame::DeleteDeoptimizedFrame(frame);
        }
      }
    }
    return true;  // Continue stack walk.
  }

  // The exception for which we are searching for a catch block.
  Handle<mirror::Throwable>* exception_;
  // The quick exception handler on whose behalf we visit the stack.
  QuickExceptionHandler* const exception_handler_;

  DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
};

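// Walks the stack with CatchBlockStackVisitor to locate a catch handler for
// |exception|, recording the target frame, pc, and method header for the
// later long jump.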
void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
  DCHECK(!is_deoptimization_);
  if (kDebugExceptionDelivery) {
    mirror::String* msg = exception->GetDetailMessage();
    std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
    self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
                     << ": " << str_msg << "\n");
  }
  StackHandleScope<1> hs(self_);
  Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));

  // Walk the stack to find the catch handler.
  CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
  visitor.WalkStack(true);

  if (kDebugExceptionDelivery) {
    if (*handler_quick_frame_ == nullptr) {
      LOG(INFO) << "Handler is upcall";
    }
    if (handler_method_ != nullptr) {
      const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
      int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
      LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
    }
  }
  if (clear_exception_) {
    // Exception was cleared as part of delivery.
    DCHECK(!self_->IsExceptionPending());
  } else {
    // Put exception back in root set with clear throw location.
    self_->SetException(exception_ref.Get());
  }
  // If the handler is in optimized code, we need to set the catch environment.
  if (*handler_quick_frame_ != nullptr &&
      handler_method_header_ != nullptr &&
      handler_method_header_->IsOptimized()) {
    SetCatchEnvironmentForOptimizedHandler(&visitor);
  }
}

static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
  // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
  // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
  // distinguish between core/FPU registers and low/high bits on 64-bit.
  switch (kind) {
    case DexRegisterLocation::Kind::kConstant:
    case DexRegisterLocation::Kind::kInStack:
      // VRegKind is ignored.
      return VRegKind::kUndefined;

    case DexRegisterLocation::Kind::kInRegister:
      // Selects core register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kLongLoVReg;

    case DexRegisterLocation::Kind::kInRegisterHigh:
      // Selects core register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kLongHiVReg;

    case DexRegisterLocation::Kind::kInFpuRegister:
      // Selects FPU register. For 64-bit registers, selects low 32 bits.
      return VRegKind::kDoubleLoVReg;

    case DexRegisterLocation::Kind::kInFpuRegisterHigh:
      // Selects FPU register. For 64-bit registers, selects high 32 bits.
      return VRegKind::kDoubleHiVReg;

    default:
      LOG(FATAL) << "Unexpected vreg location " << kind;
      UNREACHABLE();
  }
}

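// In optimized (stack map based) code, values that are live into a catch
// block flow through catch phis whose spill slots the handler expects to be
// populated. Copy each such vreg from its location at the throwing
// instruction into the corresponding slot of the handler's frame.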
void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
  DCHECK(!is_deoptimization_);
  DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
  DCHECK(handler_method_ != nullptr && handler_method_header_->IsOptimized());

  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
  }

  const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
  CodeInfo code_info = handler_method_header_->GetOptimizedCodeInfo();
  CodeInfoEncoding encoding = code_info.ExtractEncoding();

  // Find stack map of the catch block.
  StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
  DCHECK(catch_stack_map.IsValid());
  DexRegisterMap catch_vreg_map =
      code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
  if (!catch_vreg_map.IsValid()) {
    return;
  }

  // Find stack map of the throwing instruction.
  StackMap throw_stack_map =
      code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
  DCHECK(throw_stack_map.IsValid());
  DexRegisterMap throw_vreg_map =
      code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
  DCHECK(throw_vreg_map.IsValid());

  // Copy each live vreg's value from its location at the throw point to the catch block's slot.
  for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
    DexRegisterLocation::Kind catch_location =
        catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
    if (catch_location == DexRegisterLocation::Kind::kNone) {
      continue;
    }
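    // Catch phi values are expected to be spilled to stack slots by the
    // compiler, so any live location here must be kInStack.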
    DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);

    // Get vreg value from its current location.
    uint32_t vreg_value;
    VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
                                                                   number_of_vregs,
                                                                   code_info,
                                                                   encoding));
    bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
                                                   vreg,
                                                   vreg_kind,
                                                   &vreg_value);
    CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
                            << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
                            << "dex_pc=" << stack_visitor->GetDexPc() << ", "
                            << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";

    // Copy value to the catch phi's stack slot.
    int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
                                                               number_of_vregs,
                                                               code_info,
                                                               encoding);
    ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
    uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
    uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
    *slot_ptr = vreg_value;
  }
}

// Prepares deoptimization.
class DeoptimizeStackVisitor FINAL : public StackVisitor {
 public:
  DeoptimizeStackVisitor(Thread* self,
                         Context* context,
                         QuickExceptionHandler* exception_handler,
                         bool single_frame)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        exception_handler_(exception_handler),
        prev_shadow_frame_(nullptr),
        stacked_shadow_frame_pushed_(false),
        single_frame_deopt_(single_frame),
        single_frame_done_(false),
        single_frame_deopt_method_(nullptr),
        single_frame_deopt_quick_method_header_(nullptr),
        callee_method_(nullptr) {
  }

  ArtMethod* GetSingleFrameDeoptMethod() const {
    return single_frame_deopt_method_;
  }

  const OatQuickMethodHeader* GetSingleFrameDeoptQuickMethodHeader() const {
    return single_frame_deopt_quick_method_header_;
  }

  void FinishStackWalk() SHARED_REQUIRES(Locks::mutator_lock_) {
    // This is the upcall, or the next full frame in single-frame deopt, or the
    // code isn't deoptimizeable. We remember the frame and last pc so that we
    // may long jump to them.
    exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
    exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
    exception_handler_->SetHandlerMethodHeader(GetCurrentOatQuickMethodHeader());
    if (!stacked_shadow_frame_pushed_) {
      // In case there is no deoptimized shadow frame for this upcall, we still
      // need to push a nullptr to the stack since there is always a matching pop after
      // the long jump.
      GetThread()->PushStackedShadowFrame(nullptr,
                                          StackedShadowFrameType::kDeoptimizationShadowFrame);
      stacked_shadow_frame_pushed_ = true;
    }
    if (GetMethod() == nullptr) {
      exception_handler_->SetFullFragmentDone(true);
    } else {
      CHECK(callee_method_ != nullptr) << art::PrettyMethod(GetMethod(), false);
      exception_handler_->SetHandlerQuickArg0(reinterpret_cast<uintptr_t>(callee_method_));
    }
  }

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
    ArtMethod* method = GetMethod();
    if (method == nullptr || single_frame_done_) {
      FinishStackWalk();
      return false;  // End stack walk.
    } else if (method->IsRuntimeMethod()) {
      // Ignore callee save method.
      DCHECK(method->IsCalleeSaveMethod());
      return true;
    } else if (method->IsNative()) {
      // If we return from JNI with a pending exception and want to deoptimize, we need to skip
      // the native method.
      // The top method is a runtime method, the native method comes next.
      CHECK_EQ(GetFrameDepth(), 1U);
      callee_method_ = method;
      return true;
    } else if (!single_frame_deopt_ &&
               !Runtime::Current()->IsDeoptimizeable(GetCurrentQuickFramePc())) {
      // We hit some code that's not deoptimizeable. However, single-frame deoptimization triggered
      // from compiled code is always allowed, since HDeoptimize always saves the full environment.
      FinishStackWalk();
      return false;  // End stack walk.
    } else {
      // Check if a shadow frame already exists for the debugger's set-local-value purpose.
      const size_t frame_id = GetFrameId();
      ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
      const bool* updated_vregs;
      const size_t num_regs = method->GetCodeItem()->registers_size_;
      if (new_frame == nullptr) {
        new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, method, GetDexPc());
        updated_vregs = nullptr;
      } else {
        updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
        DCHECK(updated_vregs != nullptr);
      }
      HandleOptimizingDeoptimization(method, new_frame, updated_vregs);
      if (updated_vregs != nullptr) {
        // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
        // array so this must come after we processed the frame.
        GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
        DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
      }
      if (prev_shadow_frame_ != nullptr) {
        prev_shadow_frame_->SetLink(new_frame);
      } else {
        // Will be popped after the long jump after DeoptimizeStack(),
        // right before interpreter::EnterInterpreterFromDeoptimize().
        stacked_shadow_frame_pushed_ = true;
        GetThread()->PushStackedShadowFrame(
            new_frame, StackedShadowFrameType::kDeoptimizationShadowFrame);
      }
      prev_shadow_frame_ = new_frame;

      if (single_frame_deopt_ && !IsInInlinedFrame()) {
        // Single-frame deopt ends at the first non-inlined frame and needs to store that method.
        single_frame_done_ = true;
        single_frame_deopt_method_ = method;
        single_frame_deopt_quick_method_header_ = GetCurrentOatQuickMethodHeader();
      }
      callee_method_ = method;
      return true;
    }
  }

 private:
  void HandleOptimizingDeoptimization(ArtMethod* m,
                                      ShadowFrame* new_frame,
                                      const bool* updated_vregs)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    const OatQuickMethodHeader* method_header = GetCurrentOatQuickMethodHeader();
    CodeInfo code_info = method_header->GetOptimizedCodeInfo();
    uintptr_t native_pc_offset = method_header->NativeQuickPcOffset(GetCurrentQuickFramePc());
    CodeInfoEncoding encoding = code_info.ExtractEncoding();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset, encoding);
    const size_t number_of_vregs = m->GetCodeItem()->registers_size_;
    uint32_t register_mask = stack_map.GetRegisterMask(encoding.stack_map_encoding);
    DexRegisterMap vreg_map = IsInInlinedFrame()
        ? code_info.GetDexRegisterMapAtDepth(GetCurrentInliningDepth() - 1,
                                             code_info.GetInlineInfoOf(stack_map, encoding),
                                             encoding,
                                             number_of_vregs)
        : code_info.GetDexRegisterMapOf(stack_map, encoding, number_of_vregs);

    if (!vreg_map.IsValid()) {
      return;
    }

    for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
      if (updated_vregs != nullptr && updated_vregs[vreg]) {
        // Keep the value set by debugger.
        continue;
      }

      DexRegisterLocation::Kind location =
          vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
      static constexpr uint32_t kDeadValue = 0xEBADDE09;
      uint32_t value = kDeadValue;
      bool is_reference = false;

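      // Read the vreg's value out of the physical frame according to its
      // location, noting whether it holds an object reference so it can be
      // stored in the shadow frame as a reference.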
      switch (location) {
        case DexRegisterLocation::Kind::kInStack: {
          const int32_t offset = vreg_map.GetStackOffsetInBytes(vreg,
                                                                number_of_vregs,
                                                                code_info,
                                                                encoding);
          const uint8_t* addr = reinterpret_cast<const uint8_t*>(GetCurrentQuickFrame()) + offset;
          value = *reinterpret_cast<const uint32_t*>(addr);
          uint32_t bit = (offset >> 2);
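          // The stack mask has one bit per 4-byte stack slot (hence
          // offset >> 2); a set bit marks a slot holding an object reference.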
          if (stack_map.GetNumberOfStackMaskBits(encoding.stack_map_encoding) > bit &&
              stack_map.GetStackMaskBit(encoding.stack_map_encoding, bit)) {
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kInRegister:
        case DexRegisterLocation::Kind::kInRegisterHigh:
        case DexRegisterLocation::Kind::kInFpuRegister:
        case DexRegisterLocation::Kind::kInFpuRegisterHigh: {
          uint32_t reg = vreg_map.GetMachineRegister(vreg, number_of_vregs, code_info, encoding);
          bool result = GetRegisterIfAccessible(reg, ToVRegKind(location), &value);
          CHECK(result);
          if (location == DexRegisterLocation::Kind::kInRegister) {
            if (((1u << reg) & register_mask) != 0) {
              is_reference = true;
            }
          }
          break;
        }
        case DexRegisterLocation::Kind::kConstant: {
          value = vreg_map.GetConstant(vreg, number_of_vregs, code_info, encoding);
          if (value == 0) {
            // Make it a reference for extra safety.
            is_reference = true;
          }
          break;
        }
        case DexRegisterLocation::Kind::kNone: {
          break;
        }
        default: {
          LOG(FATAL)
              << "Unexpected location kind "
              << vreg_map.GetLocationInternalKind(vreg,
                                                  number_of_vregs,
                                                  code_info,
                                                  encoding);
          UNREACHABLE();
        }
      }
      if (is_reference) {
        new_frame->SetVRegReference(vreg, reinterpret_cast<mirror::Object*>(value));
      } else {
        new_frame->SetVReg(vreg, value);
      }
    }
  }

  static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
    return static_cast<VRegKind>(kinds.at(reg * 2));
  }

  QuickExceptionHandler* const exception_handler_;
  ShadowFrame* prev_shadow_frame_;
  bool stacked_shadow_frame_pushed_;
  const bool single_frame_deopt_;
  bool single_frame_done_;
  ArtMethod* single_frame_deopt_method_;
  const OatQuickMethodHeader* single_frame_deopt_quick_method_header_;
  ArtMethod* callee_method_;

  DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
};

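// After a deoptimization-related stack walk, control resumes in one of two
// places: if the full invoke fragment was deoptimized, back in the invoke
// stub, where ArtMethod::Invoke() recognizes the special deoptimization
// exception; otherwise in the quick-to-interpreter bridge, which re-executes
// the deoptimized frames in the interpreter.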
void QuickExceptionHandler::PrepareForLongJumpToInvokeStubOrInterpreterBridge() {
  if (full_fragment_done_) {
    // Restore deoptimization exception. When returning from the invoke stub,
    // ArtMethod::Invoke() will see the special exception to know deoptimization
    // is needed.
    self_->SetException(Thread::GetDeoptimizationException());
  } else {
    // The PC needs to point to the quick-to-interpreter bridge.
    int32_t offset;
#ifdef __LP64__
    offset = GetThreadOffset<8>(kQuickQuickToInterpreterBridge).Int32Value();
#else
    offset = GetThreadOffset<4>(kQuickQuickToInterpreterBridge).Int32Value();
#endif
    handler_quick_frame_pc_ = *reinterpret_cast<uintptr_t*>(
        reinterpret_cast<uint8_t*>(self_) + offset);
  }
}

void QuickExceptionHandler::DeoptimizeStack() {
  DCHECK(is_deoptimization_);
  if (kDebugExceptionDelivery) {
    self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, false);
  visitor.WalkStack(true);
  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

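// Deoptimizes only the top non-inlined frame, as requested by an explicit
// HDeoptimize emitted in compiled code. The deoptimized method's compiled
// code is then invalidated (under the JIT) or redirected to the interpreter
// bridge so that the stale code is not entered again.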
void QuickExceptionHandler::DeoptimizeSingleFrame() {
  DCHECK(is_deoptimization_);

  if (VLOG_IS_ON(deopt) || kDebugExceptionDelivery) {
    LOG(INFO) << "Single-frame deopting:";
    DumpFramesWithType(self_, true);
  }

  DeoptimizeStackVisitor visitor(self_, context_, this, true);
  visitor.WalkStack(true);

  // Compiled code made an explicit deoptimization.
  ArtMethod* deopt_method = visitor.GetSingleFrameDeoptMethod();
  DCHECK(deopt_method != nullptr);
  if (Runtime::Current()->UseJitCompilation()) {
    Runtime::Current()->GetJit()->GetCodeCache()->InvalidateCompiledCodeFor(
        deopt_method, visitor.GetSingleFrameDeoptQuickMethodHeader());
  } else {
    // Redirect the method's code pointer to the interpreter bridge.
    Runtime::Current()->GetInstrumentation()->UpdateMethodsCode(
        deopt_method, GetQuickToInterpreterBridge());
  }

  PrepareForLongJumpToInvokeStubOrInterpreterBridge();
}

void QuickExceptionHandler::DeoptimizePartialFragmentFixup(uintptr_t return_pc) {
  // At this point, the instrumentation stack has been updated. We need to install
  // the real return pc on the stack, in case the instrumentation stub is stored there,
  // so that the interpreter bridge code can return to the right place.
  if (return_pc != 0) {
    uintptr_t* pc_addr = reinterpret_cast<uintptr_t*>(handler_quick_frame_);
    CHECK(pc_addr != nullptr);
    pc_addr--;
    *reinterpret_cast<uintptr_t*>(pc_addr) = return_pc;
  }

  // Architecture-dependent work. This is to get the return address right for x86 and x86-64.
  if (kRuntimeISA == InstructionSet::kX86 || kRuntimeISA == InstructionSet::kX86_64) {
    // On x86, the return address is on the stack, so just reuse it. Otherwise we would have to
    // change how longjump works.
    handler_quick_frame_ = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(handler_quick_frame_) - sizeof(void*));
  }
}

// Unwinds all instrumentation stack frames prior to the catch handler or upcall.
class InstrumentationStackVisitor : public StackVisitor {
 public:
  InstrumentationStackVisitor(Thread* self, size_t frame_depth)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        frame_depth_(frame_depth),
        instrumentation_frames_to_pop_(0) {
    CHECK_NE(frame_depth_, kInvalidFrameDepth);
  }

  bool VisitFrame() SHARED_REQUIRES(Locks::mutator_lock_) {
    size_t current_frame_depth = GetFrameDepth();
    if (current_frame_depth < frame_depth_) {
      CHECK(GetMethod() != nullptr);
      if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
        if (!IsInInlinedFrame()) {
          // We do not count inlined frames, because we do not instrument them. The reason we
          // include them in the stack walking is the check against `frame_depth_`, which is
          // given to us by a visitor that visits inlined frames.
          ++instrumentation_frames_to_pop_;
        }
      }
      return true;
    } else {
      // We reached the frame of the catch handler or the upcall.
      return false;
    }
  }

  size_t GetInstrumentationFramesToPop() const {
    return instrumentation_frames_to_pop_;
  }

 private:
  const size_t frame_depth_;
  size_t instrumentation_frames_to_pop_;

  DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
};

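// Pops the instrumentation exit frames of every frame about to be unwound
// and returns the last original return pc, which the caller may need to
// reinstall on the stack (see DeoptimizePartialFragmentFixup).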
uintptr_t QuickExceptionHandler::UpdateInstrumentationStack() {
  uintptr_t return_pc = 0;
  if (method_tracing_active_) {
    InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
    visitor.WalkStack(true);

    size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
    instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
    for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
      return_pc = instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
    }
  }
  return return_pc;
}

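// Transfers control to the frame and pc recorded by the stack walk,
// optionally clobbering caller-save registers (they are not preserved
// across the jump). This never returns.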
void QuickExceptionHandler::DoLongJump(bool smash_caller_saves) {
  // Place context back on thread so it will be available when we continue.
  self_->ReleaseLongJumpContext(context_);
  context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
  CHECK_NE(handler_quick_frame_pc_, 0u);
  context_->SetPC(handler_quick_frame_pc_);
  context_->SetArg0(handler_quick_arg0_);
  if (smash_caller_saves) {
    context_->SmashCallerSaves();
  }
  context_->DoLongJump();
  UNREACHABLE();
}

// Prints methods along with the type of each frame.
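// Tags printed per frame: N = transition (no method), R = runtime method,
// S = shadow (interpreter) frame, Q = quick compiled frame, i = inlined.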
class DumpFramesWithTypeStackVisitor FINAL : public StackVisitor {
 public:
  explicit DumpFramesWithTypeStackVisitor(Thread* self, bool show_details = false)
      SHARED_REQUIRES(Locks::mutator_lock_)
      : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
        show_details_(show_details) {}

  bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = GetMethod();
    if (show_details_) {
      LOG(INFO) << "|> pc = " << std::hex << GetCurrentQuickFramePc();
      LOG(INFO) << "|> addr = " << std::hex << reinterpret_cast<uintptr_t>(GetCurrentQuickFrame());
      if (GetCurrentQuickFrame() != nullptr && method != nullptr) {
        LOG(INFO) << "|> ret = " << std::hex << GetReturnPc();
      }
    }
    if (method == nullptr) {
      // Transition frame; keep going, we want to unwind over bridges all the way.
      if (show_details_) {
        LOG(INFO) << "N <transition>";
      }
      return true;
    } else if (method->IsRuntimeMethod()) {
      if (show_details_) {
        LOG(INFO) << "R " << PrettyMethod(method, true);
      }
      return true;
    } else {
      bool is_shadow = GetCurrentShadowFrame() != nullptr;
      LOG(INFO) << (is_shadow ? "S" : "Q")
                << ((!is_shadow && IsInInlinedFrame()) ? "i" : " ")
                << " "
                << PrettyMethod(method, true);
      return true;  // Go on.
    }
  }

 private:
  bool show_details_;

  DISALLOW_COPY_AND_ASSIGN(DumpFramesWithTypeStackVisitor);
};

void QuickExceptionHandler::DumpFramesWithType(Thread* self, bool details) {
  DumpFramesWithTypeStackVisitor visitor(self, details);
  visitor.WalkStack(true);
}

}  // namespace art