/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020017#include "quick_exception_handler.h"
18
Ian Rogerse63db272014-07-15 15:36:11 -070019#include "arch/context.h"
Mathieu Chartiere401d142015-04-22 13:56:20 -070020#include "art_method-inl.h"
Ian Rogers5cf98192014-05-29 21:31:50 -070021#include "dex_instruction.h"
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020022#include "entrypoints/entrypoint_utils.h"
Ian Rogers6f3dbba2014-10-14 17:41:57 -070023#include "entrypoints/runtime_asm_entrypoints.h"
Mathieu Chartiereb8167a2014-05-07 15:43:14 -070024#include "handle_scope-inl.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070025#include "mirror/class-inl.h"
26#include "mirror/class_loader.h"
27#include "mirror/throwable.h"
Ian Rogers5cf98192014-05-29 21:31:50 -070028#include "verifier/method_verifier.h"
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010029
30namespace art {
31
Ian Rogers5cf98192014-05-29 21:31:50 -070032static constexpr bool kDebugExceptionDelivery = false;
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -070033static constexpr size_t kInvalidFrameDepth = 0xffffffff;
Ian Rogers5cf98192014-05-29 21:31:50 -070034
Sebastien Hertzfd3077e2014-04-23 10:32:43 +020035QuickExceptionHandler::QuickExceptionHandler(Thread* self, bool is_deoptimization)
36 : self_(self), context_(self->GetLongJumpContext()), is_deoptimization_(is_deoptimization),
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010037 method_tracing_active_(is_deoptimization ||
38 Runtime::Current()->GetInstrumentation()->AreExitStubsInstalled()),
Ian Rogers5cf98192014-05-29 21:31:50 -070039 handler_quick_frame_(nullptr), handler_quick_frame_pc_(0), handler_method_(nullptr),
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -070040 handler_dex_pc_(0), clear_exception_(false), handler_frame_depth_(kInvalidFrameDepth) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +010041}
42
Sebastien Hertz520633b2015-09-08 17:03:36 +020043// Finds catch handler.
Ian Rogers5cf98192014-05-29 21:31:50 -070044class CatchBlockStackVisitor FINAL : public StackVisitor {
45 public:
46 CatchBlockStackVisitor(Thread* self, Context* context, Handle<mirror::Throwable>* exception,
47 QuickExceptionHandler* exception_handler)
Mathieu Chartier90443472015-07-16 20:32:27 -070048 SHARED_REQUIRES(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +010049 : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +010050 exception_(exception),
Ian Rogers5cf98192014-05-29 21:31:50 -070051 exception_handler_(exception_handler) {
52 }
53
Mathieu Chartier90443472015-07-16 20:32:27 -070054 bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartiere401d142015-04-22 13:56:20 -070055 ArtMethod* method = GetMethod();
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -070056 exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
Ian Rogers5cf98192014-05-29 21:31:50 -070057 if (method == nullptr) {
58 // This is the upcall, we remember the frame and last pc so that we may long jump to them.
59 exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
60 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
61 uint32_t next_dex_pc;
Mathieu Chartiere401d142015-04-22 13:56:20 -070062 ArtMethod* next_art_method;
Ian Rogers5cf98192014-05-29 21:31:50 -070063 bool has_next = GetNextMethodAndDexPc(&next_art_method, &next_dex_pc);
64 // Report the method that did the down call as the handler.
65 exception_handler_->SetHandlerDexPc(next_dex_pc);
66 exception_handler_->SetHandlerMethod(next_art_method);
67 if (!has_next) {
68 // No next method? Check exception handler is set up for the unhandled exception handler
69 // case.
70 DCHECK_EQ(0U, exception_handler_->GetHandlerDexPc());
71 DCHECK(nullptr == exception_handler_->GetHandlerMethod());
72 }
73 return false; // End stack walk.
74 }
75 if (method->IsRuntimeMethod()) {
76 // Ignore callee save method.
77 DCHECK(method->IsCalleeSaveMethod());
78 return true;
79 }
Mathieu Chartiere401d142015-04-22 13:56:20 -070080 return HandleTryItems(method);
Ian Rogers5cf98192014-05-29 21:31:50 -070081 }
82
83 private:
Mathieu Chartiere401d142015-04-22 13:56:20 -070084 bool HandleTryItems(ArtMethod* method)
Mathieu Chartier90443472015-07-16 20:32:27 -070085 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers5cf98192014-05-29 21:31:50 -070086 uint32_t dex_pc = DexFile::kDexNoIndex;
87 if (!method->IsNative()) {
88 dex_pc = GetDexPc();
89 }
90 if (dex_pc != DexFile::kDexNoIndex) {
91 bool clear_exception = false;
Sebastien Hertz26f72862015-09-15 09:52:07 +020092 StackHandleScope<1> hs(GetThread());
Ian Rogers5cf98192014-05-29 21:31:50 -070093 Handle<mirror::Class> to_find(hs.NewHandle((*exception_)->GetClass()));
Mathieu Chartiere401d142015-04-22 13:56:20 -070094 uint32_t found_dex_pc = method->FindCatchBlock(to_find, dex_pc, &clear_exception);
Ian Rogers5cf98192014-05-29 21:31:50 -070095 exception_handler_->SetClearException(clear_exception);
96 if (found_dex_pc != DexFile::kDexNoIndex) {
Mathieu Chartiere401d142015-04-22 13:56:20 -070097 exception_handler_->SetHandlerMethod(method);
Ian Rogers5cf98192014-05-29 21:31:50 -070098 exception_handler_->SetHandlerDexPc(found_dex_pc);
David Brazdil72f7b882015-09-15 17:00:52 +010099 exception_handler_->SetHandlerQuickFramePc(
100 method->ToNativeQuickPc(found_dex_pc, /* is_catch_handler */ true));
Ian Rogers5cf98192014-05-29 21:31:50 -0700101 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
102 return false; // End stack walk.
Mingyao Yang99170c62015-07-06 11:10:37 -0700103 } else if (UNLIKELY(GetThread()->HasDebuggerShadowFrames())) {
104 // We are going to unwind this frame. Did we prepare a shadow frame for debugging?
105 size_t frame_id = GetFrameId();
106 ShadowFrame* frame = GetThread()->FindDebuggerShadowFrame(frame_id);
107 if (frame != nullptr) {
108 // We will not execute this shadow frame so we can safely deallocate it.
109 GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
110 ShadowFrame::DeleteDeoptimizedFrame(frame);
111 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700112 }
113 }
114 return true; // Continue stack walk.
115 }
116
Ian Rogers5cf98192014-05-29 21:31:50 -0700117 // The exception we're looking for the catch block of.
118 Handle<mirror::Throwable>* exception_;
119 // The quick exception handler we're visiting for.
120 QuickExceptionHandler* const exception_handler_;
121
122 DISALLOW_COPY_AND_ASSIGN(CatchBlockStackVisitor);
123};
124
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000125void QuickExceptionHandler::FindCatch(mirror::Throwable* exception) {
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200126 DCHECK(!is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700127 if (kDebugExceptionDelivery) {
128 mirror::String* msg = exception->GetDetailMessage();
129 std::string str_msg(msg != nullptr ? msg->ToModifiedUtf8() : "");
130 self_->DumpStack(LOG(INFO) << "Delivering exception: " << PrettyTypeOf(exception)
131 << ": " << str_msg << "\n");
132 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700133 StackHandleScope<1> hs(self_);
134 Handle<mirror::Throwable> exception_ref(hs.NewHandle(exception));
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200135
Sebastien Hertz520633b2015-09-08 17:03:36 +0200136 // Walk the stack to find catch handler.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -0700137 CatchBlockStackVisitor visitor(self_, context_, &exception_ref, this);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100138 visitor.WalkStack(true);
139
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200140 if (kDebugExceptionDelivery) {
Mathieu Chartiere401d142015-04-22 13:56:20 -0700141 if (*handler_quick_frame_ == nullptr) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100142 LOG(INFO) << "Handler is upcall";
Ian Rogers5cf98192014-05-29 21:31:50 -0700143 }
144 if (handler_method_ != nullptr) {
145 const DexFile& dex_file = *handler_method_->GetDeclaringClass()->GetDexCache()->GetDexFile();
146 int line_number = dex_file.GetLineNumFromPC(handler_method_, handler_dex_pc_);
147 LOG(INFO) << "Handler: " << PrettyMethod(handler_method_) << " (line: " << line_number << ")";
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100148 }
149 }
150 if (clear_exception_) {
151 // Exception was cleared as part of delivery.
152 DCHECK(!self_->IsExceptionPending());
153 } else {
154 // Put exception back in root set with clear throw location.
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000155 self_->SetException(exception_ref.Get());
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100156 }
David Brazdil77a48ae2015-09-15 12:34:04 +0000157 // If the handler is in optimized code, we need to set the catch environment.
158 if (*handler_quick_frame_ != nullptr &&
159 handler_method_ != nullptr &&
160 handler_method_->IsOptimized(sizeof(void*))) {
161 SetCatchEnvironmentForOptimizedHandler(&visitor);
162 }
163}
164
165static VRegKind ToVRegKind(DexRegisterLocation::Kind kind) {
166 // Slightly hacky since we cannot map DexRegisterLocationKind and VRegKind
167 // one to one. However, StackVisitor::GetVRegFromOptimizedCode only needs to
168 // distinguish between core/FPU registers and low/high bits on 64-bit.
169 switch (kind) {
170 case DexRegisterLocation::Kind::kConstant:
171 case DexRegisterLocation::Kind::kInStack:
172 // VRegKind is ignored.
173 return VRegKind::kUndefined;
174
175 case DexRegisterLocation::Kind::kInRegister:
176 // Selects core register. For 64-bit registers, selects low 32 bits.
177 return VRegKind::kLongLoVReg;
178
179 case DexRegisterLocation::Kind::kInRegisterHigh:
180 // Selects core register. For 64-bit registers, selects high 32 bits.
181 return VRegKind::kLongHiVReg;
182
183 case DexRegisterLocation::Kind::kInFpuRegister:
184 // Selects FPU register. For 64-bit registers, selects low 32 bits.
185 return VRegKind::kDoubleLoVReg;
186
187 case DexRegisterLocation::Kind::kInFpuRegisterHigh:
188 // Selects FPU register. For 64-bit registers, selects high 32 bits.
189 return VRegKind::kDoubleHiVReg;
190
191 default:
192 LOG(FATAL) << "Unexpected vreg location "
193 << DexRegisterLocation::PrettyDescriptor(kind);
194 UNREACHABLE();
195 }
196}
197
198void QuickExceptionHandler::SetCatchEnvironmentForOptimizedHandler(StackVisitor* stack_visitor) {
199 DCHECK(!is_deoptimization_);
200 DCHECK(*handler_quick_frame_ != nullptr) << "Method should not be called on upcall exceptions";
201 DCHECK(handler_method_ != nullptr && handler_method_->IsOptimized(sizeof(void*)));
202
203 if (kDebugExceptionDelivery) {
204 self_->DumpStack(LOG(INFO) << "Setting catch phis: ");
205 }
206
207 const size_t number_of_vregs = handler_method_->GetCodeItem()->registers_size_;
208 CodeInfo code_info = handler_method_->GetOptimizedCodeInfo();
209 StackMapEncoding encoding = code_info.ExtractEncoding();
210
211 // Find stack map of the throwing instruction.
212 StackMap throw_stack_map =
213 code_info.GetStackMapForNativePcOffset(stack_visitor->GetNativePcOffset(), encoding);
214 DCHECK(throw_stack_map.IsValid());
215 DexRegisterMap throw_vreg_map =
216 code_info.GetDexRegisterMapOf(throw_stack_map, encoding, number_of_vregs);
217
218 // Find stack map of the catch block.
219 StackMap catch_stack_map = code_info.GetCatchStackMapForDexPc(GetHandlerDexPc(), encoding);
220 DCHECK(catch_stack_map.IsValid());
221 DexRegisterMap catch_vreg_map =
222 code_info.GetDexRegisterMapOf(catch_stack_map, encoding, number_of_vregs);
223
224 // Copy values between them.
225 for (uint16_t vreg = 0; vreg < number_of_vregs; ++vreg) {
226 DexRegisterLocation::Kind catch_location =
227 catch_vreg_map.GetLocationKind(vreg, number_of_vregs, code_info, encoding);
228 if (catch_location == DexRegisterLocation::Kind::kNone) {
229 continue;
230 }
231 DCHECK(catch_location == DexRegisterLocation::Kind::kInStack);
232
233 // Get vreg value from its current location.
234 uint32_t vreg_value;
235 VRegKind vreg_kind = ToVRegKind(throw_vreg_map.GetLocationKind(vreg,
236 number_of_vregs,
237 code_info,
238 encoding));
239 bool get_vreg_success = stack_visitor->GetVReg(stack_visitor->GetMethod(),
240 vreg,
241 vreg_kind,
242 &vreg_value);
243 CHECK(get_vreg_success) << "VReg " << vreg << " was optimized out ("
244 << "method=" << PrettyMethod(stack_visitor->GetMethod()) << ", "
245 << "dex_pc=" << stack_visitor->GetDexPc() << ", "
246 << "native_pc_offset=" << stack_visitor->GetNativePcOffset() << ")";
247
248 // Copy value to the catch phi's stack slot.
249 int32_t slot_offset = catch_vreg_map.GetStackOffsetInBytes(vreg,
250 number_of_vregs,
251 code_info,
252 encoding);
253 ArtMethod** frame_top = stack_visitor->GetCurrentQuickFrame();
254 uint8_t* slot_address = reinterpret_cast<uint8_t*>(frame_top) + slot_offset;
255 uint32_t* slot_ptr = reinterpret_cast<uint32_t*>(slot_address);
256 *slot_ptr = vreg_value;
257 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200258}
259
Ian Rogers5cf98192014-05-29 21:31:50 -0700260// Prepares deoptimization.
261class DeoptimizeStackVisitor FINAL : public StackVisitor {
262 public:
263 DeoptimizeStackVisitor(Thread* self, Context* context, QuickExceptionHandler* exception_handler)
Mathieu Chartier90443472015-07-16 20:32:27 -0700264 SHARED_REQUIRES(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100265 : StackVisitor(self, context, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100266 exception_handler_(exception_handler),
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700267 prev_shadow_frame_(nullptr),
268 stacked_shadow_frame_pushed_(false) {
Ian Rogers5cf98192014-05-29 21:31:50 -0700269 }
270
Mathieu Chartier90443472015-07-16 20:32:27 -0700271 bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700272 exception_handler_->SetHandlerFrameDepth(GetFrameDepth());
Mathieu Chartiere401d142015-04-22 13:56:20 -0700273 ArtMethod* method = GetMethod();
Ian Rogers5cf98192014-05-29 21:31:50 -0700274 if (method == nullptr) {
275 // This is the upcall, we remember the frame and last pc so that we may long jump to them.
276 exception_handler_->SetHandlerQuickFramePc(GetCurrentQuickFramePc());
277 exception_handler_->SetHandlerQuickFrame(GetCurrentQuickFrame());
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700278 if (!stacked_shadow_frame_pushed_) {
279 // In case there is no deoptimized shadow frame for this upcall, we still
280 // need to push a nullptr to the stack since there is always a matching pop after
281 // the long jump.
Sebastien Hertz26f72862015-09-15 09:52:07 +0200282 GetThread()->PushStackedShadowFrame(nullptr,
283 StackedShadowFrameType::kDeoptimizationShadowFrame);
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700284 stacked_shadow_frame_pushed_ = true;
285 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700286 return false; // End stack walk.
287 } else if (method->IsRuntimeMethod()) {
288 // Ignore callee save method.
289 DCHECK(method->IsCalleeSaveMethod());
290 return true;
Sebastien Hertz520633b2015-09-08 17:03:36 +0200291 } else if (method->IsNative()) {
292 // If we return from JNI with a pending exception and want to deoptimize, we need to skip
293 // the native method.
294 // The top method is a runtime method, the native method comes next.
295 CHECK_EQ(GetFrameDepth(), 1U);
296 return true;
Ian Rogers5cf98192014-05-29 21:31:50 -0700297 } else {
298 return HandleDeoptimization(method);
299 }
300 }
301
302 private:
Sebastien Hertzc901dd72014-07-16 11:56:07 +0200303 static VRegKind GetVRegKind(uint16_t reg, const std::vector<int32_t>& kinds) {
304 return static_cast<VRegKind>(kinds.at(reg * 2));
305 }
306
Mathieu Chartier90443472015-07-16 20:32:27 -0700307 bool HandleDeoptimization(ArtMethod* m) SHARED_REQUIRES(Locks::mutator_lock_) {
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700308 const DexFile::CodeItem* code_item = m->GetCodeItem();
Sebastien Hertz520633b2015-09-08 17:03:36 +0200309 CHECK(code_item != nullptr) << "No code item for " << PrettyMethod(m);
Ian Rogers5cf98192014-05-29 21:31:50 -0700310 uint16_t num_regs = code_item->registers_size_;
311 uint32_t dex_pc = GetDexPc();
Sebastien Hertz26f72862015-09-15 09:52:07 +0200312 StackHandleScope<2> hs(GetThread()); // Dex cache, class loader and method.
Mathieu Chartierbfd9a432014-05-21 17:43:44 -0700313 mirror::Class* declaring_class = m->GetDeclaringClass();
314 Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(declaring_class->GetDexCache()));
315 Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(declaring_class->GetClassLoader()));
Sebastien Hertz26f72862015-09-15 09:52:07 +0200316 verifier::MethodVerifier verifier(GetThread(), h_dex_cache->GetDexFile(), h_dex_cache,
317 h_class_loader, &m->GetClassDef(), code_item,
318 m->GetDexMethodIndex(), m, m->GetAccessFlags(), true, true,
319 true, true);
Andreas Gampe2e04bb22015-02-10 15:37:27 -0800320 bool verifier_success = verifier.Verify();
Mathieu Chartiere401d142015-04-22 13:56:20 -0700321 CHECK(verifier_success) << PrettyMethod(m);
Mingyao Yang99170c62015-07-06 11:10:37 -0700322 // Check if a shadow frame already exists for debugger's set-local-value purpose.
323 const size_t frame_id = GetFrameId();
324 ShadowFrame* new_frame = GetThread()->FindDebuggerShadowFrame(frame_id);
325 const bool* updated_vregs;
326 if (new_frame == nullptr) {
327 new_frame = ShadowFrame::CreateDeoptimizedFrame(num_regs, nullptr, m, dex_pc);
328 updated_vregs = nullptr;
329 } else {
330 updated_vregs = GetThread()->GetUpdatedVRegFlags(frame_id);
331 DCHECK(updated_vregs != nullptr);
332 }
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700333 {
Sebastien Hertz26f72862015-09-15 09:52:07 +0200334 ScopedStackedShadowFramePusher pusher(GetThread(), new_frame,
Sebastien Hertzf7958692015-06-09 14:09:14 +0200335 StackedShadowFrameType::kShadowFrameUnderConstruction);
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700336 const std::vector<int32_t> kinds(verifier.DescribeVRegs(dex_pc));
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000337
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700338 // Markers for dead values, used when the verifier knows a Dex register is undefined,
339 // or when the compiler knows the register has not been initialized, or is not used
340 // anymore in the method.
341 static constexpr uint32_t kDeadValue = 0xEBADDE09;
342 static constexpr uint64_t kLongDeadValue = 0xEBADDE09EBADDE09;
343 for (uint16_t reg = 0; reg < num_regs; ++reg) {
Mingyao Yang99170c62015-07-06 11:10:37 -0700344 if (updated_vregs != nullptr && updated_vregs[reg]) {
345 // Keep the value set by debugger.
346 continue;
347 }
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700348 VRegKind kind = GetVRegKind(reg, kinds);
349 switch (kind) {
350 case kUndefined:
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000351 new_frame->SetVReg(reg, kDeadValue);
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700352 break;
353 case kConstant:
354 new_frame->SetVReg(reg, kinds.at((reg * 2) + 1));
355 break;
356 case kReferenceVReg: {
357 uint32_t value = 0;
358 // Check IsReferenceVReg in case the compiled GC map doesn't agree with the verifier.
359 // We don't want to copy a stale reference into the shadow frame as a reference.
360 // b/20736048
361 if (GetVReg(m, reg, kind, &value) && IsReferenceVReg(m, reg)) {
362 new_frame->SetVRegReference(reg, reinterpret_cast<mirror::Object*>(value));
363 } else {
364 new_frame->SetVReg(reg, kDeadValue);
365 }
366 break;
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000367 }
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700368 case kLongLoVReg:
369 if (GetVRegKind(reg + 1, kinds) == kLongHiVReg) {
370 // Treat it as a "long" register pair.
371 uint64_t value = 0;
372 if (GetVRegPair(m, reg, kLongLoVReg, kLongHiVReg, &value)) {
373 new_frame->SetVRegLong(reg, value);
374 } else {
375 new_frame->SetVRegLong(reg, kLongDeadValue);
376 }
377 } else {
378 uint32_t value = 0;
379 if (GetVReg(m, reg, kind, &value)) {
380 new_frame->SetVReg(reg, value);
381 } else {
382 new_frame->SetVReg(reg, kDeadValue);
383 }
384 }
385 break;
386 case kLongHiVReg:
387 if (GetVRegKind(reg - 1, kinds) == kLongLoVReg) {
388 // Nothing to do: we treated it as a "long" register pair.
389 } else {
390 uint32_t value = 0;
391 if (GetVReg(m, reg, kind, &value)) {
392 new_frame->SetVReg(reg, value);
393 } else {
394 new_frame->SetVReg(reg, kDeadValue);
395 }
396 }
397 break;
398 case kDoubleLoVReg:
399 if (GetVRegKind(reg + 1, kinds) == kDoubleHiVReg) {
400 uint64_t value = 0;
401 if (GetVRegPair(m, reg, kDoubleLoVReg, kDoubleHiVReg, &value)) {
402 // Treat it as a "double" register pair.
403 new_frame->SetVRegLong(reg, value);
404 } else {
405 new_frame->SetVRegLong(reg, kLongDeadValue);
406 }
407 } else {
408 uint32_t value = 0;
409 if (GetVReg(m, reg, kind, &value)) {
410 new_frame->SetVReg(reg, value);
411 } else {
412 new_frame->SetVReg(reg, kDeadValue);
413 }
414 }
415 break;
416 case kDoubleHiVReg:
417 if (GetVRegKind(reg - 1, kinds) == kDoubleLoVReg) {
418 // Nothing to do: we treated it as a "double" register pair.
419 } else {
420 uint32_t value = 0;
421 if (GetVReg(m, reg, kind, &value)) {
422 new_frame->SetVReg(reg, value);
423 } else {
424 new_frame->SetVReg(reg, kDeadValue);
425 }
426 }
427 break;
428 default:
429 uint32_t value = 0;
430 if (GetVReg(m, reg, kind, &value)) {
431 new_frame->SetVReg(reg, value);
432 } else {
433 new_frame->SetVReg(reg, kDeadValue);
434 }
435 break;
Nicolas Geoffray15b9d522015-03-12 15:05:13 +0000436 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700437 }
438 }
Mingyao Yang99170c62015-07-06 11:10:37 -0700439 if (updated_vregs != nullptr) {
440 // Calling Thread::RemoveDebuggerShadowFrameMapping will also delete the updated_vregs
441 // array so this must come after we processed the frame.
442 GetThread()->RemoveDebuggerShadowFrameMapping(frame_id);
443 DCHECK(GetThread()->FindDebuggerShadowFrame(frame_id) == nullptr);
444 }
Ian Rogers5cf98192014-05-29 21:31:50 -0700445 if (prev_shadow_frame_ != nullptr) {
446 prev_shadow_frame_->SetLink(new_frame);
447 } else {
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700448 // Will be popped after the long jump after DeoptimizeStack(),
449 // right before interpreter::EnterInterpreterFromDeoptimize().
450 stacked_shadow_frame_pushed_ = true;
Sebastien Hertz26f72862015-09-15 09:52:07 +0200451 GetThread()->PushStackedShadowFrame(new_frame,
452 StackedShadowFrameType::kDeoptimizationShadowFrame);
Ian Rogers5cf98192014-05-29 21:31:50 -0700453 }
454 prev_shadow_frame_ = new_frame;
455 return true;
456 }
457
Ian Rogers5cf98192014-05-29 21:31:50 -0700458 QuickExceptionHandler* const exception_handler_;
459 ShadowFrame* prev_shadow_frame_;
Mingyao Yang1f2d3ba2015-05-18 12:12:50 -0700460 bool stacked_shadow_frame_pushed_;
Ian Rogers5cf98192014-05-29 21:31:50 -0700461
462 DISALLOW_COPY_AND_ASSIGN(DeoptimizeStackVisitor);
463};
464
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200465void QuickExceptionHandler::DeoptimizeStack() {
466 DCHECK(is_deoptimization_);
Ian Rogers5cf98192014-05-29 21:31:50 -0700467 if (kDebugExceptionDelivery) {
468 self_->DumpStack(LOG(INFO) << "Deoptimizing: ");
469 }
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200470
471 DeoptimizeStackVisitor visitor(self_, context_, this);
472 visitor.WalkStack(true);
473
474 // Restore deoptimization exception
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000475 self_->SetException(Thread::GetDeoptimizationException());
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100476}
477
478// Unwinds all instrumentation stack frame prior to catch handler or upcall.
479class InstrumentationStackVisitor : public StackVisitor {
480 public:
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700481 InstrumentationStackVisitor(Thread* self, size_t frame_depth)
Mathieu Chartier90443472015-07-16 20:32:27 -0700482 SHARED_REQUIRES(Locks::mutator_lock_)
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100483 : StackVisitor(self, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames),
Ian Rogerscf7f1912014-10-22 22:06:39 -0700484 frame_depth_(frame_depth),
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100485 instrumentation_frames_to_pop_(0) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700486 CHECK_NE(frame_depth_, kInvalidFrameDepth);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100487 }
488
Mathieu Chartier90443472015-07-16 20:32:27 -0700489 bool VisitFrame() SHARED_REQUIRES(Locks::mutator_lock_) {
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700490 size_t current_frame_depth = GetFrameDepth();
491 if (current_frame_depth < frame_depth_) {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100492 CHECK(GetMethod() != nullptr);
Ian Rogers6f3dbba2014-10-14 17:41:57 -0700493 if (UNLIKELY(reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) == GetReturnPc())) {
Nicolas Geoffray8e5bd182015-05-06 11:34:34 +0100494 if (!IsInInlinedFrame()) {
495 // We do not count inlined frames, because we do not instrument them. The reason we
496 // include them in the stack walking is the check against `frame_depth_`, which is
497 // given to us by a visitor that visits inlined frames.
498 ++instrumentation_frames_to_pop_;
499 }
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100500 }
501 return true;
502 } else {
503 // We reached the frame of the catch handler or the upcall.
504 return false;
505 }
506 }
507
508 size_t GetInstrumentationFramesToPop() const {
509 return instrumentation_frames_to_pop_;
510 }
511
512 private:
Hiroshi Yamauchi649278c2014-08-13 11:12:22 -0700513 const size_t frame_depth_;
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100514 size_t instrumentation_frames_to_pop_;
515
516 DISALLOW_COPY_AND_ASSIGN(InstrumentationStackVisitor);
517};
518
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200519void QuickExceptionHandler::UpdateInstrumentationStack() {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100520 if (method_tracing_active_) {
Ian Rogers6a3c1fc2014-10-31 00:33:20 -0700521 InstrumentationStackVisitor visitor(self_, handler_frame_depth_);
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100522 visitor.WalkStack(true);
523
524 size_t instrumentation_frames_to_pop = visitor.GetInstrumentationFramesToPop();
525 instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
526 for (size_t i = 0; i < instrumentation_frames_to_pop; ++i) {
527 instrumentation->PopMethodForUnwind(self_, is_deoptimization_);
528 }
529 }
530}
531
Sebastien Hertzfd3077e2014-04-23 10:32:43 +0200532void QuickExceptionHandler::DoLongJump() {
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100533 // Place context back on thread so it will be available when we continue.
534 self_->ReleaseLongJumpContext(context_);
535 context_->SetSP(reinterpret_cast<uintptr_t>(handler_quick_frame_));
536 CHECK_NE(handler_quick_frame_pc_, 0u);
537 context_->SetPC(handler_quick_frame_pc_);
538 context_->SmashCallerSaves();
539 context_->DoLongJump();
Andreas Gampe794ad762015-02-23 08:12:24 -0800540 UNREACHABLE();
Sebastien Hertzd45a1f52014-01-09 14:56:54 +0100541}
542
543} // namespace art