/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "base/callee_save_type.h"
#include "base/enums.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "debug_print.h"
#include "debugger.h"
#include "dex/dex_file-inl.h"
#include "dex/dex_file_types.h"
#include "dex/dex_instruction-inl.h"
#include "dex/method_reference.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "imt_conflict_table.h"
#include "imtable-inl.h"
#include "index_bss_mapping.h"
#include "instrumentation.h"
#include "interpreter/interpreter.h"
#include "jit/jit.h"
#include "linear_alloc.h"
#include "method_handles.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/method_handle_impl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "mirror/var_handle.h"
#include "oat_file.h"
#include "oat_quick_method_header.h"
#include "quick_exception_handler.h"
#include "runtime.h"
#include "scoped_thread_state_change-inl.h"
#include "stack.h"
#include "thread-inl.h"
#include "var_handles.h"
#include "well_known_classes.h"

namespace art {

// Visits the arguments as saved to the stack by a CalleeSaveType::kSaveRefsAndArgs callee save
// frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, CalleeSaveType::kSaveRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = true;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(CalleeSaveType::kSaveRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(CalleeSaveType::kSaveRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(CalleeSaveType::kSaveRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
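  // Illustrative note (derived from the layout above, not additional ABI data): on this 32-bit
  // ARM frame a GPR spill slot is 4 bytes, so GetParamAddress() below resolves the i-th argument
  // GPR at
  //   reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset + 4 * i
  // e.g. i == 0 is R1 (which holds 'this' for instance methods) and i == 1 is R2.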
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(CalleeSaveType::kSaveRefsAndArgs);
  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(CalleeSaveType::kSaveRefsAndArgs);
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | T1         |    arg5
  // | T0         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |
  // | F18        |    f_arg5
  // | F17        |
  // | F16        |    f_arg4
  // | F15        |
  // | F14        |    f_arg3
  // | F13        |
  // | F12        |    f_arg2
  // | F11        |
  // | F10        |    f_arg1
  // | F9         |
  // | F8         |    f_arg0
  // |            |    padding
  // | A0/Method* | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 5;   // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 12;  // 6 arguments passed in FPRs. Floats can be
                                                  // passed only in even numbered registers and each
                                                  // double occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 8;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 56;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 108;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |    f_arg7
  // | F18        |    f_arg6
  // | F17        |    f_arg5
  // | F16        |    f_arg4
  // | F15        |    f_arg3
  // | F14        |    f_arg2
  // | F13        |    f_arg1
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* | <- sp
  // NOTE: for Mips64, when A0 is skipped, F12 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F13).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     | <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
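  // Illustrative mapping (read off the diagram above, not additional ABI data): counting 8-byte
  // slots upward from Gpr1Offset, the spilled registers sit at RCX=0, RDX=1, RBX=2, RBP=3,
  // RSI=4, R8=5, R9=6. Argument order is RSI, RDX, RCX, R8, R9, which is why the switch maps
  // gpr_index 0..4 to slots 4, 1, 0, 5, 6.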
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kSaveRefsAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static StackReference<mirror::Object>* GetProxyThisObjectReference(ArtMethod** sp)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address);
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, CalleeSaveType::kSaveRefsAndArgs);
  }

  static ArtMethod* GetOuterMethod(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

  static uint32_t GetCallingDexPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA,
                                                            CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);

    if (current_code->IsOptimized()) {
      CodeInfo code_info(current_code);
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset);
      DCHECK(stack_map.IsValid());
      if (stack_map.HasInlineInfo()) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
        return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
      } else {
        return stack_map.GetDexPc();
      }
    } else {
      return current_code->ToDexPc(*caller_sp, outer_pc);
    }
  }
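  // Note on the inlined case above: when the call site was inlined into the outer method's
  // compiled code, the stack map's inline info describes the whole inline stack at that native
  // PC, so the dex pc at depth - 1 (the innermost inlined frame) is the dex pc of the method
  // that actually contains the call.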

  static bool GetInvokeType(ArtMethod** sp, InvokeType* invoke_type, uint32_t* dex_method_index)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA,
                                                            CalleeSaveType::kSaveRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    if (!current_code->IsOptimized()) {
      return false;
    }
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);
    CodeInfo code_info(current_code);
    MethodInfo method_info = current_code->GetOptimizedMethodInfo();
    InvokeInfo invoke(code_info.GetInvokeInfoForNativePcOffset(outer_pc_offset));
    if (invoke.IsValid()) {
      *invoke_type = static_cast<InvokeType>(invoke.GetInvokeType());
      *dex_method_index = invoke.GetMethodIndex(method_info);
      return true;
    }
    return false;
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, we want to assume that counters (fpr_double_index_) are even if the
    // next register is even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }
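  // Note on ReadSplitLongParam() above (rationale inferred from the frame diagrams): the caller's
  // out-args area reserves spill slots even for arguments passed in registers ("arg1 spill" etc.),
  // and stack_index_ advances for every argument visited. A long that straddles the last GPR and
  // the stack can therefore be read in full from the caller's spill area.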

  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }
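
  // Note: kGprFprLockstep is true only for the MIPS64 layout above, where integer and
  // floating-point argument registers advance together (when A0 is skipped, F12 is also skipped).
  // On the other architectures these helpers reduce to plain increments.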

  void VisitArguments() REQUIRES_SHARED(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the method's first argument.
    // (b) whatever the argument type, 'stack_index_' should be advanced on every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle the 'this' argument.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong &&
#if defined(__mips__) && !defined(__LP64__)
                (gpr_index_ == 0 || gpr_index_ == 2) &&
#else
                gpr_index_ == 0 &&
#endif
                kAlignPairRegister) {
              // Currently, this is only for ARM and MIPS, where we align long parameters with
              // even-numbered registers by skipping R1 (on ARM) or A1(A3) (on MIPS) and using
              // R2 (on ARM) or A2(T0) (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }
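
  // Worked example of the back-filling above (illustrative, assuming the 32-bit ARM layout with
  // kQuickDoubleRegAlignedFloatBackFilled): for shorty arguments "FDF", the first float takes S0
  // and fpr_double_index_ rounds up to 2; the double then takes the aligned pair S2/S3 (D1); the
  // second float back-fills the hole at S1 instead of consuming S4.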

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;  // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;  // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed in
  // terms of singles, may be behind fpr_index.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};
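
// The concrete visitors below (e.g. BuildQuickShadowFrameVisitor and BuildQuickArgumentVisitor)
// each override Visit() and use GetParamPrimitiveType()/GetParamAddress() to pull one argument
// per call out of the quick frame.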

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// makes it possible to use the QuickArgumentVisitor constants without moving all the code into
// its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObjectReference(sp)->AsMirrorPtr();
}

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

// Don't inline. See b/65159206.
NO_INLINE
static void HandleDeoptimization(JValue* result,
                                 ArtMethod* method,
                                 ShadowFrame* deopt_frame,
                                 ManagedStack* fragment)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Coming from partial-fragment deopt.
  Thread* self = Thread::Current();
  if (kIsDebugBuild) {
    // Sanity-check: are the methods as expected? We check that the last shadow frame (the bottom
    // of the call-stack) corresponds to the called method.
    ShadowFrame* linked = deopt_frame;
    while (linked->GetLink() != nullptr) {
      linked = linked->GetLink();
    }
    CHECK_EQ(method, linked->GetMethod()) << method->PrettyMethod() << " "
                                          << ArtMethod::PrettyMethod(linked->GetMethod());
  }

  if (VLOG_IS_ON(deopt)) {
    // Print out the stack to verify that it was a partial-fragment deopt.
    LOG(INFO) << "Continuing from deopt. Stack is:";
    QuickExceptionHandler::DumpFramesWithType(self, true);
  }

  ObjPtr<mirror::Throwable> pending_exception;
  bool from_code = false;
  DeoptimizationMethodType method_type;
  self->PopDeoptimizationContext(/* out */ result,
                                 /* out */ &pending_exception,
                                 /* out */ &from_code,
                                 /* out */ &method_type);

  // Push a transition back into managed code onto the linked list in thread.
  self->PushManagedStackFragment(fragment);

  // Ensure that the stack is still in order.
  if (kIsDebugBuild) {
    class DummyStackVisitor : public StackVisitor {
     public:
      explicit DummyStackVisitor(Thread* self_in) REQUIRES_SHARED(Locks::mutator_lock_)
          : StackVisitor(self_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

      bool VisitFrame() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_) {
        // Nothing to do here. In a debug build, SanityCheckFrame will do the work in the walking
        // logic. Just always say we want to continue.
        return true;
      }
    };
    DummyStackVisitor dsv(self);
    dsv.WalkStack();
  }

  // Restore the exception that was pending before deoptimization then interpret the
  // deoptimized frames.
  if (pending_exception != nullptr) {
    self->SetException(pending_exception);
  }
  interpreter::EnterInterpreterFromDeoptimize(self,
                                              deopt_frame,
                                              result,
                                              from_code,
                                              DeoptimizationMethodType::kDefault);
}

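// artQuickToInterpreterBridge: taken when a method must run in the interpreter. It builds a
// shadow frame from the quick arguments (or resumes a pending partial-fragment deoptimization),
// ensures a static method's declaring class is initialized, interprets the method, and may
// request deoptimization of the caller before returning the result.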
extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (UNLIKELY(!method->IsInvokable())) {
    method->ThrowInvocationTimeError();
    return 0;
  }

  JValue tmp_value;
  ShadowFrame* deopt_frame = self->PopStackedShadowFrame(
      StackedShadowFrameType::kDeoptimizationShadowFrame, false);
  ManagedStack fragment;

  DCHECK(!method->IsNative()) << method->PrettyMethod();
  uint32_t shorty_len = 0;
  ArtMethod* non_proxy_method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  DCHECK(non_proxy_method->GetCodeItem() != nullptr) << method->PrettyMethod();
  CodeItemDataAccessor accessor(non_proxy_method->DexInstructionData());
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);

  JValue result;

  if (UNLIKELY(deopt_frame != nullptr)) {
    HandleDeoptimization(&result, method, deopt_frame, &fragment);
  } else {
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    uint16_t num_regs = accessor.RegistersSize();
    // No last shadow coming from quick.
    ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
        CREATE_SHADOW_FRAME(num_regs, /* link */ nullptr, method, /* dex pc */ 0);
    ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
    size_t first_arg_reg = accessor.RegistersSize() - accessor.InsSize();
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending())
            << shadow_frame->GetMethod()->PrettyMethod();
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    result = interpreter::EnterInterpreterFromEntryPoint(self, accessor, shadow_frame);
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);

  // Request a stack deoptimization if needed.
  ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  uintptr_t caller_pc = QuickArgumentVisitor::GetCallingPc(sp);
  // If caller_pc is the instrumentation exit stub, the stub will check to see if deoptimization
  // should be done and it knows the real return pc.
  if (UNLIKELY(caller_pc != reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()) &&
               Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
    if (!Runtime::Current()->IsAsyncDeoptimizeable(caller_pc)) {
      LOG(WARNING) << "Got a deoptimization request on un-deoptimizable method "
                   << caller->PrettyMethod();
    } else {
      // Push the context of the deoptimization stack so we can restore the return value and the
      // exception before executing the deoptimized frames.
      self->PushDeoptimizationContext(
          result,
          shorty[0] == 'L' || shorty[0] == '[', /* class or array */
          self->GetException(),
          false /* from_code */,
          DeoptimizationMethodType::kDefault);

      // Set special exception to cause deoptimization.
      self->SetException(Thread::GetDeoptimizationException());
    }
  }

  // No need to restore the args since the method has already been run by the interpreter.
  return result.GetJ();
}

// Visits arguments on the stack placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(
    ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << proxy_method->PrettyMethod();
  DCHECK(receiver->GetClass()->IsProxyClass()) << proxy_method->PrettyMethod();
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ((*sp), proxy_method) << proxy_method->PrettyMethod();
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  CHECK(!non_proxy_method->IsStatic()) << proxy_method->PrettyMethod() << " "
                                       << non_proxy_method->PrettyMethod();
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(
      sp, /* is_static */ false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << proxy_method->PrettyMethod();
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  ArtMethod* interface_method = proxy_method->FindOverriddenMethod(kRuntimePointerSize);
  DCHECK(interface_method != nullptr) << proxy_method->PrettyMethod();
  DCHECK(!interface_method->IsProxyMethod()) << interface_method->PrettyMethod();
  self->EndAssertNoThreadSuspension(old_cause);
  DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  ObjPtr<mirror::Method> interface_reflect_method =
      mirror::Method::CreateFromArtMethod<kRuntimePointerSize, false>(soa.Self(), interface_method);
  if (interface_reflect_method == nullptr) {
    soa.Self()->AssertPendingOOMException();
    return 0;
  }
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_reflect_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  return result.GetJ();
}
967
// Visitor returning a reference argument at a given position in a Quick stack frame.
// NOTE: Only used for testing purposes.
class GetQuickReferenceArgumentAtVisitor FINAL : public QuickArgumentVisitor {
 public:
  GetQuickReferenceArgumentAtVisitor(ArtMethod** sp,
                                     const char* shorty,
                                     uint32_t shorty_len,
                                     size_t arg_pos)
      : QuickArgumentVisitor(sp, /* is_static */ false, shorty, shorty_len),
        cur_pos_(0u),
        arg_pos_(arg_pos),
        ref_arg_(nullptr) {
    CHECK_LT(arg_pos, shorty_len) << "Argument position greater than the number of arguments";
  }

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
    if (cur_pos_ == arg_pos_) {
      Primitive::Type type = GetParamPrimitiveType();
      CHECK_EQ(type, Primitive::kPrimNot) << "Argument at searched position is not a reference";
      ref_arg_ = reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    }
    ++cur_pos_;
  }

  StackReference<mirror::Object>* GetReferenceArgument() {
    return ref_arg_;
  }

 private:
  // The position of the currently visited argument.
  size_t cur_pos_;
  // The position of the searched argument.
  const size_t arg_pos_;
  // The reference argument, if found.
  StackReference<mirror::Object>* ref_arg_;

  DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentAtVisitor);
};

// Returns the reference argument at position `arg_pos` in the Quick stack frame at address `sp`.
// NOTE: Only used for testing purposes.
extern "C" StackReference<mirror::Object>* artQuickGetProxyReferenceArgumentAt(size_t arg_pos,
                                                                               ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* proxy_method = *sp;
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  CHECK(!non_proxy_method->IsStatic())
      << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod();
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  GetQuickReferenceArgumentAtVisitor ref_arg_visitor(sp, shorty, shorty_len, arg_pos);
  ref_arg_visitor.VisitArguments();
  StackReference<mirror::Object>* ref_arg = ref_arg_visitor.GetReferenceArgument();
  return ref_arg;
}

// Visitor returning all the reference arguments in a Quick stack frame.
class GetQuickReferenceArgumentsVisitor FINAL : public QuickArgumentVisitor {
 public:
  GetQuickReferenceArgumentsVisitor(ArtMethod** sp,
                                    bool is_static,
                                    const char* shorty,
                                    uint32_t shorty_len)
      : QuickArgumentVisitor(sp, is_static, shorty, shorty_len) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE {
    Primitive::Type type = GetParamPrimitiveType();
    if (type == Primitive::kPrimNot) {
      StackReference<mirror::Object>* ref_arg =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      ref_args_.push_back(ref_arg);
    }
  }

  std::vector<StackReference<mirror::Object>*> GetReferenceArguments() {
    return ref_args_;
  }

 private:
  // The reference arguments.
  std::vector<StackReference<mirror::Object>*> ref_args_;

  DISALLOW_COPY_AND_ASSIGN(GetQuickReferenceArgumentsVisitor);
};

// Returns all reference arguments in the Quick stack frame at address `sp`.
std::vector<StackReference<mirror::Object>*> GetProxyReferenceArguments(ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ArtMethod* proxy_method = *sp;
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  CHECK(!non_proxy_method->IsStatic())
      << proxy_method->PrettyMethod() << " " << non_proxy_method->PrettyMethod();
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  GetQuickReferenceArgumentsVisitor ref_args_visitor(sp, /* is_static */ false, shorty, shorty_len);
  ref_args_visitor.VisitArguments();
  std::vector<StackReference<mirror::Object>*> ref_args = ref_args_visitor.GetReferenceArguments();
  return ref_args;
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

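// Aside: the intended use of RememberForGcArgumentVisitor is the remember/fixup bracket visible
// in the trampolines below. A minimal sketch of the pattern, assuming a frame whose arguments
// must survive a suspension point:
//
//   RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa);
//   visitor.VisitArguments();   // Root each reference argument in a JNI local reference.
//   /* ... code that may suspend the thread and trigger a moving GC ... */
//   visitor.FixupReferences();  // Write possibly-moved objects back into the frame.
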
extern "C" const void* artInstrumentationMethodEntryFromCode(ArtMethod* method,
                                                             mirror::Object* this_object,
                                                             Thread* self,
                                                             ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const void* result;
  // Instrumentation changes the stack. Thus, when exiting, the stack cannot be verified, so skip
  // that part.
  ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  if (instrumentation->IsDeoptimized(method)) {
    result = GetQuickToInterpreterBridge();
  } else {
    result = instrumentation->GetQuickCodeFor(method, kRuntimePointerSize);
    DCHECK(!Runtime::Current()->GetClassLinker()->IsQuickToInterpreterBridge(result));
  }

  bool interpreter_entry = (result == GetQuickToInterpreterBridge());
  bool is_static = method->IsStatic();
  uint32_t shorty_len;
  const char* shorty =
      method->GetInterfaceMethodIfProxy(kRuntimePointerSize)->GetShorty(&shorty_len);

  ScopedObjectAccessUnchecked soa(self);
  RememberForGcArgumentVisitor visitor(sp, is_static, shorty, shorty_len, &soa);
  visitor.VisitArguments();

  instrumentation->PushInstrumentationStackFrame(self,
                                                 is_static ? nullptr : this_object,
                                                 method,
                                                 QuickArgumentVisitor::GetCallingPc(sp),
                                                 interpreter_entry);

  visitor.FixupReferences();
  if (UNLIKELY(self->IsExceptionPending())) {
    return nullptr;
  }
  CHECK(result != nullptr) << method->PrettyMethod();
  return result;
}

extern "C" TwoWordReturn artInstrumentationMethodExitFromCode(Thread* self,
                                                              ArtMethod** sp,
                                                              uint64_t* gpr_result,
                                                              uint64_t* fpr_result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK_EQ(reinterpret_cast<uintptr_t>(self), reinterpret_cast<uintptr_t>(Thread::Current()));
  CHECK(gpr_result != nullptr);
  CHECK(fpr_result != nullptr);
  // Instrumentation exit stub must not be entered with a pending exception.
  CHECK(!self->IsExceptionPending()) << "Enter instrumentation exit stub with pending exception "
                                     << self->GetException()->Dump();
  // Compute the address of the return PC and sanity check that it currently holds 0.
  size_t return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA,
                                                        CalleeSaveType::kSaveEverything);
  uintptr_t* return_pc = reinterpret_cast<uintptr_t*>(reinterpret_cast<uint8_t*>(sp) +
                                                      return_pc_offset);
  CHECK_EQ(*return_pc, 0U);

  // Pop the frame filling in the return pc. The low half of the return value is 0 when
  // deoptimization shouldn't be performed, with the high half holding the return address. When
  // deoptimization should be performed, the low half is zero and the high half holds the address
  // of the deoptimization entry point.
  instrumentation::Instrumentation* instrumentation = Runtime::Current()->GetInstrumentation();
  TwoWordReturn return_or_deoptimize_pc = instrumentation->PopInstrumentationStackFrame(
      self, return_pc, gpr_result, fpr_result);
  if (self->IsExceptionPending() || self->ObserveAsyncException()) {
    return GetTwoWordFailureValue();
  }
  return return_or_deoptimize_pc;
}

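// Aside: TwoWordReturn lets a C entrypoint hand two machine words back to its assembly caller in
// a single return value (a register pair on 32-bit targets). A minimal model of the packing,
// assuming a 32-bit target where the type is a plain uint64_t; the real definition and the
// GetTwoWordSuccessValue / GetTwoWordFailureValue helpers are per-architecture:
//
//   static inline uint64_t PackTwoWordReturn(uint32_t hi, uint32_t lo) {
//     return (static_cast<uint64_t>(hi) << 32) | lo;  // hi:lo across two registers.
//   }
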
static std::string DumpInstruction(ArtMethod* method, uint32_t dex_pc)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (dex_pc == static_cast<uint32_t>(-1)) {
    CHECK(method == jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt));
    return "<native>";
  } else {
    CodeItemInstructionAccessor accessor = method->DexInstructions();
    CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits());
    return accessor.InstructionAt(dex_pc).DumpString(method->GetDexFile());
  }
}

static void DumpB74410240ClassData(ObjPtr<mirror::Class> klass)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  std::string storage;
  const char* descriptor = klass->GetDescriptor(&storage);
  LOG(FATAL_WITHOUT_ABORT) << "  " << DescribeLoaders(klass->GetClassLoader(), descriptor);
  const OatDexFile* oat_dex_file = klass->GetDexFile().GetOatDexFile();
  if (oat_dex_file != nullptr) {
    const OatFile* oat_file = oat_dex_file->GetOatFile();
    const char* dex2oat_cmdline =
        oat_file->GetOatHeader().GetStoreValueByKey(OatHeader::kDex2OatCmdLineKey);
    LOG(FATAL_WITHOUT_ABORT) << "    OatFile: " << oat_file->GetLocation()
        << "; " << (dex2oat_cmdline != nullptr ? dex2oat_cmdline : "<not recorded>");
  }
}

static void DumpB74410240DebugData(ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
  // Mimic the search for the caller and dump some data while doing so.
  LOG(FATAL_WITHOUT_ABORT) << "Dumping debugging data, please attach a bugreport to b/74410240.";

  constexpr CalleeSaveType type = CalleeSaveType::kSaveRefsAndArgs;
  CHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(type));

  const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
  auto** caller_sp = reinterpret_cast<ArtMethod**>(
      reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
  const size_t callee_return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, type);
  uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
      (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
  ArtMethod* outer_method = *caller_sp;

  if (UNLIKELY(caller_pc == reinterpret_cast<uintptr_t>(GetQuickInstrumentationExitPc()))) {
    LOG(FATAL_WITHOUT_ABORT) << "Method: " << outer_method->PrettyMethod()
        << " native pc: " << caller_pc << " Instrumented!";
    return;
  }

  const OatQuickMethodHeader* current_code = outer_method->GetOatQuickMethodHeader(caller_pc);
  CHECK(current_code != nullptr);
  CHECK(current_code->IsOptimized());
  uintptr_t native_pc_offset = current_code->NativeQuickPcOffset(caller_pc);
  CodeInfo code_info(current_code);
  MethodInfo method_info = current_code->GetOptimizedMethodInfo();
  StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
  CHECK(stack_map.IsValid());
  uint32_t dex_pc = stack_map.GetDexPc();

  // Log the outer method and its associated dex file and class table pointer which can be used
  // to find out if the inlined methods were defined by other dex file(s) or class loader(s).
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  LOG(FATAL_WITHOUT_ABORT) << "Outer: " << outer_method->PrettyMethod()
      << " native pc: " << caller_pc
      << " dex pc: " << dex_pc
      << " dex file: " << outer_method->GetDexFile()->GetLocation()
      << " class table: " << class_linker->ClassTableForClassLoader(outer_method->GetClassLoader());
  DumpB74410240ClassData(outer_method->GetDeclaringClass());
  LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(outer_method, dex_pc);

  ArtMethod* caller = outer_method;
  if (stack_map.HasInlineInfo()) {
    InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
    size_t depth = inline_info.GetDepth();
    for (size_t d = 0; d < depth; ++d) {
      const char* tag = "";
      dex_pc = inline_info.GetDexPcAtDepth(d);
      if (inline_info.EncodesArtMethodAtDepth(d)) {
        tag = "encoded ";
        caller = inline_info.GetArtMethodAtDepth(d);
      } else {
        uint32_t method_index = inline_info.GetMethodIndexAtDepth(method_info, d);
        if (dex_pc == static_cast<uint32_t>(-1)) {
          tag = "special ";
          CHECK_EQ(d + 1u, depth);
          caller = jni::DecodeArtMethod(WellKnownClasses::java_lang_String_charAt);
          CHECK_EQ(caller->GetDexMethodIndex(), method_index);
        } else {
          ObjPtr<mirror::DexCache> dex_cache = caller->GetDexCache();
          ObjPtr<mirror::ClassLoader> class_loader = caller->GetClassLoader();
          caller = class_linker->LookupResolvedMethod(method_index, dex_cache, class_loader);
          CHECK(caller != nullptr);
        }
      }
      LOG(FATAL_WITHOUT_ABORT) << "Inlined method #" << d << ": " << tag << caller->PrettyMethod()
          << " dex pc: " << dex_pc
          << " dex file: " << caller->GetDexFile()->GetLocation()
          << " class table: "
          << class_linker->ClassTableForClassLoader(caller->GetClassLoader());
      DumpB74410240ClassData(caller->GetDeclaringClass());
      LOG(FATAL_WITHOUT_ABORT) << " instruction: " << DumpInstruction(caller, dex_pc);
    }
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(
    ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The resolution trampoline stashes the resolved method into the callee-save frame to transport
  // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
  // does not have the same stack layout as the callee-save method).
  ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  InvokeType invoke_type;
  MethodReference called_method(nullptr, 0);
  const bool called_method_known_on_entry = !called->IsRuntimeMethod();
  ArtMethod* caller = nullptr;
  if (!called_method_known_on_entry) {
    caller = QuickArgumentVisitor::GetCallingMethod(sp);
    called_method.dex_file = caller->GetDexFile();

    InvokeType stack_map_invoke_type;
    uint32_t stack_map_dex_method_idx;
    const bool found_stack_map = QuickArgumentVisitor::GetInvokeType(sp,
                                                                     &stack_map_invoke_type,
                                                                     &stack_map_dex_method_idx);
    // For debug builds, we make sure both of the paths are consistent by also looking at the dex
    // code.
    if (!found_stack_map || kIsDebugBuild) {
      uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
      CodeItemInstructionAccessor accessor(caller->DexInstructions());
      CHECK_LT(dex_pc, accessor.InsnsSizeInCodeUnits());
      const Instruction& instr = accessor.InstructionAt(dex_pc);
      Instruction::Code instr_code = instr.Opcode();
      bool is_range;
      switch (instr_code) {
        case Instruction::INVOKE_DIRECT:
          invoke_type = kDirect;
          is_range = false;
          break;
        case Instruction::INVOKE_DIRECT_RANGE:
          invoke_type = kDirect;
          is_range = true;
          break;
        case Instruction::INVOKE_STATIC:
          invoke_type = kStatic;
          is_range = false;
          break;
        case Instruction::INVOKE_STATIC_RANGE:
          invoke_type = kStatic;
          is_range = true;
          break;
        case Instruction::INVOKE_SUPER:
          invoke_type = kSuper;
          is_range = false;
          break;
        case Instruction::INVOKE_SUPER_RANGE:
          invoke_type = kSuper;
          is_range = true;
          break;
        case Instruction::INVOKE_VIRTUAL:
          invoke_type = kVirtual;
          is_range = false;
          break;
        case Instruction::INVOKE_VIRTUAL_RANGE:
          invoke_type = kVirtual;
          is_range = true;
          break;
        case Instruction::INVOKE_INTERFACE:
          invoke_type = kInterface;
          is_range = false;
          break;
        case Instruction::INVOKE_INTERFACE_RANGE:
          invoke_type = kInterface;
          is_range = true;
          break;
        default:
          DumpB74410240DebugData(sp);
          LOG(FATAL) << "Unexpected call into trampoline: " << instr.DumpString(nullptr);
          UNREACHABLE();
      }
      called_method.index = (is_range) ? instr.VRegB_3rc() : instr.VRegB_35c();
      // Check that the invoke matches what we expected; note that this path only happens for
      // debug builds.
      if (found_stack_map) {
        DCHECK_EQ(stack_map_invoke_type, invoke_type);
        if (invoke_type != kSuper) {
          // Super may be sharpened.
          DCHECK_EQ(stack_map_dex_method_idx, called_method.index)
              << called_method.dex_file->PrettyMethod(stack_map_dex_method_idx) << " "
              << called_method.PrettyMethod();
        }
      } else {
        VLOG(dex) << "Accessed dex file for invoke " << invoke_type << " "
                  << called_method.index;
      }
    } else {
      invoke_type = stack_map_invoke_type;
      called_method.index = stack_map_dex_method_idx;
    }
  } else {
    invoke_type = kStatic;
    called_method.dex_file = called->GetDexFile();
    called_method.index = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      called_method.dex_file->GetMethodShorty(called_method.GetMethodId(), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (!called_method_known_on_entry) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
    called = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
        self, called_method.index, caller, invoke_type);

    // Update .bss entry in oat file if any.
    if (called != nullptr && called_method.dex_file->GetOatDexFile() != nullptr) {
      size_t bss_offset = IndexBssMappingLookup::GetBssOffset(
          called_method.dex_file->GetOatDexFile()->GetMethodBssMapping(),
          called_method.index,
          called_method.dex_file->NumMethodIds(),
          static_cast<size_t>(kRuntimePointerSize));
      if (bss_offset != IndexBssMappingLookup::npos) {
        DCHECK_ALIGNED(bss_offset, static_cast<size_t>(kRuntimePointerSize));
        const OatFile* oat_file = called_method.dex_file->GetOatDexFile()->GetOatFile();
        ArtMethod** method_entry = reinterpret_cast<ArtMethod**>(const_cast<uint8_t*>(
            oat_file->BssBegin() + bss_offset));
        DCHECK_GE(method_entry, oat_file->GetBssMethods().data());
        DCHECK_LT(method_entry,
                  oat_file->GetBssMethods().data() + oat_file->GetBssMethods().size());
        *method_entry = called;
      }
    }
  }
  const void* code = nullptr;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << called->PrettyMethod() << " " << invoke_type;
    if (virtual_or_interface || invoke_type == kSuper) {
      // Refine called method based on receiver for kVirtual/kInterface, and
      // caller for kSuper.
      ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        CHECK(receiver != nullptr) << invoke_type;
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called, kRuntimePointerSize);
      } else if (invoke_type == kInterface) {
        CHECK(receiver != nullptr) << invoke_type;
        called = receiver->GetClass()->FindVirtualMethodForInterface(called, kRuntimePointerSize);
      } else {
        DCHECK_EQ(invoke_type, kSuper);
        CHECK(caller != nullptr) << invoke_type;
        ObjPtr<mirror::Class> ref_class = linker->LookupResolvedType(
            caller->GetDexFile()->GetMethodId(called_method.index).class_idx_, caller);
        if (ref_class->IsInterface()) {
          called = ref_class->FindVirtualMethodForInterfaceSuper(called, kRuntimePointerSize);
        } else {
          called = caller->GetDeclaringClass()->GetSuperClass()->GetVTableEntry(
              called->GetMethodIndex(), kRuntimePointerSize);
        }
      }

      CHECK(called != nullptr) << orig_called->PrettyMethod() << " "
                               << mirror::Object::PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();
    }

    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(soa.Self(), called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
        // If we are single-stepping or the called method is deoptimized (by a
        // breakpoint, for example), then we have to execute the called method
        // with the interpreter.
        code = GetQuickToInterpreterBridge();
      } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
        // If the caller is deoptimized (by a breakpoint, for example), we have to
        // continue its execution with interpreter when returning from the called
        // method. Because we do not want to execute the called method with the
        // interpreter, we wrap its execution into the instrumentation stubs.
        // When the called method returns, it will execute the instrumentation
        // exit hook that will determine the need of the interpreter with a call
        // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
        // it is needed.
        code = GetQuickInstrumentationEntryPoint();
      } else {
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else if (called_class->IsInitializing()) {
      if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
        // If we are single-stepping or the called method is deoptimized (by a
        // breakpoint, for example), then we have to execute the called method
        // with the interpreter.
        code = GetQuickToInterpreterBridge();
      } else if (invoke_type == kStatic) {
        // Class is still initializing; go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == nullptr, self->IsExceptionPending());
  // Fixup any locally saved objects that may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  *sp = called;

  return code;
}

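// Aside: a worked example of the instruction decoding above. For a dex instruction such as
//
//   invoke-virtual {v1, v2}, LFoo;.bar:(I)V
//
// the opcode selects invoke_type = kVirtual with is_range = false, and VRegB_35c() yields the
// method index of Foo.bar in the caller's dex file (the *_RANGE variants use VRegB_3rc()
// instead). That index is what the trampoline resolves and, when a .bss mapping exists, caches
// for subsequent calls.
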
/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
 *    when we have to split things.
 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
 *    and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyway, so no padding code is
 *    necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into the LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
 *    the stack.
 * 6) There is only little endian.
 *
 *
 * Actual work is supposed to be done in a delegate of the template type. The interface is as
 * follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR.
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, think the architecture is 32b and aligns 64b.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ have
 *                                            nullptr, as this might be important for null
 *                                            initialization. Must return the handle, that is, the
 *                                            reference to the entry in the HandleScope (nullptr
 *                                            if necessary).
 *
 */
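// A minimal sketch of a delegate satisfying the interface described above (illustrative only;
// the real delegates are ComputeNativeCallFrameSize and its subclasses below):
//
//   class CountingDelegate {
//    public:
//     void PushGpr(uintptr_t val) { gprs_.push_back(val); }
//     void PushFpr4(float val) { fprs32_.push_back(bit_cast<uint32_t, float>(val)); }
//     void PushFpr8(uint64_t val) { fprs64_.push_back(val); }
//     void PushStack(uintptr_t val) { stack_.push_back(val); }
//     uintptr_t PushHandle(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
//       handles_.push_back(ref);
//       return reinterpret_cast<uintptr_t>(ref);  // The real code returns a HandleScope entry.
//     }
//    private:
//     std::vector<uintptr_t> gprs_, stack_;
//     std::vector<uint32_t> fprs32_;
//     std::vector<uint64_t> fprs64_;
//     std::vector<mirror::Object*> handles_;
//   };
//
// Driving it: construct BuildNativeCallFrameStateMachine<CountingDelegate> sm(&delegate), then
// call sm.AdvanceInt(...), sm.AdvanceLong(...), etc., in signature order.
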
template<class T> class BuildNativeCallFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__) && !defined(__LP64__)
  static constexpr bool kNativeSoftFloatAbi = true;  // Treated as soft-float: args go in GPRs.
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiFPRegistersWidened = true;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__mips__) && defined(__LP64__)
  // Let the code prepare GPRs only and we will load the FPRs with the same data.
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 8;
  static constexpr size_t kNumNativeFprArgs = 0;

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs; all on the stack.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs; all on the stack.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 is not using regs anyway.
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildNativeCallFrameStateMachine(T* delegate)
      : gpr_index_(kNumNativeGprArgs),
        fpr_index_(kNumNativeFprArgs),
        stack_entries_(0),
        delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even
    // iff the next register is even; counting down is just to make the compiler happy...
    static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
    static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
  }

  virtual ~BuildNativeCallFrameStateMachine() {}

  bool HavePointerGpr() const {
    return gpr_index_ > 0;
  }

  void AdvancePointer(const void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b.
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }

  bool HaveHandleScopeGpr() const {
    return gpr_index_ > 0;
  }

  void AdvanceHandleScope(mirror::Object* ptr) REQUIRES_SHARED(Locks::mutator_lock_) {
    uintptr_t handle = PushHandle(ptr);
    if (HaveHandleScopeGpr()) {
      gpr_index_--;
      PushGpr(handle);
    } else {
      stack_entries_++;
      PushStack(handle);
      gpr_index_ = 0;
    }
  }

  bool HaveIntGpr() const {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      if (kMultiGPRegistersWidened) {
        DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
        PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
      } else {
        PushGpr(val);
      }
    } else {
      stack_entries_++;
      if (kMultiGPRegistersWidened) {
        DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
        PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
      } else {
        PushStack(val);
      }
      gpr_index_ = 0;
    }
  }

  bool HaveLongGpr() const {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

  bool LongGprNeedsPadding() const {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
           kAlignLongOnStack &&            // and when it needs alignment
           (gpr_index_ & 1) == 1;          // counter is odd, see constructor
  }

  bool LongStackNeedsPadding() const {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
           kAlignLongOnStack &&            // and when it needs 8B alignment
           (stack_entries_ & 1) == 1;      // counter is odd
  }

  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }

  bool HaveFloatFpr() const {
    return fpr_index_ > 0;
  }

  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(bit_cast<uint32_t, float>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiFPRegistersWidened) {
            PushFpr8(bit_cast<uint64_t, double>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
          // Need to widen before storing: Note the "double" in the template instantiation.
          // Note: We need to jump through those hoops to make the compiler happy.
          DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
          PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
        } else {
          PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
        }
        fpr_index_ = 0;
      }
    }
  }

  bool HaveDoubleFpr() const {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() const {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
           kAlignDoubleOnStack &&            // and when it needs alignment
           (fpr_index_ & 1) == 1;            // counter is odd, see constructor
  }

  bool DoubleStackNeedsPadding() const {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
           kAlignDoubleOnStack &&            // and when it needs 8B alignment
           (stack_entries_ & 1) == 1;        // counter is odd
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t GetStackEntries() const {
    return stack_entries_;
  }

  uint32_t GetNumberOfUsedGprs() const {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t GetNumberOfUsedFprs() const {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushHandle(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
    return delegate_->PushHandle(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs.
  uint32_t fpr_index_;      // Number of free FPRs.
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended.
  T* const delegate_;       // What Push implementation gets called.
};

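// Aside: a worked example of the alignment handling above, on 32-bit arm
// (kNumNativeGprArgs = 4, kRegistersNeededForLong = 2, kAlignLongOnStack = true).
// For a native signature like void f(jint, jlong):
//
//   sm.AdvanceInt(i);   // gpr_index_ drops 4 -> 3; the value maps to r0.
//   sm.AdvanceLong(j);  // gpr_index_ is odd, so LongGprNeedsPadding() is true:
//                       // a 0 pad fills r1, then the long occupies the aligned
//                       // r2/r3 pair and gpr_index_ drops to 0.
//
// The same parity test on stack_entries_ keeps 64-bit values 8-byte aligned once
// arguments spill to the stack.
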
// Computes the sizes of register stacks and call stack area. Handling of references can be
// extended in subclasses.
//
// To handle native pointers, use "L" in the shorty for an object reference, which simulates
// them with handles.
class ComputeNativeCallFrameSize {
 public:
  ComputeNativeCallFrameSize() : num_stack_entries_(0) {}

  virtual ~ComputeNativeCallFrameSize() {}

  uint32_t GetStackSize() const {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  uint8_t* LayoutCallStack(uint8_t* sp8) const {
    sp8 -= GetStackSize();
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
    return sp8;
  }

  uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
      const {
    // Assumption is OK right now, as we have soft-float arm.
    size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
    return sp8;
  }

  uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
                            uint32_t** start_fpr) const {
    // Native call stack.
    sp8 = LayoutCallStack(sp8);
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // Put fprs and gprs below.
    sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  virtual void WalkHeader(
      BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED)
      REQUIRES_SHARED(Locks::mutator_lock_) {
  }

  void Walk(const char* shorty, uint32_t shorty_len) REQUIRES_SHARED(Locks::mutator_lock_) {
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);

    WalkHeader(&sm);

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          // TODO: fix abuse of mirror types.
          sm.AdvanceHandleScope(
              reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
          UNREACHABLE();
      }
    }

    num_stack_entries_ = sm.GetStackEntries();
  }

  void PushGpr(uintptr_t /* val */) {
    // not optimizing registers, yet
  }

  void PushFpr4(float /* val */) {
    // not optimizing registers, yet
  }

  void PushFpr8(uint64_t /* val */) {
    // not optimizing registers, yet
  }

  void PushStack(uintptr_t /* val */) {
    // counting is already done in the superclass
  }

  virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 protected:
  uint32_t num_stack_entries_;
};

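// Aside: a minimal sketch of the two-pass idea above. A first pass only counts (this class
// deliberately ignores the pushed values), and the count then sizes the native frame.
// Illustrative numbers for the 32-bit x86 constants above, where every argument goes to the
// stack:
//
//   ComputeNativeCallFrameSize csm;
//   csm.Walk("VLD", 3u);                 // void f(Object, double): 1 handle word + 2 double
//                                        // words = 3 stack entries.
//   uint32_t size = csm.GetStackSize();  // 3 * sizeof(uintptr_t) = 12 bytes; LayoutCallStack()
//                                        // later rounds the stack pointer down to
//                                        // kStackAlignment.
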
Andreas Gampec200a4a2014-06-16 18:39:09 -07001977class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001978 public:
Igor Murashkin06a04e02016-09-13 15:57:37 -07001979 explicit ComputeGenericJniFrameSize(bool critical_native)
1980 : num_handle_scope_references_(0), critical_native_(critical_native) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001981
Andreas Gampec200a4a2014-06-16 18:39:09 -07001982 // Lays out the callee-save frame. Assumes that the incorrect frame corresponding to RefsAndArgs
1983 // is at *m = sp. Will update to point to the bottom of the save frame.
1984 //
1985 // Note: assumes ComputeAll() has been run before.
  void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    ArtMethod* method = **m;

    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), kRuntimePointerSize);

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.

    // "Free" the slot for the method.
    sp8 += sizeof(void*);  // In the callee-save frame we use a full pointer.

    // Under the callee saves put handle scope and new method stack reference.
    size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
    size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);

    sp8 -= scope_and_method;
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));

    uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
    *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
                                        num_handle_scope_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    uint8_t* method_pointer = sp8;
    auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
    *new_method_ref = method;
    *m = new_method_ref;
  }
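
  // After LayoutCalleeSaveFrame, the bottom of the frame looks roughly like this (a sketch
  // derived from the code above; the stack grows towards lower addresses, sizes not to scale):
  //
  //   | RefsAndArgs callee-save frame (method slot freed)  |  <- original sp
  //   | HandleScope (header + reference slots)             |  <- sp8 + sizeof(ArtMethod*)
  //   | ArtMethod* (relocated method reference)            |  <- sp8 == *m, kStackAlignment-aligned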

  // Adds space for the cookie. Note: may leave stack unaligned.
  void LayoutCookie(uint8_t** sp) const {
    // Reference cookie and padding.
    *sp -= 8;
  }
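
  // The 8 bytes reserved here hold the 32-bit JNI local reference cookie plus 4 bytes of
  // padding; the cookie itself is written into this slot by artQuickGenericJniTrampoline below.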

  // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
  // Returns the new bottom. Note: this may be unaligned.
  uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.
    LayoutCalleeSaveFrame(self, m, sp, handle_scope);

    // The bottom of the callee-save frame is now where the method is, *m.
    uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);

    // Add space for cookie.
    LayoutCookie(&sp8);

    return sp8;
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
                         HandleScope** handle_scope, uintptr_t** start_stack, uintptr_t** start_gpr,
                         uint32_t** start_fpr)
      REQUIRES_SHARED(Locks::mutator_lock_) {
    Walk(shorty, shorty_len);

    // JNI part.
    uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);

    sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;

  // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
  void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);

 private:
  uint32_t num_handle_scope_references_;
  const bool critical_native_;
};

uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
  num_handle_scope_references_++;
  return reinterpret_cast<uintptr_t>(nullptr);
}

void ComputeGenericJniFrameSize::WalkHeader(
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
  // First 2 parameters are always excluded for @CriticalNative.
  if (UNLIKELY(critical_native_)) {
    return;
  }

  // JNIEnv
  sm->AdvancePointer(nullptr);

  // Class object or this as first argument
  sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
}
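
// The 0x12345678 above is a dummy value that is only counted, never dereferenced:
// ComputeGenericJniFrameSize::PushHandle ignores its argument and merely bumps the slot count.
// For a non-@CriticalNative method, the native frame being sized here models a call of the form
//   ret_type fn(JNIEnv* env, jobject_or_jclass, <shorty-derived args>...);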

// Class to push values to three separate regions. Used to fill the native call part. Adheres to
// the template requirements of BuildNativeCallFrameStateMachine.
class FillNativeCall {
 public:
  FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
      cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}

  virtual ~FillNativeCall() {}

  void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
    cur_gpr_reg_ = gpr_regs;
    cur_fpr_reg_ = fpr_regs;
    cur_stack_arg_ = stack_args;
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    *cur_fpr_reg_ = val;
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

  virtual uintptr_t PushHandle(mirror::Object*) REQUIRES_SHARED(Locks::mutator_lock_) {
    LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
    UNREACHABLE();
  }

 private:
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
};
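
// Note the asymmetry in FillNativeCall: GPR and stack slots are native-word-sized (uintptr_t),
// while the FPR area is backed by a uint32_t array, so PushFpr8 stores its 64-bit value across
// two consecutive 32-bit slots and advances cur_fpr_reg_ by two.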

// Visits arguments on the stack, placing them into a region lower down the stack for the benefit
// of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(Thread* self,
                              bool is_static,
                              bool critical_native,
                              const char* shorty,
                              uint32_t shorty_len,
                              ArtMethod*** sp)
      : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
        jni_call_(nullptr, nullptr, nullptr, nullptr, critical_native),
        sm_(&jni_call_) {
    ComputeGenericJniFrameSize fsc(critical_native);
    uintptr_t* start_gpr_reg;
    uint32_t* start_fpr_reg;
    uintptr_t* start_stack_arg;
    bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
                                             &handle_scope_,
                                             &start_stack_arg,
                                             &start_gpr_reg, &start_fpr_reg);

    jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);

    // First 2 parameters are always excluded for @CriticalNative methods.
    if (LIKELY(!critical_native)) {
      // The JNI environment is always the first argument.
      sm_.AdvancePointer(self->GetJniEnv());

      if (is_static) {
        sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
      }  // else "this" reference is already handled by QuickArgumentVisitor.
    }
  }

  void Visit() REQUIRES_SHARED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeHandleScope(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);

  StackReference<mirror::Object>* GetFirstHandleScopeEntry() {
    return handle_scope_->GetHandle(0).GetReference();
  }

  jobject GetFirstHandleScopeJObject() const REQUIRES_SHARED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).ToJObject();
  }

  void* GetBottomOfUsedArea() const {
    return bottom_of_used_area_;
  }

 private:
  // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
  class FillJniCall FINAL : public FillNativeCall {
   public:
    FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
                HandleScope* handle_scope, bool critical_native)
        : FillNativeCall(gpr_regs, fpr_regs, stack_args),
          handle_scope_(handle_scope),
          cur_entry_(0),
          critical_native_(critical_native) {}

    uintptr_t PushHandle(mirror::Object* ref) OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_);

    void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
      FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
      handle_scope_ = scope;
      cur_entry_ = 0U;
    }

    void ResetRemainingScopeSlots() REQUIRES_SHARED(Locks::mutator_lock_) {
      // Initialize padding entries.
      size_t expected_slots = handle_scope_->NumberOfReferences();
      while (cur_entry_ < expected_slots) {
        handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
      }

      if (!critical_native_) {
        // Non-critical natives have at least the self class (jclass) or this (jobject).
        DCHECK_NE(cur_entry_, 0U);
      }
    }

    bool CriticalNative() const {
      return critical_native_;
    }

   private:
    HandleScope* handle_scope_;
    size_t cur_entry_;
    const bool critical_native_;
  };

  HandleScope* handle_scope_;
  FillJniCall jni_call_;
  void* bottom_of_used_area_;

  BuildNativeCallFrameStateMachine<FillJniCall> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
  uintptr_t tmp;
  MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
  h.Assign(ref);
  tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
  cur_entry_++;
  return tmp;
}
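
// The uintptr_t returned by PushHandle is the address of the handle-scope slot reinterpreted as
// a jobject; that slot address, not the raw object pointer, is what ends up in the register or
// stack argument seen by the native code, keeping the reference visible to the GC.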

void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read into a union so that we don't cast to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
}

void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
  // Clear out rest of the scope.
  jni_call_.ResetRemainingScopeSlots();
  if (!jni_call_.CriticalNative()) {
    // Install HandleScope.
    self->PushHandleScope(handle_scope_);
  }
}
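
// For @CriticalNative methods the handle scope is never installed: such methods receive no
// JNIEnv* and no jobject/jclass, so there are no managed references that would need to stay
// visible to the GC across the native call.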

#if defined(__arm__) || defined(__aarch64__)
extern "C" const void* artFindNativeMethod();
#else
extern "C" const void* artFindNativeMethod(Thread* self);
#endif

static uint64_t artQuickGenericJniEndJNIRef(Thread* self,
                                            uint32_t cookie,
                                            bool fast_native ATTRIBUTE_UNUSED,
                                            jobject l,
                                            jobject lock) {
  // TODO: add entrypoints for @FastNative returning objects.
  if (lock != nullptr) {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
  } else {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
  }
}

static void artQuickGenericJniEndJNINonRef(Thread* self,
                                           uint32_t cookie,
                                           bool fast_native,
                                           jobject lock) {
  if (lock != nullptr) {
    JniMethodEndSynchronized(cookie, lock, self);
    // Ignore "fast_native" here because synchronized functions aren't very fast.
  } else {
    if (UNLIKELY(fast_native)) {
      JniMethodFastEnd(cookie, self);
    } else {
      JniMethodEnd(cookie, self);
    }
  }
}
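
// These helpers mirror the JniMethodEnd* entrypoints used by compiled JNI stubs. The trampoline
// below picks the "Ref" flavor when the method's shorty says it returns a reference
// (shorty[0] == 'L'), so that the returned local reference is processed before being handed back.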

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a HandleScope and call stack, and fill a mini stack with values to be pushed to
 * registers. The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this, as the handle scope needs to go into the callee-save frame.
 *
 * The return value of this function denotes:
 * 1) How many bytes of the alloca can be released, if the value is non-negative.
 * 2) An error, if the value is negative.
 */
extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Note: We cannot walk the stack properly until fixed up below.
  ArtMethod* called = *sp;
  DCHECK(called->IsNative()) << called->PrettyMethod(true);
  Runtime* runtime = Runtime::Current();
  uint32_t shorty_len = 0;
  const char* shorty = called->GetShorty(&shorty_len);
  bool critical_native = called->IsCriticalNative();
  bool fast_native = called->IsFastNative();
  bool normal_native = !critical_native && !fast_native;

  // Run the visitor and update sp.
  BuildGenericJniFrameVisitor visitor(self,
                                      called->IsStatic(),
                                      critical_native,
                                      shorty,
                                      shorty_len,
                                      &sp);
  {
    ScopedAssertNoThreadSuspension sants(__FUNCTION__);
    visitor.VisitArguments();
    // FinalizeHandleScope pushes the handle scope on the thread.
    visitor.FinalizeHandleScope(self);
  }

  // Fix up managed-stack things in Thread. After this we can walk the stack.
  self->SetTopOfStackTagged(sp);

  self->VerifyStack();

  // We can now walk the stack if needed by JIT GC from MethodEntered() for JIT-on-first-use.
  jit::Jit* jit = runtime->GetJit();
  if (jit != nullptr) {
    jit->MethodEntered(self, called);
  }

  uint32_t cookie;
  uint32_t* sp32;
  // Skip calling JniMethodStart for @CriticalNative.
  if (LIKELY(!critical_native)) {
    // Start JNI, save the cookie.
    if (called->IsSynchronized()) {
      DCHECK(normal_native) << "@FastNative and synchronized are not supported";
      cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
      if (self->IsExceptionPending()) {
        self->PopHandleScope();
        // A negative value denotes an error.
        return GetTwoWordFailureValue();
      }
    } else {
      if (fast_native) {
        cookie = JniMethodFastStart(self);
      } else {
        DCHECK(normal_native);
        cookie = JniMethodStart(self);
      }
    }
    sp32 = reinterpret_cast<uint32_t*>(sp);
    *(sp32 - 1) = cookie;
  }
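
  // If we saved a cookie, it now sits in the 4-byte slot that LayoutCookie reserved directly
  // below the relocated ArtMethod*; artQuickGenericJniEndTrampoline reloads it from the same
  // slot after the native code returns.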

  // Retrieve the stored native code.
  void const* nativeCode = called->GetEntryPointFromJni();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
#if defined(__arm__) || defined(__aarch64__)
    nativeCode = artFindNativeMethod();
#else
    nativeCode = artFindNativeMethod(self);
#endif

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.

      // @CriticalNative calls do not need to call back into JniMethodEnd.
      if (LIKELY(!critical_native)) {
        // End JNI, as the assembly will move to deliver the exception.
        jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
        if (shorty[0] == 'L') {
          artQuickGenericJniEndJNIRef(self, cookie, fast_native, nullptr, lock);
        } else {
          artQuickGenericJniEndJNINonRef(self, cookie, fast_native, lock);
        }
      }

      return GetTwoWordFailureValue();
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

#if defined(__mips__) && !defined(__LP64__)
  // On MIPS32 if the first two arguments are floating-point, we need to know their types
  // so that art_quick_generic_jni_trampoline can correctly extract them from the stack
  // and load into floating-point registers.
  // Possible arrangements of first two floating-point arguments on the stack (32-bit FPU
  // view):
  // (1)
  //  | DOUBLE        | DOUBLE        | other args, if any
  //  |  F12  |  F13  |  F14  |  F15  |
  //  |  SP+0 |  SP+4 |  SP+8 | SP+12 | SP+16
  // (2)
  //  | DOUBLE        | FLOAT | (PAD) | other args, if any
  //  |  F12  |  F13  |  F14  |       |
  //  |  SP+0 |  SP+4 |  SP+8 | SP+12 | SP+16
  // (3)
  //  | FLOAT | (PAD) | DOUBLE        | other args, if any
  //  |  F12  |       |  F14  |  F15  |
  //  |  SP+0 |  SP+4 |  SP+8 | SP+12 | SP+16
  // (4)
  //  | FLOAT | FLOAT | other args, if any
  //  |  F12  |  F14  |
  //  |  SP+0 |  SP+4 |  SP+8
  // As you can see, only the last case (4) is special. In all others we can just
  // load F12/F13 and F14/F15 in the same manner.
  // Set bit 0 of the native code address to 1 in this case (valid code addresses
  // are always a multiple of 4 on MIPS32, so we have 2 spare bits available).
  if (nativeCode != nullptr &&
      shorty != nullptr &&
      shorty_len >= 3 &&
      shorty[1] == 'F' &&
      shorty[2] == 'F') {
    nativeCode = reinterpret_cast<void*>(reinterpret_cast<uintptr_t>(nativeCode) | 1);
  }
#endif

  // Return native code addr (lo) and bottom of alloca address (hi).
  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
                                reinterpret_cast<uintptr_t>(nativeCode));
}
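
// On return, the assembly stub is expected to release the alloca down to the returned bottom of
// the used area and then branch to the returned native code pointer, with the arguments already
// staged in the register/stack regions filled above.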

// Defined in quick_jni_entrypoints.cc.
extern uint64_t GenericJniMethodEnd(Thread* self, uint32_t saved_local_ref_cookie,
                                    jvalue result, uint64_t result_f, ArtMethod* called,
                                    HandleScope* handle_scope);

/*
 * Called after the native JNI code returns. Responsible for cleanup (handle scope, saved state)
 * and unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
                                                    jvalue result,
                                                    uint64_t result_f) {
  // We're here just back from a native call. We do not hold the shared mutator lock at this
  // point; it is only reacquired in GoToRunnable(), called later from GenericJniMethodEnd().
  // Accessing objects or doing anything else that requires the mutator lock before then would
  // cause problems, as the GC may hold the exclusive mutator lock and may be moving objects.
  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  DCHECK(self->GetManagedStack()->GetTopQuickFrameTag());
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  ArtMethod* called = *sp;
  uint32_t cookie = *(sp32 - 1);
  HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp) + sizeof(*sp));
  return GenericJniMethodEnd(self, cookie, result, result_f, called, table);
}
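
// Note how the HandleScope is recovered above: it lives at sp + sizeof(ArtMethod*), exactly
// where LayoutCalleeSaveFrame placed it, so no extra bookkeeping is needed to locate the
// spilled references after the native call.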

// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo
// value for the method pointer.
//
// It is valid to use this, as at the usage points here (returns from C functions) we are assumed
// to hold the mutator lock (see the REQUIRES_SHARED(Locks::mutator_lock_) annotations).

template <InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx,
                                     ObjPtr<mirror::Object> this_object,
                                     Thread* self,
                                     ArtMethod** sp) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  ArtMethod* method = FindMethodFast<type, access_check>(method_idx, this_object, caller_method);
  if (UNLIKELY(method == nullptr)) {
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx,
                                                      &this_object,
                                                      caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
                          << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                                  \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                                  \
  TwoWordReturn artInvokeCommon<type, access_check>(                                              \
      uint32_t method_idx, ObjPtr<mirror::Object> this_object, Thread* self, ArtMethod** sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx,
    mirror::Object* this_object ATTRIBUTE_UNUSED,
    Thread* self,
    ArtMethod** sp) REQUIRES_SHARED(Locks::mutator_lock_) {
  // For static calls, this_object is not required and may be random garbage. Don't pass it down,
  // so that it doesn't trip the ObjPtr alignment check.
  return artInvokeCommon<kStatic, true>(method_idx, nullptr, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}

// Helper function for art_quick_imt_conflict_trampoline to look up the interface method.
extern "C" ArtMethod* artLookupResolvedMethod(uint32_t method_index, ArtMethod* referrer)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  DCHECK(!referrer->IsProxyMethod());
  ArtMethod* result = Runtime::Current()->GetClassLinker()->LookupResolvedMethod(
      method_index, referrer->GetDexCache(), referrer->GetClassLoader());
  DCHECK(result == nullptr ||
         result->GetDeclaringClass()->IsInterface() ||
         result->GetDeclaringClass() ==
             WellKnownClasses::ToClass(WellKnownClasses::java_lang_Object))
      << result->PrettyMethod();
  return result;
}
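
// LookupResolvedMethod only consults previously resolved state and performs no class loading or
// allocation, which is what makes the ScopedAssertNoThreadSuspension above safe to assert.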

// Determine the target of an interface dispatch. This object is known non-null. The interface
// method is the method returned by the dex cache in the conflict trampoline; it may be null if
// the method is not yet resolved, in which case it is resolved from the caller's dex file below.
2627extern "C" TwoWordReturn artInvokeInterfaceTrampoline(ArtMethod* interface_method,
Mathieu Chartieref41db72016-10-25 15:08:01 -07002628 mirror::Object* raw_this_object,
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002629 Thread* self,
2630 ArtMethod** sp)
Andreas Gampebdf7f1c2016-08-30 16:38:47 -07002631 REQUIRES_SHARED(Locks::mutator_lock_) {
Ian Rogers1d8cdbc2014-09-22 22:51:09 -07002632 ScopedQuickEntrypointChecks sqec(self);
Vladimir Marko302f69c2017-07-25 15:27:15 +01002633 StackHandleScope<2> hs(self);
2634 Handle<mirror::Object> this_object = hs.NewHandle(raw_this_object);
2635 Handle<mirror::Class> cls = hs.NewHandle(this_object->GetClass());
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002636
Nicolas Geoffray5bf7bac2016-07-06 14:18:23 +00002637 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002638 ArtMethod* method = nullptr;
Andreas Gampe542451c2016-07-26 09:02:02 -07002639 ImTable* imt = cls->GetImt(kRuntimePointerSize);
Nicolas Geoffray796d6302016-03-13 22:22:31 +00002640
Vladimir Marko302f69c2017-07-25 15:27:15 +01002641 if (UNLIKELY(interface_method == nullptr)) {
Vladimir Marko07bfbac2017-07-06 14:55:02 +01002642 // The interface method is unresolved, so resolve it in the dex file of the caller.
Jeff Hao5667f562017-02-27 19:32:01 -08002643 // Fetch the dex_method_idx of the target interface method from the caller.
2644 uint32_t dex_method_idx;
2645 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
Mathieu Chartier808c7a52017-12-15 11:19:33 -08002646 const Instruction& instr = caller_method->DexInstructions().InstructionAt(dex_pc);
Vladimir Markod7559b72017-09-28 13:50:37 +01002647 Instruction::Code instr_code = instr.Opcode();
Jeff Hao5667f562017-02-27 19:32:01 -08002648 DCHECK(instr_code == Instruction::INVOKE_INTERFACE ||
2649 instr_code == Instruction::INVOKE_INTERFACE_RANGE)
Vladimir Markod7559b72017-09-28 13:50:37 +01002650 << "Unexpected call into interface trampoline: " << instr.DumpString(nullptr);
Jeff Hao5667f562017-02-27 19:32:01 -08002651 if (instr_code == Instruction::INVOKE_INTERFACE) {
Vladimir Markod7559b72017-09-28 13:50:37 +01002652 dex_method_idx = instr.VRegB_35c();
Jeff Hao5667f562017-02-27 19:32:01 -08002653 } else {
2654 DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
Vladimir Markod7559b72017-09-28 13:50:37 +01002655 dex_method_idx = instr.VRegB_3rc();
Jeff Hao5667f562017-02-27 19:32:01 -08002656 }
2657
Vladimir Marko302f69c2017-07-25 15:27:15 +01002658 const DexFile& dex_file = caller_method->GetDeclaringClass()->GetDexFile();
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002659 uint32_t shorty_len;
Vladimir Marko302f69c2017-07-25 15:27:15 +01002660 const char* shorty = dex_file.GetMethodShorty(dex_file.GetMethodId(dex_method_idx),
2661 &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002662 {
Vladimir Marko302f69c2017-07-25 15:27:15 +01002663 // Remember the args in case a GC happens in ClassLinker::ResolveMethod().
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002664 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
2665 RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
2666 visitor.VisitArguments();
Vladimir Marko302f69c2017-07-25 15:27:15 +01002667 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
2668 interface_method = class_linker->ResolveMethod<ClassLinker::ResolveMode::kNoChecks>(
2669 self, dex_method_idx, caller_method, kInterface);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002670 visitor.FixupReferences();
2671 }
2672
Vladimir Marko302f69c2017-07-25 15:27:15 +01002673 if (UNLIKELY(interface_method == nullptr)) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002674 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07002675 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07002676 }
Vladimir Marko302f69c2017-07-25 15:27:15 +01002677 }
2678
2679 DCHECK(!interface_method->IsRuntimeMethod());
2680 // Look whether we have a match in the ImtConflictTable.
2681 uint32_t imt_index = ImTable::GetImtIndex(interface_method);
2682 ArtMethod* conflict_method = imt->Get(imt_index, kRuntimePointerSize);
2683 if (LIKELY(conflict_method->IsRuntimeMethod())) {
2684 ImtConflictTable* current_table = conflict_method->GetImtConflictTable(kRuntimePointerSize);
2685 DCHECK(current_table != nullptr);
2686 method = current_table->Lookup(interface_method, kRuntimePointerSize);
2687 } else {
    // It seems we aren't really a conflict method!
    if (kIsDebugBuild) {
      ArtMethod* m = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
      CHECK_EQ(conflict_method, m)
          << interface_method->PrettyMethod() << " / " << conflict_method->PrettyMethod()
          << " / " << ArtMethod::PrettyMethod(m) << " / " << cls->PrettyClass();
    }
    method = conflict_method;
  }
  if (method != nullptr) {
    return GetTwoWordSuccessValue(
        reinterpret_cast<uintptr_t>(method->GetEntryPointFromQuickCompiledCode()),
        reinterpret_cast<uintptr_t>(method));
  }

  // No match, use the IfTable.
  method = cls->FindVirtualMethodForInterface(interface_method, kRuntimePointerSize);
  if (UNLIKELY(method == nullptr)) {
    ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
        interface_method, this_object.Get(), caller_method);
    return GetTwoWordFailureValue();  // Failure.
  }

  // We arrive here if we have found an implementation, and it is not in the ImtConflictTable.
  // We create a new table with the new pair { interface_method, method }.
  DCHECK(conflict_method->IsRuntimeMethod());
  ArtMethod* new_conflict_method = Runtime::Current()->GetClassLinker()->AddMethodToConflictTable(
      cls.Get(),
      conflict_method,
      interface_method,
      method,
      /*force_new_conflict_method*/ false);
  if (new_conflict_method != conflict_method) {
    // Update the IMT if we create a new conflict method. No fence needed here, as the
    // data is consistent.
    imt->Set(imt_index,
             new_conflict_method,
             kRuntimePointerSize);
  }
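
  // Later interface calls that hit this IMT slot will find the updated conflict table and reach
  // their target without re-entering the runtime, so this slow path is normally paid only once
  // per { interface method, receiver class } pair.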

  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << method->PrettyMethod()
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Returns the shorty type character so the caller can determine how to put |result| into the
// expected register(s). The shorty type is statically known at the call site, so the compiler
// could in principle call a different flavor of this code path per type, though that would
// require a separate entry point for each type.
2743extern "C" uintptr_t artInvokePolymorphic(
2744 JValue* result,
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002745 mirror::Object* raw_receiver,
Orion Hodsonac141392017-01-13 11:53:47 +00002746 Thread* self,
2747 ArtMethod** sp)
2748 REQUIRES_SHARED(Locks::mutator_lock_) {
2749 ScopedQuickEntrypointChecks sqec(self);
Andreas Gampe8228cdf2017-05-30 15:03:54 -07002750 DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(CalleeSaveType::kSaveRefsAndArgs));
Orion Hodsonac141392017-01-13 11:53:47 +00002751
2752 // Start new JNI local reference state
2753 JNIEnvExt* env = self->GetJniEnv();
2754 ScopedObjectAccessUnchecked soa(env);
2755 ScopedJniEnvLocalRefState env_state(env);
2756 const char* old_cause = self->StartAssertNoThreadSuspension("Making stack arguments safe.");
2757
2758 // From the instruction, get the |callsite_shorty| and expose arguments on the stack to the GC.
2759 ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
2760 uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
Mathieu Chartier73f21d42018-01-02 14:26:50 -08002761 const Instruction& inst = caller_method->DexInstructions().InstructionAt(dex_pc);
Vladimir Markod7559b72017-09-28 13:50:37 +01002762 DCHECK(inst.Opcode() == Instruction::INVOKE_POLYMORPHIC ||
2763 inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
Orion Hodson06d10a72018-05-14 08:53:38 +01002764 const dex::ProtoIndex proto_idx(inst.VRegH());
Vladimir Marko666ee3d2017-12-11 18:37:36 +00002765 const char* shorty = caller_method->GetDexFile()->GetShorty(proto_idx);
Orion Hodsonac141392017-01-13 11:53:47 +00002766 const size_t shorty_length = strlen(shorty);
2767 static const bool kMethodIsStatic = false; // invoke() and invokeExact() are not static.
2768 RememberForGcArgumentVisitor gc_visitor(sp, kMethodIsStatic, shorty, shorty_length, &soa);
Orion Hodsonfea84dd2017-01-16 13:52:20 +00002769 gc_visitor.VisitArguments();
Orion Hodsonac141392017-01-13 11:53:47 +00002770
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002771 // Wrap raw_receiver in a Handle for safety.
2772 StackHandleScope<3> hs(self);
2773 Handle<mirror::Object> receiver_handle(hs.NewHandle(raw_receiver));
2774 raw_receiver = nullptr;
Orion Hodsonac141392017-01-13 11:53:47 +00002775 self->EndAssertNoThreadSuspension(old_cause);
2776
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002777 // Resolve method.
Orion Hodsonac141392017-01-13 11:53:47 +00002778 ClassLinker* linker = Runtime::Current()->GetClassLinker();
Vladimir Markoba118822017-06-12 15:41:56 +01002779 ArtMethod* resolved_method = linker->ResolveMethod<ClassLinker::ResolveMode::kCheckICCEAndIAE>(
Vladimir Markod7559b72017-09-28 13:50:37 +01002780 self, inst.VRegB(), caller_method, kVirtual);
Orion Hodson43f0cdb2017-10-10 14:47:32 +01002781
2782 if (UNLIKELY(receiver_handle.IsNull())) {
Orion Hodsonac141392017-01-13 11:53:47 +00002783 ThrowNullPointerExceptionForMethodAccess(resolved_method, InvokeType::kVirtual);
2784 return static_cast<uintptr_t>('V');
2785 }
2786
Orion Hodsone7732be2017-10-11 14:35:20 +01002787 Handle<mirror::MethodType> method_type(
2788 hs.NewHandle(linker->ResolveMethodType(self, proto_idx, caller_method)));
2789
Orion Hodsonac141392017-01-13 11:53:47 +00002790 // This implies we couldn't resolve one or more types in this method handle.
2791 if (UNLIKELY(method_type.IsNull())) {
2792 CHECK(self->IsExceptionPending());
2793 return static_cast<uintptr_t>('V');
2794 }
2795
Vladimir Markod7559b72017-09-28 13:50:37 +01002796 DCHECK_EQ(ArtMethod::NumArgRegisters(shorty) + 1u, (uint32_t)inst.VRegA());
Orion Hodsonac141392017-01-13 11:53:47 +00002797 DCHECK_EQ(resolved_method->IsStatic(), kMethodIsStatic);
2798
2799 // Fix references before constructing the shadow frame.
2800 gc_visitor.FixupReferences();
2801
2802 // Construct shadow frame placing arguments consecutively from |first_arg|.
Vladimir Markod7559b72017-09-28 13:50:37 +01002803 const bool is_range = (inst.Opcode() == Instruction::INVOKE_POLYMORPHIC_RANGE);
2804 const size_t num_vregs = is_range ? inst.VRegA_4rcc() : inst.VRegA_45cc();
Orion Hodsonac141392017-01-13 11:53:47 +00002805 const size_t first_arg = 0;
2806 ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
2807 CREATE_SHADOW_FRAME(num_vregs, /* link */ nullptr, resolved_method, dex_pc);
2808 ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
2809 ScopedStackedShadowFramePusher
2810 frame_pusher(self, shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
2811 BuildQuickShadowFrameVisitor shadow_frame_builder(sp,
2812 kMethodIsStatic,
2813 shorty,
2814 strlen(shorty),
2815 shadow_frame,
2816 first_arg);
2817 shadow_frame_builder.VisitArguments();
2818
2819 // Push a transition back into managed code onto the linked list in thread.
2820 ManagedStack fragment;
2821 self->PushManagedStackFragment(&fragment);
2822

  // The shadow frame now holds the receiver in vreg 0 and the arguments in the consecutive
  // registers after it, so range-style operands describe the call regardless of the original
  // 45cc/4rcc encoding.
  RangeInstructionOperands operands(first_arg + 1, num_vregs - 1);
  Intrinsics intrinsic = static_cast<Intrinsics>(resolved_method->GetIntrinsic());
  bool success = false;
  if (resolved_method->GetDeclaringClass() == mirror::MethodHandle::StaticClass()) {
    Handle<mirror::MethodHandle> method_handle(hs.NewHandle(
        ObjPtr<mirror::MethodHandle>::DownCast(MakeObjPtr(receiver_handle.Get()))));
    if (intrinsic == Intrinsics::kMethodHandleInvokeExact) {
      success = MethodHandleInvokeExact(self,
                                        *shadow_frame,
                                        method_handle,
                                        method_type,
                                        &operands,
                                        result);
    } else {
      DCHECK_EQ(static_cast<uint32_t>(intrinsic),
                static_cast<uint32_t>(Intrinsics::kMethodHandleInvoke));
      success = MethodHandleInvoke(self,
                                   *shadow_frame,
                                   method_handle,
                                   method_type,
                                   &operands,
                                   result);
    }
  } else {
    DCHECK_EQ(mirror::VarHandle::StaticClass(), resolved_method->GetDeclaringClass());
    Handle<mirror::VarHandle> var_handle(hs.NewHandle(
        ObjPtr<mirror::VarHandle>::DownCast(MakeObjPtr(receiver_handle.Get()))));
    mirror::VarHandle::AccessMode access_mode =
        mirror::VarHandle::GetAccessModeByIntrinsic(intrinsic);
    success = VarHandleInvokeAccessor(self,
                                      *shadow_frame,
                                      var_handle,
                                      method_type,
                                      access_mode,
                                      &operands,
                                      result);
  }

  DCHECK(success || self->IsExceptionPending());

  // Pop transition record.
  self->PopManagedStackFragment(fragment);

  return static_cast<uintptr_t>(shorty[0]);
}
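
// The single shorty character returned above tells the assembly stub where to read |result|
// from (e.g. core register(s) for 'I'/'J', a floating-point register for 'F'/'D', nothing for
// 'V'), which is also why the error paths return 'V' after raising an exception.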

}  // namespace art