/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "callee_save_frame.h"
#include "common_throws.h"
#include "debugger.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "entrypoints/runtime_asm_entrypoints.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "method_reference.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/method.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "oat_quick_method_header.h"
#include "quick_exception_handler.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"
#include "stack.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
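  // Each architecture block below draws the RefsAndArgs callee-save frame it expects (higher
  // addresses at the top, sp at the bottom) and defines the constants the generic visiting
  // logic in this class consumes: how many GPR/FPR argument registers are spilled, where the
  // first GPR/FPR spill slot and the return address live, and how long/double pairs are
  // aligned or split.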
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    4x6 bytes callee saves
  // | R3         |
  // | R2         |
  // | R1         |
  // | S15        |
  // | :          |
  // | S0         |
  // |            |    4x2 bytes padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kAlignPairRegister = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSoftFloatAbi = kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = !kArm32QuickCodeUseSoftFloat;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;
  static constexpr size_t kNumQuickFprArgs = kArm32QuickCodeUseSoftFloat ? 0 : 16;
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);    // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);    // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && !defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F15        |
  // | F14        |    f_arg1
  // | F13        |
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = true;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = true;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 single-precision FPR slots (at most 2 FP
                                                 // arguments): floats can be passed only in
                                                 // even-numbered registers and each double
                                                 // occupies two registers.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 32;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 76;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__) && defined(__LP64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A7         |    arg7
  // | A6         |    arg6
  // | A5         |    arg5
  // | A4         |    arg4
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | F19        |    f_arg7
  // | F18        |    f_arg6
  // | F17        |    f_arg5
  // | F16        |    f_arg4
  // | F15        |    f_arg3
  // | F14        |    f_arg2
  // | F13        |    f_arg1
  // | F12        |    f_arg0
  // |            |    padding
  // | A0/Method* |  <- sp
  // NOTE: for Mips64, when A0 is skipped, F0 is also skipped.
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 7;  // 7 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = true;

  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 24;  // Offset of first FPR arg (F1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg (A1).
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 200;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | XMM3        |    float arg 4
  // | XMM2        |    float arg 3
  // | XMM1        |    float arg 2
  // | XMM0        |    float arg 1
  // | EAX/Method* |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 4;  // 4 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 4;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28 + 4*8;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kSplitPairAcrossRegisterAndStack = false;
  static constexpr bool kAlignPairRegister = false;
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr bool kQuickDoubleRegAlignedFloatBackFilled = false;
  static constexpr bool kQuickSkipOddFpRegisters = false;
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr bool kGprFprLockstep = false;
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
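  // Note: unlike the other architectures, the x86-64 argument GPRs are not spilled in argument
  // order. Per the diagram above, the spill area starting at Gpr1Offset holds RCX, RDX, RBX,
  // RBP, RSI, R8, R9, so argument index i (arg1 == RSI, arg2 == RDX, arg3 == RCX, ...) has to
  // be mapped to its non-contiguous spill slot by the switch below.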
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  // Special handling for proxy methods. Proxy methods are instance methods so the
  // 'this' object is the 1st argument. They also have the same frame layout as the
  // kRefsAndArgs runtime method. Since 'this' is a reference, it is located in the
  // 1st GPR.
  static mirror::Object* GetProxyThisObject(ArtMethod** sp)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    CHECK((*sp)->IsProxyMethod());
    CHECK_GT(kNumQuickGprArgs, 0u);
    constexpr uint32_t kThisGprIndex = 0u;  // 'this' is in the 1st GPR.
    size_t this_arg_offset = kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset +
        GprIndexToGprOffset(kThisGprIndex);
    uint8_t* this_arg_address = reinterpret_cast<uint8_t*>(sp) + this_arg_offset;
    return reinterpret_cast<StackReference<mirror::Object>*>(this_arg_address)->AsMirrorPtr();
  }

  static ArtMethod* GetCallingMethod(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    return GetCalleeSaveMethodCaller(sp, Runtime::kRefsAndArgs);
  }

  static ArtMethod* GetOuterMethod(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* previous_sp =
        reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<ArtMethod**>(previous_sp);
  }

  static uint32_t GetCallingDexPc(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
    ArtMethod** caller_sp = reinterpret_cast<ArtMethod**>(
        reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
    uintptr_t outer_pc = QuickArgumentVisitor::GetCallingPc(sp);
    const OatQuickMethodHeader* current_code = (*caller_sp)->GetOatQuickMethodHeader(outer_pc);
    uintptr_t outer_pc_offset = current_code->NativeQuickPcOffset(outer_pc);

    if (current_code->IsOptimized()) {
      CodeInfo code_info = current_code->GetOptimizedCodeInfo();
      StackMapEncoding encoding = code_info.ExtractEncoding();
      StackMap stack_map = code_info.GetStackMapForNativePcOffset(outer_pc_offset, encoding);
      DCHECK(stack_map.IsValid());
      if (stack_map.HasInlineInfo(encoding)) {
        InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map, encoding);
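        // The call site sits in the innermost inlined frame, so take the dex pc recorded at
        // the deepest inline depth.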
        return inline_info.GetDexPcAtDepth(inline_info.GetDepth() - 1);
      } else {
        return stack_map.GetDexPc(encoding);
      }
    } else {
      return current_code->ToDexPc(*caller_sp, outer_pc);
    }
  }

  // For the given quick RefsAndArgs callee-save frame, return the caller's PC.
  static uintptr_t GetCallingPc(ArtMethod** sp) SHARED_REQUIRES(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    uint8_t* lr = reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_REQUIRES(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<uint8_t*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + sizeof(ArtMethod*)),  // Skip ArtMethod*.
      gpr_index_(0), fpr_index_(0), fpr_double_index_(0), stack_index_(0),
      cur_type_(Primitive::kPrimVoid), is_split_long_or_double_(false) {
    static_assert(kQuickSoftFloatAbi == (kNumQuickFprArgs == 0),
                  "Number of Quick FPR arguments unexpected");
    static_assert(!(kQuickSoftFloatAbi && kQuickDoubleRegAlignedFloatBackFilled),
                  "Double alignment unexpected");
    // For register alignment, the counters (such as fpr_double_index_) must be able to stay
    // even, so the number of FPR argument registers has to be even.
    static_assert(!kQuickDoubleRegAlignedFloatBackFilled || kNumQuickFprArgs % 2 == 0,
                  "Number of Quick FPR arguments not even");
    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  uint8_t* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if (type == Primitive::kPrimDouble && kQuickDoubleRegAlignedFloatBackFilled) {
          if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
            return fpr_args_ + (fpr_double_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
          }
        } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPRs and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    // The split long is always available through the stack.
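    // Note: this appears to rely on the caller's out area also holding spill slots for the
    // register-passed halves (the "argN spill" slots in the frame diagrams above), so both
    // halves of the pair can be read contiguously from stack_args_.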
    return *reinterpret_cast<uint64_t*>(stack_args_
        + stack_index_ * kBytesStackArgLocation);
  }

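  // On lockstep architectures (kGprFprLockstep, i.e. mips64 here), GPR and FPR argument
  // registers are allocated in parallel: consuming a register of one kind also consumes the
  // matching register of the other kind (e.g. when A0 is skipped, F0 is skipped too).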
  void IncGprIndex() {
    gpr_index_++;
    if (kGprFprLockstep) {
      fpr_index_++;
    }
  }

  void IncFprIndex() {
    fpr_index_++;
    if (kGprFprLockstep) {
      gpr_index_++;
    }
  }

  void VisitArguments() SHARED_REQUIRES(Locks::mutator_lock_) {
    // (a) 'stack_args_' should point to the first argument of the method.
    // (b) whatever the argument type is, 'stack_index_' is moved forward
    //     with every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    if (kQuickDoubleRegAlignedFloatBackFilled) {
      fpr_double_index_ = 0;
    }
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      stack_index_++;
      if (kNumQuickGprArgs > 0) {
        IncGprIndex();
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (gpr_index_ < kNumQuickGprArgs) {
            IncGprIndex();
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          stack_index_++;
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
            }
          } else {
            if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (kQuickDoubleRegAlignedFloatBackFilled) {
                // Double should not overlap with float.
                // For example, if fpr_index_ = 3, fpr_double_index_ should be at least 4.
                fpr_double_index_ = std::max(fpr_double_index_, RoundUp(fpr_index_, 2));
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              } else if (kQuickSkipOddFpRegisters) {
                IncFprIndex();
              }
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            if (cur_type_ == Primitive::kPrimLong && kAlignPairRegister && gpr_index_ == 0) {
              // Currently, this is only for ARM and MIPS, where the first available parameter
              // register is R1 (on ARM) or A1 (on MIPS). So we skip it, and use R2 (on ARM) or
              // A2 (on MIPS) instead.
              IncGprIndex();
            }
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            if (!kSplitPairAcrossRegisterAndStack && is_split_long_or_double_) {
              // We don't want to split this. Pass over this register.
              gpr_index_++;
              is_split_long_or_double_ = false;
            }
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              IncGprIndex();
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  IncGprIndex();
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs) && !kQuickDoubleRegAlignedFloatBackFilled;
            Visit();
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
            if (kQuickDoubleRegAlignedFloatBackFilled) {
              if (fpr_double_index_ + 2 < kNumQuickFprArgs + 1) {
                fpr_double_index_ += 2;
                // Float should not overlap with double.
                if (fpr_index_ % 2 == 0) {
                  fpr_index_ = std::max(fpr_double_index_, fpr_index_);
                }
              }
            } else if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
              IncFprIndex();
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if (fpr_index_ + 1 < kNumQuickFprArgs + 1) {
                  IncFprIndex();
                }
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  uint8_t* const gpr_args_;    // Address of GPR arguments in callee save frame.
  uint8_t* const fpr_args_;    // Address of FPR arguments in callee save frame.
  uint8_t* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  // Index into spilled FPRs.
  // In case kQuickDoubleRegAlignedFloatBackFilled, it may index a hole while fpr_double_index_
  // holds a higher register number.
  uint32_t fpr_index_;
  // Index into spilled FPRs for aligned double.
  // Only used when kQuickDoubleRegAlignedFloatBackFilled. Next available double register indexed
  // in terms of singles; it may be ahead of fpr_index_.
  uint32_t fpr_double_index_;
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Returns the 'this' object of a proxy method. This function is only used by StackVisitor. It
// makes it possible to use the QuickArgumentVisitor constants without moving all the code into
// its own module.
extern "C" mirror::Object* artQuickGetProxyThisObject(ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return QuickArgumentVisitor::GetProxyThisObject(sp);
}

// Visits arguments on the stack, placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
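      // Longs and doubles occupy a pair of vregs: bump once here and once more at the common
      // increment at the end of Visit().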
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  ++cur_reg_;
}

extern "C" uint64_t artQuickToInterpreterBridge(ArtMethod* method, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  ScopedQuickEntrypointChecks sqec(self);

  if (UNLIKELY(!method->IsInvokable())) {
    method->ThrowInvocationTimeError();
    return 0;
  }

  JValue tmp_value;
  ShadowFrame* deopt_frame = self->PopStackedShadowFrame(
      StackedShadowFrameType::kSingleFrameDeoptimizationShadowFrame, false);
  const DexFile::CodeItem* code_item = method->GetCodeItem();
  DCHECK(code_item != nullptr) << PrettyMethod(method);
  ManagedStack fragment;

  DCHECK(!method->IsNative()) << PrettyMethod(method);
  uint32_t shorty_len = 0;
  auto* non_proxy_method = method->GetInterfaceMethodIfProxy(sizeof(void*));
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);

  JValue result;

  if (deopt_frame != nullptr) {
    // Coming from single-frame deopt.

    if (kIsDebugBuild) {
      // Sanity-check: are the methods as expected? We check that the last shadow frame (the bottom
      // of the call-stack) corresponds to the called method.
      ShadowFrame* linked = deopt_frame;
      while (linked->GetLink() != nullptr) {
        linked = linked->GetLink();
      }
      CHECK_EQ(method, linked->GetMethod()) << PrettyMethod(method) << " "
          << PrettyMethod(linked->GetMethod());
    }

    if (VLOG_IS_ON(deopt)) {
      // Print out the stack to verify that it was a single-frame deopt.
      LOG(INFO) << "Continuing from deopt. Stack is:";
      QuickExceptionHandler::DumpFramesWithType(self, true);
    }

    mirror::Throwable* pending_exception = nullptr;
    bool from_code = false;
    self->PopDeoptimizationContext(&result, &pending_exception, /* out */ &from_code);
    CHECK(from_code);

    // Push a transition back into managed code onto the linked list in thread.
    self->PushManagedStackFragment(&fragment);

    // Ensure that the stack is still in order.
    if (kIsDebugBuild) {
      class DummyStackVisitor : public StackVisitor {
       public:
        explicit DummyStackVisitor(Thread* self_in) SHARED_REQUIRES(Locks::mutator_lock_)
            : StackVisitor(self_in, nullptr, StackVisitor::StackWalkKind::kIncludeInlinedFrames) {}

        bool VisitFrame() OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_) {
          // Nothing to do here. In a debug build, SanityCheckFrame will do the work in the walking
          // logic. Just always say we want to continue.
          return true;
        }
      };
      DummyStackVisitor dsv(self);
      dsv.WalkStack();
    }

    // Restore the exception that was pending before deoptimization then interpret the
    // deoptimized frames.
    if (pending_exception != nullptr) {
      self->SetException(pending_exception);
    }
    interpreter::EnterInterpreterFromDeoptimize(self, deopt_frame, from_code, &result);
  } else {
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    uint16_t num_regs = code_item->registers_size_;
    // No last shadow coming from quick.
    ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
        CREATE_SHADOW_FRAME(num_regs, /* link */ nullptr, method, /* dex pc */ 0);
    ShadowFrame* shadow_frame = shadow_frame_unique_ptr.get();
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    const bool needs_initialization =
        method->IsStatic() && !method->GetDeclaringClass()->IsInitialized();
    // Push a transition back into managed code onto the linked list in thread.
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (needs_initialization) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(shadow_frame->GetMethod()->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(shadow_frame->GetMethod());
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    result = interpreter::EnterInterpreterFromEntryPoint(self, code_item, shadow_frame);
  }

  // Pop transition.
  self->PopManagedStackFragment(fragment);

  // Request a stack deoptimization if needed.
  ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  if (UNLIKELY(Dbg::IsForcedInterpreterNeededForUpcall(self, caller))) {
    // Push the context of the deoptimization stack so we can restore the return value and the
    // exception before executing the deoptimized frames.
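    // (shorty[0] is the return type; 'L' means the return value is a reference.)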
    self->PushDeoptimizationContext(
        result, shorty[0] == 'L', /* from_code */ false, self->GetException());

    // Set special exception to cause deoptimization.
    self->SetException(Thread::GetDeoptimizationException());
  }

  // No need to restore the args since the method has already been run by the interpreter.
  return result.GetJ();
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(
    ArtMethod* proxy_method, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making the stack crawlable.
  DCHECK_EQ((*sp), proxy_method) << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of the proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place arguments into the args vector and remove the receiver.
  ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy(sizeof(void*));
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = non_proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
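  // 'this' was visited as the first argument; drop it here since the invocation handler
  // receives the receiver separately (as rcvr_jobj).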
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  ArtMethod* interface_method = proxy_method->FindOverriddenMethod(sizeof(void*));
  DCHECK(interface_method != nullptr) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  self->EndAssertNoThreadSuspension(old_cause);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(
      mirror::Method::CreateFromArtMethod(soa.Self(), interface_method));

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(
    ArtMethod* called, mirror::Object* receiver, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  // The resolution trampoline stashes the resolved method into the callee-save frame to transport
  // it. Thus, when exiting, the stack cannot be verified (as the resolved method most likely
  // does not have the same stack layout as the callee-save method).
  ScopedQuickEntrypointChecks sqec(self, kIsDebugBuild, false);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  InvokeType invoke_type;
  MethodReference called_method(nullptr, 0);
  const bool called_method_known_on_entry = !called->IsRuntimeMethod();
  ArtMethod* caller = nullptr;
  if (!called_method_known_on_entry) {
    caller = QuickArgumentVisitor::GetCallingMethod(sp);
    uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
    const DexFile::CodeItem* code;
    called_method.dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(nullptr);
        UNREACHABLE();
    }
Ian Rogerse0a02da2014-12-02 14:10:53 -0800997 called_method.dex_method_index = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
Ian Rogers848871b2013-08-05 10:56:33 -0700998 } else {
999 invoke_type = kStatic;
Ian Rogerse0a02da2014-12-02 14:10:53 -08001000 called_method.dex_file = called->GetDexFile();
1001 called_method.dex_method_index = called->GetDexMethodIndex();
Ian Rogers848871b2013-08-05 10:56:33 -07001002 }
1003 uint32_t shorty_len;
1004 const char* shorty =
Ian Rogerse0a02da2014-12-02 14:10:53 -08001005 called_method.dex_file->GetMethodShorty(
1006 called_method.dex_file->GetMethodId(called_method.dex_method_index), &shorty_len);
Mathieu Chartier590fee92013-09-13 13:46:47 -07001007 RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
Ian Rogers848871b2013-08-05 10:56:33 -07001008 visitor.VisitArguments();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001009 self->EndAssertNoThreadSuspension(old_cause);
Ian Rogerse0a02da2014-12-02 14:10:53 -08001010 const bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
Ian Rogers848871b2013-08-05 10:56:33 -07001011 // Resolve method filling in dex cache.
Ian Rogerse0a02da2014-12-02 14:10:53 -08001012 if (!called_method_known_on_entry) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001013 StackHandleScope<1> hs(self);
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001014 mirror::Object* dummy = nullptr;
1015 HandleWrapper<mirror::Object> h_receiver(
1016 hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
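    // The receiver may move if ResolveMethod below triggers a GC. NewHandleWrapper is understood
    // to write the (possibly updated) reference back to 'receiver' when h_receiver goes out of
    // scope; for static and direct calls a dummy location is used, since there is no receiver.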
    DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
    called = linker->ResolveMethod<ClassLinker::kForceICCECheck>(
        self, called_method.dex_method_index, caller, invoke_type);
  }
  const void* code = nullptr;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface || invoke_type == kSuper) {
      // Refine called method based on receiver for kVirtual/kInterface, and
      // caller for kSuper.
      ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        CHECK(receiver != nullptr) << invoke_type;
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called, sizeof(void*));
      } else if (invoke_type == kInterface) {
        CHECK(receiver != nullptr) << invoke_type;
        called = receiver->GetClass()->FindVirtualMethodForInterface(called, sizeof(void*));
      } else {
        DCHECK_EQ(invoke_type, kSuper);
        CHECK(caller != nullptr) << invoke_type;
        called = caller->GetDeclaringClass()->GetSuperClass()->GetVTableEntry(
            called->GetMethodIndex(), sizeof(void*));
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method
      // index of the sharpened method, avoiding dirtying the dex cache if possible.
      // Note: called_method.dex_method_index references the dex method before the
      // FindVirtualMethodFor... call. This is ok for FindDexMethodIndexInOtherDexFile, which
      // only cares about the name and signature.
      uint32_t update_dex_cache_method_index = called->GetDexMethodIndex();
      if (!called->HasSameDexCacheResolvedMethods(caller, sizeof(void*))) {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK(!called_method_known_on_entry);
        DCHECK_EQ(caller->GetDexFile(), called_method.dex_file);
        const DexFile* caller_dex_file = called_method.dex_file;
        uint32_t caller_method_name_and_sig_index = called_method.dex_method_index;
        update_dex_cache_method_index =
            called->FindDexMethodIndexInOtherDexFile(*caller_dex_file,
                                                     caller_method_name_and_sig_index);
      }
      if ((update_dex_cache_method_index != DexFile::kDexNoIndex) &&
          (caller->GetDexCacheResolvedMethod(
              update_dex_cache_method_index, sizeof(void*)) != called)) {
        caller->SetDexCacheResolvedMethod(update_dex_cache_method_index, called, sizeof(void*));
      }
    } else if (invoke_type == kStatic) {
      const auto called_dex_method_idx = called->GetDexMethodIndex();
      // For static invokes, we may dispatch to the static method in the superclass but resolve
      // using the subclass. To prevent getting slow paths on each invoke, we force set the
      // resolved method for the super class dex method index if we are in the same dex file.
      // b/19175856
      if (called->GetDexFile() == called_method.dex_file &&
          called_method.dex_method_index != called_dex_method_idx) {
        called->GetDexCache()->SetResolvedMethod(called_dex_method_idx, called, sizeof(void*));
      }
    }

    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(soa.Self(), called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
        // If we are single-stepping or the called method is deoptimized (by a
        // breakpoint, for example), then we have to execute the called method
        // with the interpreter.
        code = GetQuickToInterpreterBridge();
      } else if (UNLIKELY(Dbg::IsForcedInstrumentationNeededForResolution(self, caller))) {
        // If the caller is deoptimized (by a breakpoint, for example), we have to
        // continue its execution with interpreter when returning from the called
        // method. Because we do not want to execute the called method with the
        // interpreter, we wrap its execution into the instrumentation stubs.
        // When the called method returns, it will execute the instrumentation
        // exit hook that will determine the need of the interpreter with a call
        // to Dbg::IsForcedInterpreterNeededForUpcall and deoptimize the stack if
        // it is needed.
        code = GetQuickInstrumentationEntryPoint();
      } else {
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else if (called_class->IsInitializing()) {
      if (UNLIKELY(Dbg::IsForcedInterpreterNeededForResolution(self, called))) {
        // If we are single-stepping or the called method is deoptimized (by a
        // breakpoint, for example), then we have to execute the called method
        // with the interpreter.
        code = GetQuickToInterpreterBridge();
      } else if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == nullptr, self->IsExceptionPending());
  // Fixup any locally saved objects that may have moved during a GC.
  visitor.FixupReferences();
  // Place the called method in the callee-save frame so it is passed as the first argument to
  // the quick method.
  *sp = called;

  return code;
}

/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
 *    when we have to split things.
 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
 *    and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
 *    necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
 *    the stack.
 * 6) There is only little endian.
 *
 *
 * Actual work is supposed to be done in a delegate of the template type. The interface is as
 * follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, think the architecture is 32b and aligns 64b.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_
 *                                            be called with nullptr, as this might be important
 *                                            for null initialization. Must return the jobject,
 *                                            that is, the reference to the entry in the
 *                                            HandleScope (nullptr if necessary).
 *
 */
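// A worked example of the machine below on 32-bit ARM as configured (soft-float, four argument
// GPRs): for shorty "VJI" (void f(long, int)), the header assigns the JNIEnv* to r0 and the
// jclass/this handle to r1; the long then takes the aligned pair r2/r3, and the int spills to
// the first stack slot. For shorty "VIJ" the int takes r2, and the long, now lacking an aligned
// register pair, goes entirely to the stack, leaving r3 unused.
//
// An illustrative, non-compiled sketch of driving the machine with a concrete delegate
// (FillNativeCall is defined further below; the argument arrays here are hypothetical):
#if 0
  FillNativeCall filler(gpr_array, fpr_array, stack_array);
  BuildNativeCallFrameStateMachine<FillNativeCall> sm(&filler);
  sm.AdvancePointer(env);    // JNIEnv* goes to the first free GPR.
  sm.AdvanceLong(long_arg);  // Aligned register pair or stack, per the per-arch constants.
  sm.AdvanceInt(int_arg);    // Next free GPR, else a stack slot.
#endif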
template<class T> class BuildNativeCallFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__) && !defined(__LP64__)
  static constexpr bool kNativeSoftFloatAbi = true;  // Arguments are passed as if soft-float.
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiFPRegistersWidened = true;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__mips__) && defined(__LP64__)
  // Let the code prepare GPRs only and we will load the FPRs with same data.
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 8;
  static constexpr size_t kNumNativeFprArgs = 0;

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs; all on the stack.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs; all on the stack.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 not using regs, anyways.
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiFPRegistersWidened = false;
  static constexpr bool kMultiGPRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildNativeCallFrameStateMachine(T* delegate)
      : gpr_index_(kNumNativeGprArgs),
        fpr_index_(kNumNativeFprArgs),
        stack_entries_(0),
        delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even
    // iff the next register is even; counting down is just to make the compiler happy...
    static_assert(kNumNativeGprArgs % 2 == 0U, "Number of native GPR arguments not even");
    static_assert(kNumNativeFprArgs % 2 == 0U, "Number of native FPR arguments not even");
  }

  virtual ~BuildNativeCallFrameStateMachine() {}

  bool HavePointerGpr() const {
    return gpr_index_ > 0;
  }

  void AdvancePointer(const void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b.
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }

  bool HaveHandleScopeGpr() const {
    return gpr_index_ > 0;
  }

  void AdvanceHandleScope(mirror::Object* ptr) SHARED_REQUIRES(Locks::mutator_lock_) {
    uintptr_t handle = PushHandle(ptr);
    if (HaveHandleScopeGpr()) {
      gpr_index_--;
      PushGpr(handle);
    } else {
      stack_entries_++;
      PushStack(handle);
      gpr_index_ = 0;
    }
  }

  bool HaveIntGpr() const {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      if (kMultiGPRegistersWidened) {
        DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
        PushGpr(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
      } else {
        PushGpr(val);
      }
    } else {
      stack_entries_++;
      if (kMultiGPRegistersWidened) {
        DCHECK_EQ(sizeof(uintptr_t), sizeof(int64_t));
        PushStack(static_cast<int64_t>(bit_cast<int32_t, uint32_t>(val)));
      } else {
        PushStack(val);
      }
      gpr_index_ = 0;
    }
  }

  bool HaveLongGpr() const {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

  bool LongGprNeedsPadding() const {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
           kAlignLongOnStack &&            // and when it needs alignment
           (gpr_index_ & 1) == 1;          // counter is odd, see constructor
  }

  bool LongStackNeedsPadding() const {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
           kAlignLongOnStack &&            // and when it needs 8B alignment
           (stack_entries_ & 1) == 1;      // counter is odd
  }

  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }

  bool HaveFloatFpr() const {
    return fpr_index_ > 0;
  }

  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(bit_cast<uint32_t, float>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiFPRegistersWidened) {
            PushFpr8(bit_cast<uint64_t, double>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(static_cast<uint64_t>(bit_cast<uint32_t, float>(val)));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiFPRegistersWidened) {
          // Need to widen before storing: Note the "double" in the template instantiation.
          // Note: We need to jump through those hoops to make the compiler happy.
          DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
          PushStack(static_cast<uintptr_t>(bit_cast<uint64_t, double>(val)));
        } else {
          PushStack(static_cast<uintptr_t>(bit_cast<uint32_t, float>(val)));
        }
        fpr_index_ = 0;
      }
    }
  }

  bool HaveDoubleFpr() const {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() const {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
           kAlignDoubleOnStack &&            // and when it needs alignment
           (fpr_index_ & 1) == 1;            // counter is odd, see constructor
  }

  bool DoubleStackNeedsPadding() const {
    return kRegistersNeededForDouble > 1 &&  // only pad when using multiple registers
           kAlignDoubleOnStack &&            // and when it needs 8B alignment
           (stack_entries_ & 1) == 1;        // counter is odd
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t GetStackEntries() const {
    return stack_entries_;
  }

  uint32_t GetNumberOfUsedGprs() const {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t GetNumberOfUsedFprs() const {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushHandle(mirror::Object* ref) SHARED_REQUIRES(Locks::mutator_lock_) {
    return delegate_->PushHandle(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs.
  uint32_t fpr_index_;      // Number of free FPRs.
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended.
  T* const delegate_;       // What Push implementation gets called.
};

// Computes the sizes of register stacks and call stack area. Handling of references can be
// extended in subclasses.
//
// To handle native pointers, use "L" in the shorty for an object reference, which simulates
// them with handles.
class ComputeNativeCallFrameSize {
 public:
  ComputeNativeCallFrameSize() : num_stack_entries_(0) {}

  virtual ~ComputeNativeCallFrameSize() {}

  uint32_t GetStackSize() const {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  uint8_t* LayoutCallStack(uint8_t* sp8) const {
    sp8 -= GetStackSize();
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
    return sp8;
  }

  uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr)
      const {
    // Assumption is OK right now, as we have soft-float arm.
    size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
    return sp8;
  }

  uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
                            uint32_t** start_fpr) const {
    // Native call stack.
    sp8 = LayoutCallStack(sp8);
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // Put fprs and gprs below.
    sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  virtual void WalkHeader(
      BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm ATTRIBUTE_UNUSED)
      SHARED_REQUIRES(Locks::mutator_lock_) {
  }

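  // Walk simulates the native call's argument traversal: it feeds every element of the shorty
  // through the state machine and records only the resulting stack-entry count (the Push*
  // methods below are deliberate no-ops in this base class).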
  void Walk(const char* shorty, uint32_t shorty_len) SHARED_REQUIRES(Locks::mutator_lock_) {
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);

    WalkHeader(&sm);

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          // TODO: fix abuse of mirror types.
          sm.AdvanceHandleScope(
              reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
          UNREACHABLE();
      }
    }

    num_stack_entries_ = sm.GetStackEntries();
  }

  void PushGpr(uintptr_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr4(float /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr8(uint64_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushStack(uintptr_t /* val */) {
    // Counting is already done in the state machine.
  }

  virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 protected:
  uint32_t num_stack_entries_;
};

class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
 public:
  ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}

  // Lays out the callee-save frame. Assumes that the incorrect frame corresponding to RefsAndArgs
  // is at *m = sp. Will update to point to the bottom of the save frame.
  //
  // Note: assumes ComputeAll() has been run before.
  void LayoutCalleeSaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    ArtMethod* method = **m;

    DCHECK_EQ(Runtime::Current()->GetClassLinker()->GetImagePointerSize(), sizeof(void*));

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.

    // "Free" the slot for the method.
    sp8 += sizeof(void*);  // In the callee-save frame we use a full pointer.

    // Under the callee saves put handle scope and new method stack reference.
    size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
    size_t scope_and_method = handle_scope_size + sizeof(ArtMethod*);

    sp8 -= scope_and_method;
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));

    uint8_t* sp8_table = sp8 + sizeof(ArtMethod*);
    *handle_scope = HandleScope::Create(sp8_table, self->GetTopHandleScope(),
                                        num_handle_scope_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    uint8_t* method_pointer = sp8;
    auto** new_method_ref = reinterpret_cast<ArtMethod**>(method_pointer);
    *new_method_ref = method;
    *m = new_method_ref;
  }

  // Adds space for the cookie. Note: may leave stack unaligned.
  void LayoutCookie(uint8_t** sp) const {
    // Reference cookie and padding.
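    // (The cookie stored here is the 32-bit local-reference cookie saved by JniMethodStart; the
    // remaining bytes appear to be padding so the reservation stays 8 bytes. See the
    // *(sp32 - 1) store in artQuickGenericJniTrampoline below.)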
    *sp -= 8;
  }

  // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
  // Returns the new bottom. Note: this may be unaligned.
  uint8_t* LayoutJNISaveFrame(Thread* self, ArtMethod*** m, void* sp, HandleScope** handle_scope)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.
    LayoutCalleeSaveFrame(self, m, sp, handle_scope);

    // The bottom of the callee-save frame is now where the method is, *m.
    uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);

    // Add space for cookie.
    LayoutCookie(&sp8);

    return sp8;
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  uint8_t* ComputeLayout(Thread* self, ArtMethod*** m, const char* shorty, uint32_t shorty_len,
                         HandleScope** handle_scope, uintptr_t** start_stack,
                         uintptr_t** start_gpr, uint32_t** start_fpr)
      SHARED_REQUIRES(Locks::mutator_lock_) {
    Walk(shorty, shorty_len);

    // JNI part.
    uint8_t* sp8 = LayoutJNISaveFrame(self, m, reinterpret_cast<void*>(*m), handle_scope);

    sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);

    // Return the new bottom.
    return sp8;
  }

  uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;

  // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
  void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
      SHARED_REQUIRES(Locks::mutator_lock_);

 private:
  uint32_t num_handle_scope_references_;
};

uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
  num_handle_scope_references_++;
  return reinterpret_cast<uintptr_t>(nullptr);
}

void ComputeGenericJniFrameSize::WalkHeader(
    BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
  // JNIEnv
  sm->AdvancePointer(nullptr);

  // Class object or this as first argument.
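  // (A dummy non-null pointer is passed below: during size computation PushHandle only counts
  // references and never dereferences the object.)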
1701 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
1702}
1703
1704// Class to push values to three separate regions. Used to fill the native call part. Adheres to
1705// the template requirements of BuildGenericJniFrameStateMachine.
1706class FillNativeCall {
1707 public:
1708 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
1709 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
1710
1711 virtual ~FillNativeCall() {}
1712
1713 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
1714 cur_gpr_reg_ = gpr_regs;
1715 cur_fpr_reg_ = fpr_regs;
1716 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08001717 }
1718
1719 void PushGpr(uintptr_t val) {
1720 *cur_gpr_reg_ = val;
1721 cur_gpr_reg_++;
1722 }
1723
1724 void PushFpr4(float val) {
1725 *cur_fpr_reg_ = val;
1726 cur_fpr_reg_++;
1727 }
1728
1729 void PushFpr8(uint64_t val) {
1730 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1731 *tmp = val;
1732 cur_fpr_reg_ += 2;
1733 }
1734
1735 void PushStack(uintptr_t val) {
1736 *cur_stack_arg_ = val;
1737 cur_stack_arg_++;
1738 }
1739
Mathieu Chartier90443472015-07-16 20:32:27 -07001740 virtual uintptr_t PushHandle(mirror::Object*) SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001741 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001742 UNREACHABLE();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001743 }
1744
1745 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001746 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001747 uint32_t* cur_fpr_reg_;
1748 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001749};
Andreas Gampec147b002014-03-06 18:11:06 -08001750
Andreas Gampec200a4a2014-06-16 18:39:09 -07001751// Visits arguments on the stack placing them into a region lower down the stack for the benefit
1752// of transitioning into native code.
1753class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
1754 public:
Ian Rogers59c07062014-10-10 13:03:39 -07001755 BuildGenericJniFrameVisitor(Thread* self, bool is_static, const char* shorty, uint32_t shorty_len,
Mathieu Chartiere401d142015-04-22 13:56:20 -07001756 ArtMethod*** sp)
Andreas Gampec200a4a2014-06-16 18:39:09 -07001757 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
1758 jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
1759 ComputeGenericJniFrameSize fsc;
1760 uintptr_t* start_gpr_reg;
1761 uint32_t* start_fpr_reg;
1762 uintptr_t* start_stack_arg;
Ian Rogers6a3c1fc2014-10-31 00:33:20 -07001763 bottom_of_used_area_ = fsc.ComputeLayout(self, sp, shorty, shorty_len,
Ian Rogers59c07062014-10-10 13:03:39 -07001764 &handle_scope_,
1765 &start_stack_arg,
Andreas Gampec200a4a2014-06-16 18:39:09 -07001766 &start_gpr_reg, &start_fpr_reg);
1767
Andreas Gampec200a4a2014-06-16 18:39:09 -07001768 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
1769
1770 // jni environment is always first argument
1771 sm_.AdvancePointer(self->GetJniEnv());
1772
1773 if (is_static) {
Mathieu Chartiere401d142015-04-22 13:56:20 -07001774 sm_.AdvanceHandleScope((**sp)->GetDeclaringClass());
Andreas Gampec200a4a2014-06-16 18:39:09 -07001775 }
1776 }
1777
Mathieu Chartier90443472015-07-16 20:32:27 -07001778 void Visit() SHARED_REQUIRES(Locks::mutator_lock_) OVERRIDE;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001779
Mathieu Chartier90443472015-07-16 20:32:27 -07001780 void FinalizeHandleScope(Thread* self) SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001781
Vladimir Markof39745e2016-01-26 12:16:55 +00001782 StackReference<mirror::Object>* GetFirstHandleScopeEntry() {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001783 return handle_scope_->GetHandle(0).GetReference();
1784 }
1785
Mathieu Chartier90443472015-07-16 20:32:27 -07001786 jobject GetFirstHandleScopeJObject() const SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001787 return handle_scope_->GetHandle(0).ToJObject();
1788 }
1789
Ian Rogers1428dce2014-10-21 15:02:15 -07001790 void* GetBottomOfUsedArea() const {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001791 return bottom_of_used_area_;
1792 }
1793
1794 private:
1795 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
1796 class FillJniCall FINAL : public FillNativeCall {
1797 public:
1798 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
1799 HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
1800 handle_scope_(handle_scope), cur_entry_(0) {}
1801
Mathieu Chartier90443472015-07-16 20:32:27 -07001802 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_REQUIRES(Locks::mutator_lock_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001803
1804 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
1805 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
1806 handle_scope_ = scope;
1807 cur_entry_ = 0U;
1808 }
1809
Mathieu Chartier90443472015-07-16 20:32:27 -07001810 void ResetRemainingScopeSlots() SHARED_REQUIRES(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001811 // Initialize padding entries.
1812 size_t expected_slots = handle_scope_->NumberOfReferences();
1813 while (cur_entry_ < expected_slots) {
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001814 handle_scope_->GetMutableHandle(cur_entry_++).Assign(nullptr);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001815 }
1816 DCHECK_NE(cur_entry_, 0U);
1817 }
1818
1819 private:
1820 HandleScope* handle_scope_;
1821 size_t cur_entry_;
1822 };
1823
1824 HandleScope* handle_scope_;
1825 FillJniCall jni_call_;
1826 void* bottom_of_used_area_;
1827
1828 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001829
1830 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1831};
1832
Andreas Gampec200a4a2014-06-16 18:39:09 -07001833uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
1834 uintptr_t tmp;
Andreas Gampe5a4b8a22014-09-11 08:30:08 -07001835 MutableHandle<mirror::Object> h = handle_scope_->GetMutableHandle(cur_entry_);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001836 h.Assign(ref);
1837 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
1838 cur_entry_++;
1839 return tmp;
1840}
1841
Ian Rogers9758f792014-03-13 09:02:55 -07001842void BuildGenericJniFrameVisitor::Visit() {
1843 Primitive::Type type = GetParamPrimitiveType();
1844 switch (type) {
1845 case Primitive::kPrimLong: {
1846 jlong long_arg;
1847 if (IsSplitLongOrDouble()) {
1848 long_arg = ReadSplitLongParam();
1849 } else {
1850 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1851 }
1852 sm_.AdvanceLong(long_arg);
1853 break;
1854 }
1855 case Primitive::kPrimDouble: {
1856 uint64_t double_arg;
1857 if (IsSplitLongOrDouble()) {
1858 // Read into union so that we don't case to a double.
1859 double_arg = ReadSplitLongParam();
1860 } else {
1861 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1862 }
1863 sm_.AdvanceDouble(double_arg);
1864 break;
1865 }
1866 case Primitive::kPrimNot: {
1867 StackReference<mirror::Object>* stack_ref =
1868 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001869 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07001870 break;
1871 }
1872 case Primitive::kPrimFloat:
1873 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1874 break;
1875 case Primitive::kPrimBoolean: // Fall-through.
1876 case Primitive::kPrimByte: // Fall-through.
1877 case Primitive::kPrimChar: // Fall-through.
1878 case Primitive::kPrimShort: // Fall-through.
1879 case Primitive::kPrimInt: // Fall-through.
1880 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1881 break;
1882 case Primitive::kPrimVoid:
1883 LOG(FATAL) << "UNREACHABLE";
Ian Rogers2c4257b2014-10-24 14:20:06 -07001884 UNREACHABLE();
Ian Rogers9758f792014-03-13 09:02:55 -07001885 }
1886}

void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
  // Clear out the rest of the scope.
  jni_call_.ResetRemainingScopeSlots();
  // Install HandleScope.
  self->PushHandleScope(handle_scope_);
}

#if defined(__arm__) || defined(__aarch64__)
extern "C" void* artFindNativeMethod();
#else
extern "C" void* artFindNativeMethod(Thread* self);
#endif

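// Helpers used on the error path below to finish the JNI transition: they select the
// synchronized or plain JniMethodEnd* variant depending on whether a lock object is present.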
uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
  if (lock != nullptr) {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
  } else {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
  }
}

void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
  if (lock != nullptr) {
    JniMethodEndSynchronized(cookie, lock, self);
  } else {
    JniMethodEnd(cookie, self);
  }
}

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a HandleScope and call stack and fill a mini stack with values to be pushed to registers.
 * The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this, as the handle scope needs to go into the callee-save frame.
 *
 * The return of this function denotes:
 * 1) How many bytes of the alloca can be released, if the value is non-negative.
 * 2) An error, if the value is negative.
 */
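// Rough sketch of the layout this produces, as derived from the Layout* helpers above
// (addresses decrease downwards):
//
//   | callee-save frame (RefsAndArgs)  |
//   | HandleScope                      |
//   | ArtMethod* (new top quick frame) |  <- updated sp
//   | JNI cookie (4B) + padding (4B)   |
//   | native call stack arguments      |
//   | FPR argument array               |
//   | GPR argument array               |  <- bottom of the used area that is returned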
extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ArtMethod* called = *sp;
  DCHECK(called->IsNative()) << PrettyMethod(called, true);
  uint32_t shorty_len = 0;
  const char* shorty = called->GetShorty(&shorty_len);

  // Run the visitor and update sp.
  BuildGenericJniFrameVisitor visitor(self, called->IsStatic(), shorty, shorty_len, &sp);
  visitor.VisitArguments();
  visitor.FinalizeHandleScope(self);

  // Fix up managed-stack things in Thread.
  self->SetTopOfStack(sp);

  self->VerifyStack();

  // Start JNI, save the cookie.
  uint32_t cookie;
  if (called->IsSynchronized()) {
    cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
    if (self->IsExceptionPending()) {
      self->PopHandleScope();
      // A negative value denotes an error.
      return GetTwoWordFailureValue();
    }
  } else {
    cookie = JniMethodStart(self);
  }
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  *(sp32 - 1) = cookie;
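  // The cookie is stored in the slot LayoutCookie reserved directly below the method pointer;
  // artQuickGenericJniEndTrampoline reads it back from the same place.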

  // Retrieve the stored native code.
  void* nativeCode = called->GetEntryPointFromJni();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
#if defined(__arm__) || defined(__aarch64__)
    nativeCode = artFindNativeMethod();
#else
    nativeCode = artFindNativeMethod(self);
#endif

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.

      // End JNI, as the assembly will move to deliver the exception.
      jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
      if (shorty[0] == 'L') {
        artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
      } else {
        artQuickGenericJniEndJNINonRef(self, cookie, lock);
      }

      return GetTwoWordFailureValue();
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

  // Return native code addr(lo) and bottom of alloca address(hi).
  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
                                reinterpret_cast<uintptr_t>(nativeCode));
}

// Defined in quick_jni_entrypoints.cc.
extern uint64_t GenericJniMethodEnd(Thread* self, uint32_t saved_local_ref_cookie,
                                    jvalue result, uint64_t result_f, ArtMethod* called,
                                    HandleScope* handle_scope);
/*
 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
 * unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
                                                    jvalue result,
                                                    uint64_t result_f) {
  // We're here just back from a native call. We don't have the shared mutator lock at this point
  // yet until we call GoToRunnable() later in GenericJniMethodEnd(). Accessing objects or doing
  // anything that requires a mutator lock before that would cause problems as GC may have the
  // exclusive mutator lock and may be moving objects, etc.
  ArtMethod** sp = self->GetManagedStack()->GetTopQuickFrame();
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  ArtMethod* called = *sp;
  uint32_t cookie = *(sp32 - 1);
  HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp) + sizeof(*sp));
2018 return GenericJniMethodEnd(self, cookie, result, result_f, called, table);
Andreas Gampe2da88232014-02-27 12:26:20 -08002019}
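
// A sketch of the stack layout the end trampoline above relies on, inferred purely from its
// pointer arithmetic (higher addresses at the top):
//
//   | HandleScope ...    | <- reinterpret_cast<uint8_t*>(sp) + sizeof(*sp)
//   | ArtMethod* called  | <- sp, the top quick frame
//   | cookie (uint32_t)  | <- sp32 - 1, the saved local reference cookie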
2020
Andreas Gamped58342c2014-06-05 14:18:08 -07002021// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
2022// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07002023//
Andreas Gamped58342c2014-06-05 14:18:08 -07002024// It is valid to use this, as at the usage points here (returns from C functions) we are assuming
Mathieu Chartier90443472015-07-16 20:32:27 -07002025// to hold the mutator lock (see SHARED_REQUIRES(Locks::mutator_lock_) annotations).
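
// As an illustrative sketch only (the real definitions live elsewhere in the runtime): on a
// 32-bit target the two words can be packed into one 64-bit return value, which the assembly
// stubs then see split across a register pair. The names below are hypothetical.
#if 0
typedef uint64_t TwoWordReturnSketch;
static inline TwoWordReturnSketch PackTwoWordsSketch(uintptr_t hi, uintptr_t lo) {
  // hi ends up in the high register of the pair, lo in the low one.
  return (static_cast<uint64_t>(hi) << 32) | static_cast<uint32_t>(lo);
}
#endif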

template<InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object, Thread* self,
                                     ArtMethod** sp) {
  ScopedQuickEntrypointChecks sqec(self);
  DCHECK_EQ(*sp, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs));
  ArtMethod* caller_method = QuickArgumentVisitor::GetCallingMethod(sp);
  ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check, type);
  if (UNLIKELY(method == nullptr)) {
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, &this_object, caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}
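
// Design note on artInvokeCommon: FindMethodFast only probes the dex cache and cannot trigger a
// GC, so it runs without any argument bookkeeping. Only on a miss do we pay for
// RememberForGcArgumentVisitor, which records the stacked references so that a moving GC
// triggered inside FindMethodFromCode cannot invalidate them; FixupReferences then writes the
// possibly-relocated values back to the stack before we return.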
2064
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01002065// Explicit artInvokeCommon template function declarations to please analysis tool.
2066#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
Mathieu Chartier90443472015-07-16 20:32:27 -07002067 template SHARED_REQUIRES(Locks::mutator_lock_) \
Mathieu Chartiere401d142015-04-22 13:56:20 -07002068 TwoWordReturn artInvokeCommon<type, access_check>( \
2069 uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01002070
2071EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
2072EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
2073EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
2074EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
2075EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
2076EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
2077EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
2078EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
2079EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
2080EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
2081#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
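
// For reference, EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false) above expands to the
// explicit instantiation:
//
//   template SHARED_REQUIRES(Locks::mutator_lock_)
//   TwoWordReturn artInvokeCommon<kVirtual, false>(
//       uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp);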

// See comments in runtime_support_asm.S
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, self, sp);
}

// Determine the target of an interface dispatch. The receiver object is known to be non-null.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(uint32_t dex_method_idx,
                                                      mirror::Object* this_object,
                                                      Thread* self, ArtMethod** sp)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  ScopedQuickEntrypointChecks sqec(self);
  // The optimizing compiler currently does not inline methods that contain an interface
  // invocation, so the outer method is also the caller. We use it directly to avoid fetching a
  // stack map, which is more expensive.
  ArtMethod* caller_method = QuickArgumentVisitor::GetOuterMethod(sp);
  DCHECK_EQ(caller_method, QuickArgumentVisitor::GetCallingMethod(sp));
  ArtMethod* interface_method = caller_method->GetDexCacheResolvedMethod(
      dex_method_idx, sizeof(void*));
  DCHECK(interface_method != nullptr) << dex_method_idx << " " << PrettyMethod(caller_method);
  ArtMethod* method;
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(
        interface_method, sizeof(void*));
    if (UNLIKELY(method == nullptr)) {
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(
          interface_method, this_object, caller_method);
      return GetTwoWordFailureValue();  // Failure.
    }
  } else {
    DCHECK_EQ(interface_method, Runtime::Current()->GetResolutionMethod());
    if (kIsDebugBuild) {
      uint32_t dex_pc = QuickArgumentVisitor::GetCallingDexPc(sp);
      const DexFile::CodeItem* code = caller_method->GetCodeItem();
      CHECK_LT(dex_pc, code->insns_size_in_code_units_);
      const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
      Instruction::Code instr_code = instr->Opcode();
      CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
            instr_code == Instruction::INVOKE_INTERFACE_RANGE)
          << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
      if (instr_code == Instruction::INVOKE_INTERFACE) {
        CHECK_EQ(dex_method_idx, instr->VRegB_35c());
      } else {
        CHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
        CHECK_EQ(dex_method_idx, instr->VRegB_3rc());
      }
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()
        ->GetDexFile();
    uint32_t shorty_len;
    const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
                                                   &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was null in method: " << PrettyMethod(method)
                          << " location: " << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}
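
// For illustration only, the conceptual shape of the fast-path lookup performed by
// FindVirtualMethodForInterface above: map the resolved interface method to the receiver
// class's implementing method. All names in the sketch (NumIfaces, IfaceAt, Implementations,
// IndexInIface) are hypothetical, not the real mirror::Class API.
#if 0
static ArtMethod* ConceptualInterfaceLookup(mirror::Class* klass, ArtMethod* iface_method) {
  for (size_t i = 0; i < klass->NumIfaces(); ++i) {  // Walk the class's interface table.
    if (klass->IfaceAt(i) == iface_method->GetDeclaringClass()) {
      // Each table entry pairs an interface with the class's methods implementing it.
      return klass->Implementations(i)[iface_method->IndexInIface()];
    }
  }
  return nullptr;  // No implementation: IncompatibleClassChangeError, as thrown above.
}
#endif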

}  // namespace art