/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "gc/accounting/card_table-inl.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_utils.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
class QuickArgumentVisitor {
  // Size of each spilled GPR.
#ifdef __LP64__
  static constexpr size_t kBytesPerGprSpillLocation = 8;
#else
  static constexpr size_t kBytesPerGprSpillLocation = 4;
#endif
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |  callee saves
  // | R3         |  arg3
  // | R2         |  arg2
  // | R1         |  arg1
  // | R0         |  padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 4;  // FPR spill size is 4 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 44;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 48;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X28        |
  // |  :         |
  // | X19        |
  // | X7         |
  // |  :         |
  // | X1         |
  // | D15        |
  // |  :         |
  // | D0         |
  // |            |  padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 8;  // FPR spill size is 8 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 144;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 296;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 304;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__mips__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |  callee saves
  // | A3         |  arg3
  // | A2         |  arg2
  // | A1         |  arg1
  // | A0/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 4;  // FPR spill size is 4 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 64;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |  callee saves
  // | EBX         |  arg3
  // | EDX         |  arg2
  // | ECX         |  arg1
  // | EAX/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 8;  // FPR spill size is 8 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 32;  // Frame size.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * kBytesPerGprSpillLocation;
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |  callee save
  // | R14             |  callee save
  // | R13             |  callee save
  // | R12             |  callee save
  // | R9              |  arg5
  // | R8              |  arg4
  // | RSI/R6          |  arg1
  // | RBP/R5          |  callee save
  // | RBX/R3          |  callee save
  // | RDX/R2          |  arg2
  // | RCX/R1          |  arg3
  // | XMM7            |  float arg 8
  // | XMM6            |  float arg 7
  // | XMM5            |  float arg 6
  // | XMM4            |  float arg 5
  // | XMM3            |  float arg 4
  // | XMM2            |  float arg 3
  // | XMM1            |  float arg 2
  // | XMM0            |  float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kBytesPerFprSpillLocation = 8;  // FPR spill size is 8 bytes.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168;  // Offset of return address.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize = 176;  // Frame size.
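  // Explanatory note (an aside, not in the upstream sources): the switch below maps the quick
  // ABI argument order (arg1 = RSI, arg2 = RDX, arg3 = RCX, arg4 = R8, arg5 = R9; RDI carries
  // the Method*) onto the spill slot order in the diagram above, where RCX sits at Gpr1Offset
  // and RSI four slots higher; hence index 0 (arg1, RSI) maps to 4 * kBytesPerGprSpillLocation.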
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * kBytesPerGprSpillLocation);
      case 1: return (1 * kBytesPerGprSpillLocation);
      case 2: return (0 * kBytesPerGprSpillLocation);
      case 3: return (5 * kBytesPerGprSpillLocation);
      case 4: return (6 * kBytesPerGprSpillLocation);
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
  static mirror::ArtMethod* GetCallingMethod(mirror::ArtMethod** sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return *reinterpret_cast<mirror::ArtMethod**>(previous_sp);
  }

  // For the given quick ref-and-args frame, return the caller's PC.
  static uintptr_t GetCallingPc(mirror::ArtMethod** sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK((*sp)->IsCalleeSaveMethod());
    byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(mirror::ArtMethod** sp, bool is_static,
                       const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
      gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
      is_split_long_or_double_(false) {
    DCHECK_EQ(kQuickCalleeSaveFrame_RefAndArgs_FrameSize,
              Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
  }

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  byte* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
          return fpr_args_ + (fpr_index_ * kBytesPerFprSpillLocation);
        }
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((kBytesPerGprSpillLocation == 4) || (kBytesPerFprSpillLocation == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPRs and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

  uint64_t ReadSplitLongParam() const {
    DCHECK(IsSplitLongOrDouble());
    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
    return (low_half & 0xffffffffULL) | (high_half << 32);
  }

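  // Worked example (an illustrative aside, not in the upstream sources): on 32-bit ARM
  // (kNumQuickGprArgs == 3, soft float), a non-static method with shorty "JI" gets "this" in
  // GPR 0, the long in GPRs 1 and 2, and the int in stack slot 0. With shorty "IJ" instead,
  // the long starts in the last free GPR and is split: its low half lives in GPR 2 and its
  // high half in the first stack slot, which is exactly what ReadSplitLongParam() reassembles.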
  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    gpr_index_ = 0;
    fpr_index_ = 0;
    stack_index_ = 0;
    if (!is_static_) {  // Handle the "this" argument.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      if (kNumQuickGprArgs > 0) {
        gpr_index_++;
      } else {
        stack_index_++;
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          if (gpr_index_ < kNumQuickGprArgs) {
            gpr_index_++;
          } else {
            stack_index_++;
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
            } else {
              stack_index_++;
            }
          } else {
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
            } else {
              stack_index_++;
            }
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            is_split_long_or_double_ = (kBytesPerGprSpillLocation == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            Visit();
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
              if (kBytesPerGprSpillLocation == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  gpr_index_++;
                } else {
                  stack_index_++;
                }
              }
            } else {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
          } else {
            is_split_long_or_double_ = (kBytesPerFprSpillLocation == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs);
            Visit();
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
              if (kBytesPerFprSpillLocation == 4) {
                if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
                  fpr_index_++;
                } else {
                  stack_index_++;
                }
              }
            } else {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 private:
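  // Returns the offset, from the bottom of the caller's frame (its Method* slot), of the first
  // argument passed on the stack: the Method* slot plus the out slots shadowing register-passed
  // arguments. Aside (not in the upstream sources): for soft-float targets this is constant,
  // e.g. on ARM always (1 Method* slot + 3 GPR args) * 4 = 16 bytes, whatever the shorty.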
  static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
                                             uint32_t shorty_len) {
    if (kQuickSoftFloatAbi) {
      CHECK_EQ(kNumQuickFprArgs, 0U);
      return (kNumQuickGprArgs * kBytesPerGprSpillLocation) + kBytesPerGprSpillLocation /* ArtMethod* */;
    } else {
      size_t offset = kBytesPerGprSpillLocation;  // Skip Method*.
      size_t gprs_seen = 0;
      size_t fprs_seen = 0;
      if (!is_static && (gprs_seen < kNumQuickGprArgs)) {
        gprs_seen++;
        offset += kBytesStackArgLocation;
      }
      for (uint32_t i = 1; i < shorty_len; ++i) {
        switch (shorty[i]) {
          case 'Z':
          case 'B':
          case 'C':
          case 'S':
          case 'I':
          case 'L':
            if (gprs_seen < kNumQuickGprArgs) {
              gprs_seen++;
              offset += kBytesStackArgLocation;
            }
            break;
          case 'J':
            if (gprs_seen < kNumQuickGprArgs) {
              gprs_seen++;
              offset += 2 * kBytesStackArgLocation;
              if (kBytesPerGprSpillLocation == 4) {
                if (gprs_seen < kNumQuickGprArgs) {
                  gprs_seen++;
                }
              }
            }
            break;
          case 'F':
            if ((kNumQuickFprArgs != 0) && (fprs_seen + 1 < kNumQuickFprArgs + 1)) {
              fprs_seen++;
              offset += kBytesStackArgLocation;
            }
            break;
          case 'D':
            if ((kNumQuickFprArgs != 0) && (fprs_seen + 1 < kNumQuickFprArgs + 1)) {
              fprs_seen++;
              offset += 2 * kBytesStackArgLocation;
              if (kBytesPerFprSpillLocation == 4) {
                if ((kNumQuickFprArgs != 0) && (fprs_seen + 1 < kNumQuickFprArgs + 1)) {
                  fprs_seen++;
                }
              }
            }
            break;
          default:
            LOG(FATAL) << "Unexpected shorty character: " << shorty[i] << " in " << shorty;
        }
      }
      return offset;
    }
  }

  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;
  byte* const gpr_args_;    // Address of GPR arguments in callee save frame.
  byte* const fpr_args_;    // Address of FPR arguments in callee save frame.
  byte* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;      // Index into spilled GPRs.
  uint32_t fpr_index_;      // Index into spilled FPRs.
  uint32_t stack_index_;    // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Visits arguments on the stack, placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ShadowFrame* sf, size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
    }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:  // Fall-through.
    case Primitive::kPrimChar:  // Fall-through.
    case Primitive::kPrimShort:  // Fall-through.
    case Primitive::kPrimInt:  // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
  ++cur_reg_;
}

extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
    MethodHelper mh(method);
    const DexFile::CodeItem* code_item = mh.GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
                                                  method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, mh.IsStatic(), mh.GetShorty(),
                                                      mh.GetShortyLength(),
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitializing()) {
      // Ensure static method's class is initialized.
      SirtRef<mirror::Class> sirt_c(self, method->GetDeclaringClass());
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_c, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
                            uint32_t shorty_len, ScopedObjectAccessUnchecked* soa,
                            std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:  // Fall-through.
    case Primitive::kPrimChar:  // Fall-through.
    case Primitive::kPrimShort:  // Fall-through.
    case Primitive::kPrimInt:  // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      val.j = 0;
      break;
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fix up any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object
// method, which is responsible for recording callee save registers. We explicitly place into
// jobjects the incoming reference arguments (so they survive GC). We invoke the invocation
// handler, which is a field within the proxy object, which will box the primitive arguments and
// deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making the stack crawlable.
  DCHECK_EQ(*sp, proxy_method) << PrettyMethod(proxy_method);
  self->SetTopOfStack(sp, 0);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of the proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  MethodHelper proxy_mh(proxy_method);
  DCHECK(!proxy_mh.IsStatic()) << PrettyMethod(proxy_method);
  std::vector<jvalue> args;
  BuildQuickArgumentVisitor local_ref_visitor(sp, proxy_mh.IsStatic(), proxy_mh.GetShorty(),
                                              proxy_mh.GetShortyLength(), &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert the proxy method into the expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, proxy_mh.GetShorty(),
                                               rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(mirror::ArtMethod** sp, bool is_static, const char* shorty,
                               uint32_t shorty_len, ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;
  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fix up any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  const DexFile* dex_file;
  uint32_t dex_method_idx;
  if (called->IsRuntimeMethod()) {
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    {
      MethodHelper mh(caller);
      dex_file = &mh.GetDexFile();
      code = mh.GetCodeItem();
    }
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid "used uninitialized" warnings.
        invoke_type = kDirect;
        is_range = false;
    }
    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    dex_file = &MethodHelper(called).GetDexFile();
    dex_method_idx = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve the method, filling in the dex cache.
  if (called->IsRuntimeMethod()) {
    SirtRef<mirror::Object> sirt_receiver(soa.Self(), virtual_or_interface ? receiver : nullptr);
    called = linker->ResolveMethod(dex_method_idx, caller, invoke_type);
    receiver = sirt_receiver.get();
  }
  const void* code = NULL;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }
      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method.
      if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
        caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
      } else {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK(&MethodHelper(caller).GetDexFile() == dex_file);
        uint32_t method_index =
            MethodHelper(called).FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
        if (method_index != DexFile::kDexNoIndex) {
          caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
        }
      }
    }
    // Ensure that the called method's class is initialized.
    SirtRef<mirror::Class> called_class(soa.Self(), called->GetDeclaringClass());
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == NULL, self->IsExceptionPending());
  // Fix up any locally saved objects that may have moved during a GC.
  visitor.FixupReferences();
  // Place the called method in the callee-save frame so it appears as the first argument to the
  // quick method.
  *sp = called;
  return code;
}

/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
 *    when we have to split things.
 * 3) The only soft-float ABI, ARM, is 32b, so no widening needs to be taken into account for
 *    floats and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
 *    is necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on ARM only, and it's the same in registers and on
 *    the stack.
 * 6) Only little-endian architectures are handled.
 *
 * Actual work is supposed to be done in a delegate of the template type (see the example
 * sketched after this class). The interface is as follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR.
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, think the architecture is 32b and aligns 64b.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushSirt(mirror::Object* ref): Add a reference to the Sirt. This _will_ be called
 *                                          with nullptr, as this might be important for null
 *                                          initialization. Must return the jobject, that is, the
 *                                          reference to the entry in the Sirt (nullptr if
 *                                          necessary).
 */
template <class T> class BuildGenericJniFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs (dummy).
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs (dummy).

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 is not using regs, anyways.
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildGenericJniFrameStateMachine(T* delegate) : gpr_index_(kNumNativeGprArgs),
                                                           fpr_index_(kNumNativeFprArgs),
                                                           stack_entries_(0),
                                                           delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even
    // iff the next register is even; counting down is just to make the compiler happy...
    CHECK_EQ(kNumNativeGprArgs % 2, 0U);
    CHECK_EQ(kNumNativeFprArgs % 2, 0U);
  }

  virtual ~BuildGenericJniFrameStateMachine() {}

  bool HavePointerGpr() {
    return gpr_index_ > 0;
  }

  void AdvancePointer(void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b.
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }

  bool HaveSirtGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceSirt(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t sirtRef = PushSirt(ptr);
    if (HaveSirtGpr()) {
      gpr_index_--;
      PushGpr(sirtRef);
    } else {
      stack_entries_++;
      PushStack(sirtRef);
      gpr_index_ = 0;
    }
  }

  bool HaveIntGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      PushGpr(val);
    } else {
      stack_entries_++;
      PushStack(val);
      gpr_index_ = 0;
    }
  }

  bool HaveLongGpr() {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

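  // Alignment note (an explanatory aside, not in the upstream sources): on ARM's AAPCS a 64-bit
  // value passed in core registers must start in an even-numbered register (r0 or r2) and, on
  // the stack, at an 8-byte boundary. The counters start even and count down, so an odd counter
  // below means the next slot is misaligned and one padding entry is needed first.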
  bool LongGprNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // Only pad when using multiple registers,
        kAlignLongOnStack &&               // and when it needs alignment,
        (gpr_index_ & 1) == 1;             // and the counter is odd, see constructor.
  }

  bool LongStackNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // Only pad when using multiple registers,
        kAlignLongOnStack &&               // and when it needs 8B alignment,
        (stack_entries_ & 1) == 1;         // and the counter is odd.
  }

  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }

  bool HaveFloatFpr() {
    return fpr_index_ > 0;
  }

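  // Bit-for-bit reinterpretation of |in| as a V, with no numeric conversion (note added, not in
  // the upstream sources): union-based type punning, which GCC and Clang document as defined
  // behavior, so 1.0f becomes 0x3f800000u rather than 1u.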
  template <typename U, typename V> V convert(U in) {
    CHECK_LE(sizeof(U), sizeof(V));
    union { U u; V v; } tmp;
    tmp.u = in;
    return tmp.v;
  }

  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(convert<float, uint32_t>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiRegistersWidened) {
            PushFpr8(convert<double, uint64_t>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(convert<float, uint64_t>(val));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
          // Need to widen before storing: note the "double" in the template instantiation.
          PushStack(convert<double, uintptr_t>(val));
        } else {
          PushStack(convert<float, uintptr_t>(val));
        }
        fpr_index_ = 0;
      }
    }
  }

  bool HaveDoubleFpr() {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // Only pad when using multiple registers,
        kAlignDoubleOnStack &&               // and when it needs alignment,
        (fpr_index_ & 1) == 1;               // and the counter is odd, see constructor.
  }

  bool DoubleStackNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&  // Only pad when using multiple registers,
        kAlignDoubleOnStack &&               // and when it needs 8B alignment,
        (stack_entries_ & 1) == 1;           // and the counter is odd.
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t getStackEntries() {
    return stack_entries_;
  }

  uint32_t getNumberOfUsedGprs() {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t getNumberOfUsedFprs() {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushSirt(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return delegate_->PushSirt(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs.
  uint32_t fpr_index_;      // Number of free FPRs.
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended.
  T* delegate_;             // What Push implementation gets called.
};
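
// A minimal sketch of a delegate (illustrative only, not part of the upstream sources): it just
// counts what would be written, much as ComputeGenericJniFrameSize below does for real. The
// class name and members are hypothetical; a delegate only needs these five methods.
class CountingJniFrameDelegate {
 public:
  void PushGpr(uintptr_t) { gprs_++; }
  void PushFpr4(float) { fprs_++; }
  void PushFpr8(uint64_t) { fprs_++; }
  void PushStack(uintptr_t) { stack_entries_++; }
  uintptr_t PushSirt(mirror::Object*) { sirt_entries_++; return 0u; }  // Real delegates return the Sirt entry.

 private:
  uint32_t gprs_ = 0;
  uint32_t fprs_ = 0;
  uint32_t stack_entries_ = 0;
  uint32_t sirt_entries_ = 0;
};
// Usage sketch: CountingJniFrameDelegate d;
//               BuildGenericJniFrameStateMachine<CountingJniFrameDelegate> sm(&d);
//               sm.AdvancePointer(env); sm.AdvanceSirt(receiver); sm.AdvanceInt(42); ...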

class ComputeGenericJniFrameSize FINAL {
 public:
  ComputeGenericJniFrameSize() : num_sirt_references_(0), num_stack_entries_(0) {}

  uint32_t GetStackSize() {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  void ComputeLayout(mirror::ArtMethod*** m, bool is_static, const char* shorty,
                     uint32_t shorty_len, void* sp, StackIndirectReferenceTable** table,
                     uint32_t* sirt_entries, uintptr_t** start_stack, uintptr_t** start_gpr,
                     uint32_t** start_fpr, void** code_return, size_t* overall_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ComputeAll(is_static, shorty, shorty_len);

    mirror::ArtMethod* method = **m;

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the Sirt, and relocate the method pointer.

    // "Free" the slot for the method.
    sp8 += kPointerSize;

    // Add the Sirt.
    *sirt_entries = num_sirt_references_;
    size_t sirt_size = StackIndirectReferenceTable::GetAlignedSirtSize(num_sirt_references_);
    sp8 -= sirt_size;
    *table = reinterpret_cast<StackIndirectReferenceTable*>(sp8);
    (*table)->SetNumberOfReferences(num_sirt_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    sp8 -= kPointerSize;
    uint8_t* method_pointer = sp8;
    *(reinterpret_cast<mirror::ArtMethod**>(method_pointer)) = method;
    *m = reinterpret_cast<mirror::ArtMethod**>(method_pointer);

    // Reference cookie and padding.
    sp8 -= 8;
    // Store the Sirt size.
    *reinterpret_cast<uint32_t*>(sp8) = static_cast<uint32_t>(sirt_size & 0xFFFFFFFF);

    // Next comes the native call stack.
    sp8 -= GetStackSize();
    // Align the call stack base to 16 bytes, as AArch64 requires.
    uintptr_t mask = ~0x0F;
    sp8 = reinterpret_cast<uint8_t*>(reinterpret_cast<uintptr_t>(sp8) & mask);
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // Put FPRs and GPRs below. Sizing each FPR slot as pointer-sized is OK right now, as the
    // only ARM configuration here is soft-float.
    size_t fregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);

    // Reserve space for the code pointer.
    sp8 -= kPointerSize;
    *code_return = reinterpret_cast<void*>(sp8);

    *overall_size = reinterpret_cast<uint8_t*>(sp) - sp8;

    // The new SP is stored at the end of the alloca, so it can be immediately popped.
    sp8 = reinterpret_cast<uint8_t*>(sp) - 5 * KB;
    *(reinterpret_cast<uint8_t**>(sp8)) = method_pointer;
  }
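
  // Roughly, the area at and below the original sp then looks like this (addresses decrease
  // downwards; exact offsets follow from the code above):
  //   | Sirt                        |  (reuses the old Method* slot, grows down from sp)
  //   | Method*                     |  <- *m afterwards
  //   | Sirt size + JNI cookie slot |  (8 bytes)
  //   | native stack arguments      |  (base aligned to 16 bytes)
  //   | FPR staging area            |
  //   | GPR staging area            |
  //   | native code pointer         |  <- *code_return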

  void ComputeSirtOffset() { }  // Nothing to do, static right now.

  void ComputeAll(bool is_static, const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize> sm(this);

    // JNIEnv
    sm.AdvancePointer(nullptr);

    // Class object or this as first argument
    sm.AdvanceSirt(reinterpret_cast<mirror::Object*>(0x12345678));

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          sm.AdvanceSirt(reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
      }
    }

    num_stack_entries_ = sm.getStackEntries();
  }

  void PushGpr(uintptr_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr4(float /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr8(uint64_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushStack(uintptr_t /* val */) {
    // Counting is already done by the state machine.
  }

  uintptr_t PushSirt(mirror::Object* /* ptr */) {
    num_sirt_references_++;
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 private:
  uint32_t num_sirt_references_;
  uint32_t num_stack_entries_;
};

// Visits arguments on the stack, placing them into a region lower down the stack for the
// benefit of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(mirror::ArtMethod*** sp, bool is_static, const char* shorty,
                              uint32_t shorty_len, Thread* self) :
      QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), sm_(this) {
    ComputeGenericJniFrameSize fsc;
    fsc.ComputeLayout(sp, is_static, shorty, shorty_len, *sp, &sirt_, &sirt_expected_refs_,
                      &cur_stack_arg_, &cur_gpr_reg_, &cur_fpr_reg_, &code_return_,
                      &alloca_used_size_);
    sirt_number_of_references_ = 0;
    cur_sirt_entry_ = reinterpret_cast<StackReference<mirror::Object>*>(GetFirstSirtEntry());

    // The JNI environment is always the first argument.
    sm_.AdvancePointer(self->GetJniEnv());

    if (is_static) {
      sm_.AdvanceSirt((**sp)->GetDeclaringClass());
    }
  }

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeSirt(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  jobject GetFirstSirtEntry() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return reinterpret_cast<jobject>(sirt_->GetStackReference(0));
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    *cur_fpr_reg_ = val;
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

  uintptr_t PushSirt(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t tmp;
    if (ref == nullptr) {
      *cur_sirt_entry_ = StackReference<mirror::Object>();
      tmp = reinterpret_cast<uintptr_t>(nullptr);
    } else {
      *cur_sirt_entry_ = StackReference<mirror::Object>::FromMirrorPtr(ref);
      tmp = reinterpret_cast<uintptr_t>(cur_sirt_entry_);
    }
    cur_sirt_entry_++;
    sirt_number_of_references_++;
    return tmp;
  }

  // Size of the part of the alloca that we actually need.
  size_t GetAllocaUsedSize() {
    return alloca_used_size_;
  }

  void* GetCodeReturn() {
    return code_return_;
  }

 private:
  uint32_t sirt_number_of_references_;
  StackReference<mirror::Object>* cur_sirt_entry_;
  StackIndirectReferenceTable* sirt_;
  uint32_t sirt_expected_refs_;
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
  // StackReference<mirror::Object>* top_of_sirt_;
  void* code_return_;
  size_t alloca_used_size_;

  BuildGenericJniFrameStateMachine<BuildGenericJniFrameVisitor> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read the raw bits as an integer so that we don't cast the value to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceSirt(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
}
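
// A "split" long or double here is one whose low word was passed in the last available
// argument register while its high word spilled to the caller's stack; ReadSplitLongParam
// (from the QuickArgumentVisitor base class) stitches the two halves back together.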

void BuildGenericJniFrameVisitor::FinalizeSirt(Thread* self) {
  // Initialize padding entries.
  while (sirt_number_of_references_ < sirt_expected_refs_) {
    *cur_sirt_entry_ = StackReference<mirror::Object>();
    cur_sirt_entry_++;
    sirt_number_of_references_++;
  }
  sirt_->SetNumberOfReferences(sirt_expected_refs_);
  DCHECK_NE(sirt_expected_refs_, 0U);
  // Install the Sirt on the thread.
  self->PushSirt(sirt_);
}
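
// The padding entries are null references, so stack walks and GC scans that visit the full
// table (sirt_expected_refs_ entries) only ever see valid-or-null slots.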

extern "C" void* artFindNativeMethod();

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * Create a Sirt and a call stack, and fill a mini stack with values to be pushed to registers.
 * The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this, as the Sirt needs to go into the callee-save frame.
 *
 * The return of this function denotes:
 * 1) How many bytes of the alloca can be released, if the value is non-negative.
 * 2) An error, if the value is negative.
 */
extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* called = *sp;
  DCHECK(called->IsNative()) << PrettyMethod(called, true);

  // Run the visitor.
  MethodHelper mh(called);

  BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), mh.GetShorty(), mh.GetShortyLength(),
                                      self);
  visitor.VisitArguments();
  visitor.FinalizeSirt(self);

  // Fix up managed-stack things in Thread.
  self->SetTopOfStack(sp, 0);

  self->VerifyStack();

  // Start JNI, save the cookie.
  uint32_t cookie;
  if (called->IsSynchronized()) {
    cookie = JniMethodStartSynchronized(visitor.GetFirstSirtEntry(), self);
    if (self->IsExceptionPending()) {
      self->PopSirt();
      // A negative value denotes an error.
      return -1;
    }
  } else {
    cookie = JniMethodStart(self);
  }
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  *(sp32 - 1) = cookie;

  // Retrieve the stored native code.
  const void* nativeCode = called->GetNativeMethod();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
    nativeCode = artFindNativeMethod();

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.
      return -1;
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

  // Store the native code pointer in the stack at the right location.
  uintptr_t* code_pointer = reinterpret_cast<uintptr_t*>(visitor.GetCodeReturn());
  *code_pointer = reinterpret_cast<uintptr_t>(nativeCode);

  // 5K reserved, window_size + frame pointer used.
  size_t window_size = visitor.GetAllocaUsedSize();
  return (5 * KB) - window_size - kPointerSize;
}
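
// The generic JNI assembly stub pops the returned number of bytes off its alloca, leaving its
// stack pointer at the staging area prepared above, whose final element is the native code
// pointer (see the comment block before artQuickGenericJniTrampoline); a negative return
// value routes to exception delivery instead.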

/*
 * Called after the native JNI code returns. Responsible for cleanup (SIRT, saved state) and
 * unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, mirror::ArtMethod** sp,
                                                    jvalue result, uint64_t result_f)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  mirror::ArtMethod* called = *sp;
  uint32_t cookie = *(sp32 - 1);

  MethodHelper mh(called);
  char return_shorty_char = mh.GetShorty()[0];

  if (return_shorty_char == 'L') {
    // Reference results are the only special ending call: the returned local reference must
    // be decoded.
    if (called->IsSynchronized()) {
      StackIndirectReferenceTable* table =
          reinterpret_cast<StackIndirectReferenceTable*>(
              reinterpret_cast<uint8_t*>(sp) + kPointerSize);
      jobject tmp = reinterpret_cast<jobject>(table->GetStackReference(0));

      return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(result.l, cookie,
                                                                              tmp, self));
    } else {
      return reinterpret_cast<uint64_t>(JniMethodEndWithReference(result.l, cookie, self));
    }
  } else {
    if (called->IsSynchronized()) {
      StackIndirectReferenceTable* table =
          reinterpret_cast<StackIndirectReferenceTable*>(
              reinterpret_cast<uint8_t*>(sp) + kPointerSize);
      jobject tmp = reinterpret_cast<jobject>(table->GetStackReference(0));

      JniMethodEndSynchronized(cookie, tmp, self);
    } else {
      JniMethodEnd(cookie, self);
    }

    switch (return_shorty_char) {
      case 'F':  // Fall-through.
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

template<InvokeType type, bool access_check>
static uint64_t artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                mirror::ArtMethod* caller_method,
                                Thread* self, mirror::ArtMethod** sp);

template<InvokeType type, bool access_check>
static uint64_t artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                mirror::ArtMethod* caller_method,
                                Thread* self, mirror::ArtMethod** sp) {
  mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
                                             type);
  if (UNLIKELY(method == nullptr)) {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty =
        dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, this_object, caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return 0;  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
                          << MethodHelper(method).GetDexFile().GetLocation();
#ifdef __LP64__
  UNIMPLEMENTED(FATAL);
  return 0;
#else
  uint32_t method_uint = reinterpret_cast<uint32_t>(method);
  uint64_t code_uint = reinterpret_cast<uint32_t>(code);
  uint64_t result = ((code_uint << 32) | method_uint);
  return result;
#endif
}
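
// On 32-bit targets the resolved method lands in the low word of the packed return value and
// its code entry point in the high word. The consuming assembly stubs (see
// runtime_support_asm.S) receive the pair in two registers and, roughly, move the method into
// the Method* argument register and branch to the code pointer.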

// Explicit artInvokeCommon template function declarations to please analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                          \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)                                    \
  static uint64_t artInvokeCommon<type, access_check>(uint32_t method_idx,                \
                                                      mirror::Object* this_object,        \
                                                      mirror::ArtMethod* caller_method,   \
                                                      Thread* self, mirror::ArtMethod** sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL

// See comments in runtime_support_asm.S.
extern "C" uint64_t artInvokeInterfaceTrampolineWithAccessCheck(uint32_t method_idx,
                                                                mirror::Object* this_object,
                                                                mirror::ArtMethod* caller_method,
                                                                Thread* self,
                                                                mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" uint64_t artInvokeDirectTrampolineWithAccessCheck(uint32_t method_idx,
                                                             mirror::Object* this_object,
                                                             mirror::ArtMethod* caller_method,
                                                             Thread* self,
                                                             mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" uint64_t artInvokeStaticTrampolineWithAccessCheck(uint32_t method_idx,
                                                             mirror::Object* this_object,
                                                             mirror::ArtMethod* caller_method,
                                                             Thread* self,
                                                             mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" uint64_t artInvokeSuperTrampolineWithAccessCheck(uint32_t method_idx,
                                                            mirror::Object* this_object,
                                                            mirror::ArtMethod* caller_method,
                                                            Thread* self,
                                                            mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" uint64_t artInvokeVirtualTrampolineWithAccessCheck(uint32_t method_idx,
                                                              mirror::Object* this_object,
                                                              mirror::ArtMethod* caller_method,
                                                              Thread* self,
                                                              mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, self, sp);
}

// Determine the target of an interface dispatch. this_object is known to be non-null.
extern "C" uint64_t artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
                                                 mirror::Object* this_object,
                                                 mirror::ArtMethod* caller_method,
                                                 Thread* self, mirror::ArtMethod** sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* method;
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
    if (UNLIKELY(method == nullptr)) {
      FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
                                                                 caller_method);
      return 0;  // Failure.
    }
  } else {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
    // Determine the method index from the calling dex instruction.
#if defined(__arm__)
    // On entry the stack pointed to by sp is:
    // | argN        |  |
    // | ...         |  |
    // | arg4        |  |
    // | arg3 spill  |  |  Caller's frame
    // | arg2 spill  |  |
    // | arg1 spill  |  |
    // | Method*     | ---
    // | LR          |
    // | ...         |     callee saves
    // | R3          |     arg3
    // | R2          |     arg2
    // | R1          |     arg1
    // | R0          |
    // | Method*     |  <- sp
    DCHECK_EQ(48U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
    uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + kPointerSize);
    uintptr_t caller_pc = regs[10];
#elif defined(__i386__)
    // On entry the stack pointed to by sp is:
    // | argN        |  |
    // | ...         |  |
    // | arg4        |  |
    // | arg3 spill  |  |  Caller's frame
    // | arg2 spill  |  |
    // | arg1 spill  |  |
    // | Method*     | ---
    // | Return      |
    // | EBP,ESI,EDI |     callee saves
    // | EBX         |     arg3
    // | EDX         |     arg2
    // | ECX         |     arg1
    // | EAX/Method* |  <- sp
    DCHECK_EQ(32U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
    uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
    uintptr_t caller_pc = regs[7];
#elif defined(__mips__)
    // On entry the stack pointed to by sp is:
    // | argN        |  |
    // | ...         |  |
    // | arg4        |  |
    // | arg3 spill  |  |  Caller's frame
    // | arg2 spill  |  |
    // | arg1 spill  |  |
    // | Method*     | ---
    // | RA          |
    // | ...         |     callee saves
    // | A3          |     arg3
    // | A2          |     arg2
    // | A1          |     arg1
    // | A0/Method*  |  <- sp
    DCHECK_EQ(64U, Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes());
    uintptr_t* regs = reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp));
    uintptr_t caller_pc = regs[15];
#else
    UNIMPLEMENTED(FATAL);
    uintptr_t caller_pc = 0;
#endif
    uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
    const DexFile::CodeItem* code = MethodHelper(caller_method).GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
          instr_code == Instruction::INVOKE_INTERFACE_RANGE)
        << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
    uint32_t dex_method_idx;
    if (instr_code == Instruction::INVOKE_INTERFACE) {
      dex_method_idx = instr->VRegB_35c();
    } else {
      DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
      dex_method_idx = instr->VRegB_3rc();
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty =
        dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, this_object, caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return 0;  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
                          << MethodHelper(method).GetDexFile().GetLocation();
#ifdef __LP64__
  UNIMPLEMENTED(FATAL);
  return 0;
#else
  uint32_t method_uint = reinterpret_cast<uint32_t>(method);
  uint64_t code_uint = reinterpret_cast<uint32_t>(code);
  uint64_t result = ((code_uint << 32) | method_uint);
  return result;
#endif
}

}  // namespace art