/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils.h"
#include "gc/accounting/card_table-inl.h"
#include "instruction_set.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "object_utils.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    callee saves
  // | R3         |    arg3
  // | R2         |    arg2
  // | R1         |    arg1
  // | R0         |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 44;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X28        |
  // | :          |
  // | X19        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D15        |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 144;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 296;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | EAX/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168;  // Offset of return address.
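  // The argument GPRs (RSI, RDX, RCX, R8, R9) are not spilled contiguously in the frame (see the
  // diagram above), so each argument index is mapped to its spill slot explicitly below.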
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
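  // For the given quick ref and args quick frame, return the caller's method.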
  static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                       const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
      gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
      is_split_long_or_double_(false) {}

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

  byte* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) ||
        (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

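  // A split long/double has its low half in the last argument register and its high half in the
  // first out slot of the caller's frame; reassemble the 64-bit value here.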
  uint64_t ReadSplitLongParam() const {
    DCHECK(IsSplitLongOrDouble());
    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
    return (low_half & 0xffffffffULL) | (high_half << 32);
  }

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // This implementation doesn't support a reg-spill area for hard float
    // ABI targets such as x86_64 and aarch64. So, for targets where
    // 'kQuickSoftFloatAbi' is 'false':
    // (a) 'stack_args_' should point to the method's first argument, and
    // (b) whatever the argument type is, 'stack_index_' should be advanced
    //     on every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    stack_index_ = 0;
    if (!is_static_) {  // Handle this.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      if (!kQuickSoftFloatAbi || kNumQuickGprArgs == 0) {
        stack_index_++;
      }
      if (kNumQuickGprArgs > 0) {
        gpr_index_++;
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
            stack_index_++;
          }
          if (gpr_index_ < kNumQuickGprArgs) {
            gpr_index_++;
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
            } else {
              stack_index_++;
            }
          } else {
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
            }
            stack_index_++;
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            Visit();
            if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  gpr_index_++;
                } else if (kQuickSoftFloatAbi) {
                  stack_index_++;
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs);
            Visit();
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
                  fpr_index_++;
                }
              }
            }
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 private:
  static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
                                             uint32_t shorty_len) {
    if (kQuickSoftFloatAbi) {
      CHECK_EQ(kNumQuickFprArgs, 0U);
      return (kNumQuickGprArgs * GetBytesPerGprSpillLocation(kRuntimeISA))
          + sizeof(StackReference<mirror::ArtMethod>) /* StackReference<ArtMethod> */;
    } else {
      // For now, there is no reg-spill area for the targets with
      // hard float ABI. So, the offset pointing to the method's first
      // parameter ('this' for non-static methods) should be returned.
      return sizeof(StackReference<mirror::ArtMethod>);  // Skip StackReference<ArtMethod>.
    }
  }

  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;
  byte* const gpr_args_;  // Address of GPR arguments in callee save frame.
  byte* const fpr_args_;  // Address of FPR arguments in callee save frame.
  byte* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  uint32_t fpr_index_;  // Index into spilled FPRs.
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
                               size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
  ++cur_reg_;
}

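// Entry point for a quick-compiled caller whose callee must run in the interpreter: copies the
// quick arguments into a shadow frame, then interprets the method and returns its result.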
extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension("Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, NULL,  // No last shadow coming from quick.
                                                  method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitialized()) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(method));
    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                            const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      val.j = 0;
      break;
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making stack crawlable.
  DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method);
  self->SetTopOfStack(sp, 0);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place arguments into the args vector and remove the receiver.
  mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy();
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len,
                               ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;
  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs).
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  const DexFile* dex_file;
  uint32_t dex_method_idx;
  if (called->IsRuntimeMethod()) {
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
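    // Decode the invoke instruction at the call site to recover the original invoke type and the
    // method index it referenced.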
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid "used uninitialized" warnings.
        invoke_type = kDirect;
        is_range = false;
    }
    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    dex_file = called->GetDexFile();
    dex_method_idx = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (UNLIKELY(called->IsRuntimeMethod())) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    called = linker->ResolveMethod(self, dex_method_idx, &caller, invoke_type);
  }
  const void* code = NULL;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;

      mirror::ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method.
      if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
        caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
      } else {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK_EQ(caller->GetDexFile(), dex_file);
        StackHandleScope<1> hs(self);
        MethodHelper mh(hs.NewHandle(called));
        uint32_t method_index = mh.FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
        if (method_index != DexFile::kDexNoIndex) {
          caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
        }
      }
    }
    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == NULL, self->IsExceptionPending());
  // Fixup any locally saved objects that may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  sp->Assign(called);
  return code;
}

/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
 *    when we have to split things.
 * 3) The only soft-float ABI, ARM, is 32b, so no widening needs to be taken into account for
 *    floats and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
 *    necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
 *    the stack.
 * 6) There is only little endian.
 *
 *
 * Actual work is supposed to be done in a delegate of the template type. The interface is as
 * follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, think the architecture is 32b and aligns 64b.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be
 *                                            called with nullptr, as this might be important for
 *                                            null initialization. Must return the jobject, that
 *                                            is, the reference to the entry in the HandleScope
 *                                            (nullptr if necessary).
 *
 */
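// A minimal delegate sketch (hypothetical, for illustration only). It satisfies the interface
// above by merely counting what was pushed, similar in spirit to the frame-size computation
// further below:
//
//   class CountingDelegate {
//    public:
//     void PushGpr(uintptr_t) { gprs_++; }
//     void PushFpr4(float) { fprs_++; }
//     void PushFpr8(uint64_t) { fprs_++; }
//     void PushStack(uintptr_t) { stack_++; }
//     uintptr_t PushHandle(mirror::Object*) { handles_++; return 0; }
//    private:
//     size_t gprs_ = 0, fprs_ = 0, stack_ = 0, handles_ = 0;
//   };
//
//   BuildGenericJniFrameStateMachine<CountingDelegate> sm(&counting_delegate);
//   sm.AdvanceInt(42);  // Routes to PushGpr or PushStack depending on free GPRs.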
template <class T> class BuildGenericJniFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs (dummy).
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs (dummy).

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 not using regs, anyways.
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildGenericJniFrameStateMachine(T* delegate) : gpr_index_(kNumNativeGprArgs),
                                                           fpr_index_(kNumNativeFprArgs),
                                                           stack_entries_(0),
                                                           delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even
    // iff the next register is even; counting down is just to make the compiler happy...
    CHECK_EQ(kNumNativeGprArgs % 2, 0U);
    CHECK_EQ(kNumNativeFprArgs % 2, 0U);
  }

  virtual ~BuildGenericJniFrameStateMachine() {}

  bool HavePointerGpr() {
    return gpr_index_ > 0;
  }

  void AdvancePointer(void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b.
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }

  bool HaveHandleScopeGpr() {
    return gpr_index_ > 0;
  }

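  // Pushes the HandleScope handle for |ptr| (not the raw pointer) into the next free GPR, or onto
  // the stack once registers are exhausted.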
  void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t handle = PushHandle(ptr);
    if (HaveHandleScopeGpr()) {
      gpr_index_--;
      PushGpr(handle);
    } else {
      stack_entries_++;
      PushStack(handle);
      gpr_index_ = 0;
    }
  }

  bool HaveIntGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      PushGpr(val);
    } else {
      stack_entries_++;
      PushStack(val);
      gpr_index_ = 0;
    }
  }

  bool HaveLongGpr() {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

  bool LongGprNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
        kAlignLongOnStack &&               // and when it needs alignment
        (gpr_index_ & 1) == 1;             // counter is odd, see constructor
  }

  bool LongStackNeedsPadding() {
    return kRegistersNeededForLong > 1 &&  // only pad when using multiple registers
        kAlignLongOnStack &&               // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;         // counter is odd
  }

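  // Pushes a 64-bit value into GPRs, splitting across two registers and padding as the constants
  // above dictate, and spilling to the stack once GPRs run out.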
  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }

  bool HaveFloatFpr() {
    return fpr_index_ > 0;
  }

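  // Bit-casts a U into a V of at least the same size via a union, so that float bits can be moved
  // into integer registers without pointer-aliasing tricks.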
  template <typename U, typename V> V convert(U in) {
    CHECK_LE(sizeof(U), sizeof(V));
    union { U u; V v; } tmp;
    tmp.u = in;
    return tmp.v;
  }

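  // On a soft-float ABI the float travels in an integer register; otherwise it goes to an FPR,
  // widened to double only where kMultiRegistersWidened requires it.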
  void AdvanceFloat(float val) {
    if (kNativeSoftFloatAbi) {
      AdvanceInt(convert<float, uint32_t>(val));
    } else {
      if (HaveFloatFpr()) {
        fpr_index_--;
        if (kRegistersNeededForDouble == 1) {
          if (kMultiRegistersWidened) {
            PushFpr8(convert<double, uint64_t>(val));
          } else {
            // No widening, just use the bits.
            PushFpr8(convert<float, uint64_t>(val));
          }
        } else {
          PushFpr4(val);
        }
      } else {
        stack_entries_++;
        if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
          // Need to widen before storing: Note the "double" in the template instantiation.
          PushStack(convert<double, uintptr_t>(val));
        } else {
          PushStack(convert<float, uintptr_t>(val));
        }
        fpr_index_ = 0;
      }
    }
  }

  bool HaveDoubleFpr() {
    return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
  }

  bool DoubleFprNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&     // only pad when using multiple registers
        kAlignDoubleOnStack &&                  // and when it needs alignment
        (fpr_index_ & 1) == 1;                  // counter is odd, see constructor
  }

  bool DoubleStackNeedsPadding() {
    return kRegistersNeededForDouble > 1 &&     // only pad when using multiple registers
        kAlignDoubleOnStack &&                  // and when it needs 8B alignment
        (stack_entries_ & 1) == 1;              // counter is odd
  }

  void AdvanceDouble(uint64_t val) {
    if (kNativeSoftFloatAbi) {
      AdvanceLong(val);
    } else {
      if (HaveDoubleFpr()) {
        if (DoubleFprNeedsPadding()) {
          PushFpr4(0);
          fpr_index_--;
        }
        PushFpr8(val);
        fpr_index_ -= kRegistersNeededForDouble;
      } else {
        if (DoubleStackNeedsPadding()) {
          PushStack(0);
          stack_entries_++;
        }
        if (kRegistersNeededForDouble == 1) {
          PushStack(static_cast<uintptr_t>(val));
          stack_entries_++;
        } else {
          PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
          PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
          stack_entries_ += 2;
        }
        fpr_index_ = 0;
      }
    }
  }

  uint32_t getStackEntries() {
    return stack_entries_;
  }

  uint32_t getNumberOfUsedGprs() {
    return kNumNativeGprArgs - gpr_index_;
  }

  uint32_t getNumberOfUsedFprs() {
    return kNumNativeFprArgs - fpr_index_;
  }

 private:
  void PushGpr(uintptr_t val) {
    delegate_->PushGpr(val);
  }
  void PushFpr4(float val) {
    delegate_->PushFpr4(val);
  }
  void PushFpr8(uint64_t val) {
    delegate_->PushFpr8(val);
  }
  void PushStack(uintptr_t val) {
    delegate_->PushStack(val);
  }
  uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return delegate_->PushHandle(ref);
  }

  uint32_t gpr_index_;      // Number of free GPRs.
  uint32_t fpr_index_;      // Number of free FPRs.
  uint32_t stack_entries_;  // Stack entries are in multiples of 32b, as floats are usually not
                            // extended.
  T* delegate_;             // What Push implementation gets called.
};

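// Dry-run delegate for the state machine above: its Push* methods store nothing and only count
// handle-scope references and stack entries, which ComputeLayout then uses to carve the native
// frame out of the alloca area.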
class ComputeGenericJniFrameSize FINAL {
 public:
  ComputeGenericJniFrameSize() : num_handle_scope_references_(0), num_stack_entries_(0) {}

  uint32_t GetStackSize() {
    return num_stack_entries_ * sizeof(uintptr_t);
  }

  // WARNING: After this, *sp won't be pointing to the method anymore!
  void ComputeLayout(StackReference<mirror::ArtMethod>** m, bool is_static, const char* shorty,
                     uint32_t shorty_len, void* sp, HandleScope** table,
                     uint32_t* handle_scope_entries, uintptr_t** start_stack,
                     uintptr_t** start_gpr, uint32_t** start_fpr, void** code_return,
                     size_t* overall_size)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ComputeAll(is_static, shorty, shorty_len);

    mirror::ArtMethod* method = (*m)->AsMirrorPtr();

    uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);

    // First, fix up the layout of the callee-save frame.
    // We have to squeeze in the HandleScope, and relocate the method pointer.

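    // Approximate layout after the fixup, from higher to lower addresses (a sketch derived
    // from the steps below; exact offsets depend on the alignment rounding):
    //   | callee saves / old method slot |  <- original sp
    //   | HandleScope                    |
    //   | StackReference<ArtMethod>      |  <- relocated method pointer, new *m
    //   | cookie + padding (8 bytes)     |
    //   | outgoing stack arguments       |
    //   | FPR staging area               |
    //   | GPR staging area               |
    //   | native code pointer            |  <- *code_return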
    // "Free" the slot for the method.
    sp8 += kPointerSize;  // In the callee-save frame we use a full pointer.

    // Under the callee saves put handle scope and new method stack reference.
    *handle_scope_entries = num_handle_scope_references_;

    size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
    size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>);

    sp8 -= scope_and_method;
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));

    uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>);
    *table = reinterpret_cast<HandleScope*>(sp8_table);
    (*table)->SetNumberOfReferences(num_handle_scope_references_);

    // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
    uint8_t* method_pointer = sp8;
    StackReference<mirror::ArtMethod>* new_method_ref =
        reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer);
    new_method_ref->Assign(method);
    *m = new_method_ref;

    // Reference cookie and padding.
    sp8 -= 8;
    // Store the HandleScope size.
    *reinterpret_cast<uint32_t*>(sp8) = static_cast<uint32_t>(handle_scope_size & 0xFFFFFFFF);

    // Next comes the native call stack.
    sp8 -= GetStackSize();
    // Align by kStackAlignment.
    sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
    *start_stack = reinterpret_cast<uintptr_t*>(sp8);

    // Put FPRs and GPRs below. The assumption is OK right now, as we have soft-float ARM.
    size_t fregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeFprArgs;
    sp8 -= fregs * sizeof(uintptr_t);
    *start_fpr = reinterpret_cast<uint32_t*>(sp8);
    size_t iregs = BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize>::kNumNativeGprArgs;
    sp8 -= iregs * sizeof(uintptr_t);
    *start_gpr = reinterpret_cast<uintptr_t*>(sp8);

    // Reserve space for the code pointer.
    sp8 -= kPointerSize;
    *code_return = reinterpret_cast<void*>(sp8);

    *overall_size = reinterpret_cast<uint8_t*>(sp) - sp8;

    // The new SP is stored at the end of the alloca, so it can be immediately popped.
    sp8 = reinterpret_cast<uint8_t*>(sp) - 5 * KB;
    *(reinterpret_cast<uint8_t**>(sp8)) = method_pointer;
  }

  void ComputeHandleScopeOffset() { }  // Nothing to do, static right now.

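  // Dry run over the shorty: advances the state machine with dummy values so that only the
  // counts (stack entries, handle-scope references) are recorded; the Push* methods below
  // deliberately store nothing.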
  void ComputeAll(bool is_static, const char* shorty, uint32_t shorty_len)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    BuildGenericJniFrameStateMachine<ComputeGenericJniFrameSize> sm(this);

    // JNIEnv.
    sm.AdvancePointer(nullptr);

    // Class object or this as first argument.
    sm.AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));

    for (uint32_t i = 1; i < shorty_len; ++i) {
      Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
          sm.AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
          break;

        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          sm.AdvanceInt(0);
          break;
        case Primitive::kPrimFloat:
          sm.AdvanceFloat(0);
          break;
        case Primitive::kPrimDouble:
          sm.AdvanceDouble(0);
          break;
        case Primitive::kPrimLong:
          sm.AdvanceLong(0);
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
      }
    }

    num_stack_entries_ = sm.getStackEntries();
  }

  void PushGpr(uintptr_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr4(float /* val */) {
    // Not optimizing registers, yet.
  }

  void PushFpr8(uint64_t /* val */) {
    // Not optimizing registers, yet.
  }

  void PushStack(uintptr_t /* val */) {
    // Counting is already done in the superclass.
  }

  uintptr_t PushHandle(mirror::Object* /* ptr */) {
    num_handle_scope_references_++;
    return reinterpret_cast<uintptr_t>(nullptr);
  }

 private:
  uint32_t num_handle_scope_references_;
  uint32_t num_stack_entries_;
};

// Visits arguments on the stack placing them into a region lower down the stack for the benefit
// of transitioning into native code.
class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildGenericJniFrameVisitor(StackReference<mirror::ArtMethod>** sp, bool is_static,
                              const char* shorty, uint32_t shorty_len, Thread* self) :
      QuickArgumentVisitor(*sp, is_static, shorty, shorty_len), sm_(this) {
    ComputeGenericJniFrameSize fsc;
    fsc.ComputeLayout(sp, is_static, shorty, shorty_len, *sp, &handle_scope_,
                      &handle_scope_expected_refs_, &cur_stack_arg_, &cur_gpr_reg_, &cur_fpr_reg_,
                      &code_return_, &alloca_used_size_);
    handle_scope_number_of_references_ = 0;
    cur_hs_entry_ = GetFirstHandleScopeEntry();

    // The JNI environment is always the first argument.
    sm_.AdvancePointer(self->GetJniEnv());

    if (is_static) {
      sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass());
    }
  }

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  StackReference<mirror::Object>* GetFirstHandleScopeEntry()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).GetReference();
  }

  jobject GetFirstHandleScopeJObject()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    return handle_scope_->GetHandle(0).ToJObject();
  }

  void PushGpr(uintptr_t val) {
    *cur_gpr_reg_ = val;
    cur_gpr_reg_++;
  }

  void PushFpr4(float val) {
    *cur_fpr_reg_ = val;
    cur_fpr_reg_++;
  }

  void PushFpr8(uint64_t val) {
    uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
    *tmp = val;
    cur_fpr_reg_ += 2;
  }

  void PushStack(uintptr_t val) {
    *cur_stack_arg_ = val;
    cur_stack_arg_++;
  }

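  // Stores the reference into the next handle-scope slot and returns the address of that slot,
  // or 0 for a null reference; this pointer (or null) is what the native code sees as jobject.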
  uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t tmp;
    if (ref == nullptr) {
      *cur_hs_entry_ = StackReference<mirror::Object>();
      tmp = reinterpret_cast<uintptr_t>(nullptr);
    } else {
      *cur_hs_entry_ = StackReference<mirror::Object>::FromMirrorPtr(ref);
      tmp = reinterpret_cast<uintptr_t>(cur_hs_entry_);
    }
    cur_hs_entry_++;
    handle_scope_number_of_references_++;
    return tmp;
  }

  // Size of the part of the alloca that we actually need.
  size_t GetAllocaUsedSize() {
    return alloca_used_size_;
  }

  void* GetCodeReturn() {
    return code_return_;
  }

 private:
  uint32_t handle_scope_number_of_references_;
  StackReference<mirror::Object>* cur_hs_entry_;
  HandleScope* handle_scope_;
  uint32_t handle_scope_expected_refs_;
  uintptr_t* cur_gpr_reg_;
  uint32_t* cur_fpr_reg_;
  uintptr_t* cur_stack_arg_;
  // StackReference<mirror::Object>* top_of_handle_scope_;
  void* code_return_;
  size_t alloca_used_size_;

  BuildGenericJniFrameStateMachine<BuildGenericJniFrameVisitor> sm_;

  DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
};

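// Routes a single managed argument, read out of the quick frame, into the state machine
// according to its primitive type.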
void BuildGenericJniFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong: {
      jlong long_arg;
      if (IsSplitLongOrDouble()) {
        long_arg = ReadSplitLongParam();
      } else {
        long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      sm_.AdvanceLong(long_arg);
      break;
    }
    case Primitive::kPrimDouble: {
      uint64_t double_arg;
      if (IsSplitLongOrDouble()) {
        // Read the raw bits so that we don't cast to a double.
        double_arg = ReadSplitLongParam();
      } else {
        double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
      }
      sm_.AdvanceDouble(double_arg);
      break;
    }
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
      break;
    }
    case Primitive::kPrimFloat:
      sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
      sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
}

void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
  // Initialize padding entries.
  while (handle_scope_number_of_references_ < handle_scope_expected_refs_) {
    *cur_hs_entry_ = StackReference<mirror::Object>();
    cur_hs_entry_++;
    handle_scope_number_of_references_++;
  }
  handle_scope_->SetNumberOfReferences(handle_scope_expected_refs_);
  DCHECK_NE(handle_scope_expected_refs_, 0U);
  // Install the HandleScope on the thread.
  self->PushHandleScope(handle_scope_);
}

extern "C" void* artFindNativeMethod();

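// Finishes the JNI transition for a reference-returning method: decodes the returned jobject
// back to a mirror object (handling the unlock for synchronized methods) and packs the pointer
// into the 64-bit return value.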
uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
  if (lock != nullptr) {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
  } else {
    return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
  }
}

void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
  if (lock != nullptr) {
    JniMethodEndSynchronized(cookie, lock, self);
  } else {
    JniMethodEnd(cookie, self);
  }
}

/*
 * Initializes an alloca region assumed to be directly below sp for a native call:
 * creates a HandleScope and the native call stack, and fills a mini stack with the values to
 * be loaded into registers. The final element on the stack is a pointer to the native code.
 *
 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
 * We need to fix this up, as the handle scope needs to go into the callee-save frame.
 *
 * The return value of this function denotes:
 * 1) How many bytes of the alloca can be released, if the value is non-negative.
 * 2) An error, if the value is negative.
 */
extern "C" ssize_t artQuickGenericJniTrampoline(Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* called = sp->AsMirrorPtr();
  DCHECK(called->IsNative()) << PrettyMethod(called, true);

  // Run the visitor.
  uint32_t shorty_len = 0;
  const char* shorty = called->GetShorty(&shorty_len);
  BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), shorty, shorty_len, self);
  visitor.VisitArguments();
  visitor.FinalizeHandleScope(self);

  // Fix up managed-stack things in Thread.
  self->SetTopOfStack(sp, 0);

  self->VerifyStack();

  // Start JNI, save the cookie.
  uint32_t cookie;
  if (called->IsSynchronized()) {
    cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
    if (self->IsExceptionPending()) {
      self->PopHandleScope();
      // A negative value denotes an error.
      return -1;
    }
  } else {
    cookie = JniMethodStart(self);
  }
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  *(sp32 - 1) = cookie;

  // Retrieve the stored native code.
  const void* nativeCode = called->GetNativeMethod();

  // There are two cases for the content of nativeCode:
  // 1) Pointer to the native function.
  // 2) Pointer to the trampoline for native code binding.
  // In the second case, we need to execute the binding and continue with the actual native
  // function pointer.
  DCHECK(nativeCode != nullptr);
  if (nativeCode == GetJniDlsymLookupStub()) {
    nativeCode = artFindNativeMethod();

    if (nativeCode == nullptr) {
      DCHECK(self->IsExceptionPending());  // There should be an exception pending now.

      // End JNI, as the assembly will move to deliver the exception.
      jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
      if (shorty[0] == 'L') {
        artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
      } else {
        artQuickGenericJniEndJNINonRef(self, cookie, lock);
      }

      return -1;
    }
    // Note that the native code pointer will be automatically set by artFindNativeMethod().
  }

  // Store the native code pointer in the stack at the right location.
  uintptr_t* code_pointer = reinterpret_cast<uintptr_t*>(visitor.GetCodeReturn());
  *code_pointer = reinterpret_cast<uintptr_t>(nativeCode);

  // 5K reserved, window_size + frame pointer used.
  size_t window_size = visitor.GetAllocaUsedSize();
  return (5 * KB) - window_size - kPointerSize;
}

/*
 * Called after the native JNI code returns. Responsible for cleanup (handle scope, saved state)
 * and unlocking.
 */
extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp,
                                                    jvalue result, uint64_t result_f)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
  mirror::ArtMethod* called = sp->AsMirrorPtr();
  uint32_t cookie = *(sp32 - 1);

  jobject lock = nullptr;
  if (called->IsSynchronized()) {
    HandleScope* table = reinterpret_cast<HandleScope*>(
        reinterpret_cast<uint8_t*>(sp) + sizeof(StackReference<mirror::ArtMethod>));
    lock = table->GetHandle(0).ToJObject();
  }

  char return_shorty_char = called->GetShorty()[0];

  if (return_shorty_char == 'L') {
    return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
  } else {
    artQuickGenericJniEndJNINonRef(self, cookie, lock);

    switch (return_shorty_char) {
      case 'F':  // Fall-through.
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo
// value for the method pointer.
//
// It is valid to use this, as at the usage points here (returns from C functions) we are
// assuming to hold the mutator lock (see SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
// annotations).

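// A sketch of the resulting contract (assuming the TwoWordReturn helpers behave as their names
// suggest): each trampoline below returns GetTwoWordSuccessValue(code, method), so that the
// assembly caller can branch to the code pointer with the resolved method available, or
// GetTwoWordFailureValue() with an exception pending on error.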
template<InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                     mirror::ArtMethod* caller_method,
                                     Thread* self, StackReference<mirror::ArtMethod>* sp);

template<InvokeType type, bool access_check>
static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
                                     mirror::ArtMethod* caller_method,
                                     Thread* self, StackReference<mirror::ArtMethod>* sp) {
  mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
                                             type);
  if (UNLIKELY(method == nullptr)) {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty =
        dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method,
                                                      self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  DCHECK(!self->IsExceptionPending());
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

// Explicit artInvokeCommon template function declarations to please analysis tool.
#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check)                          \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)                                    \
  TwoWordReturn artInvokeCommon<type, access_check>(uint32_t method_idx,                  \
                                                    mirror::Object* this_object,          \
                                                    mirror::ArtMethod* caller_method,     \
                                                    Thread* self,                         \
                                                    StackReference<mirror::ArtMethod>* sp)

EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL


// See comments in runtime_support_asm.S
extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kInterface, true>(method_idx, this_object, caller_method, self, sp);
}


extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method, self, sp);
}

extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
    uint32_t method_idx, mirror::Object* this_object, mirror::ArtMethod* caller_method,
    Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method, self, sp);
}

// Determine target of interface dispatch. This object is known non-null.
extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
                                                      mirror::Object* this_object,
                                                      mirror::ArtMethod* caller_method,
                                                      Thread* self,
                                                      StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* method;
  if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
    method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
    if (UNLIKELY(method == nullptr)) {
      FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
      ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
                                                                 caller_method);
      return GetTwoWordFailureValue();  // Failure.
    }
  } else {
    FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
    DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());

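    // The interface method at this point is the runtime's resolution method, so the actual dex
    // method index has to be recovered from the INVOKE_INTERFACE instruction at the caller's
    // dex pc.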
    // Find the caller PC.
    constexpr size_t pc_offset = GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsAndArgs);
    uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + pc_offset);

    // Map the caller PC to a dex PC.
    uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
    const DexFile::CodeItem* code = caller_method->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
          instr_code == Instruction::INVOKE_INTERFACE_RANGE)
        << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
    uint32_t dex_method_idx;
    if (instr_code == Instruction::INVOKE_INTERFACE) {
      dex_method_idx = instr->VRegB_35c();
    } else {
      DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
      dex_method_idx = instr->VRegB_3rc();
    }

    const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
    uint32_t shorty_len;
    const char* shorty =
        dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
    {
      // Remember the args in case a GC happens in FindMethodFromCode.
      ScopedObjectAccessUnchecked soa(self->GetJniEnv());
      RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
      visitor.VisitArguments();
      method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method,
                                                     self);
      visitor.FixupReferences();
    }

    if (UNLIKELY(method == nullptr)) {
      CHECK(self->IsExceptionPending());
      return GetTwoWordFailureValue();  // Failure.
    }
  }
  const void* code = method->GetEntryPointFromQuickCompiledCode();

  // When we return, the caller will branch to this address, so it had better not be 0!
  DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method) << " location: "
                          << method->GetDexFile()->GetLocation();

  return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
                                reinterpret_cast<uintptr_t>(method));
}

}  // namespace art