/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "callee_save_frame.h"
#include "common_throws.h"
#include "dex_file-inl.h"
#include "dex_instruction-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "gc/accounting/card_table-inl.h"
#include "instruction_set.h"
#include "interpreter/interpreter.h"
#include "mirror/art_method-inl.h"
#include "mirror/class-inl.h"
#include "mirror/dex_cache-inl.h"
#include "mirror/object-inl.h"
#include "mirror/object_array-inl.h"
#include "runtime.h"
#include "scoped_thread_state_change.h"

namespace art {

// Visits the arguments as saved to the stack by a Runtime::kRefsAndArgs callee save frame.
class QuickArgumentVisitor {
  // Number of bytes for each out register in the caller method's frame.
  static constexpr size_t kBytesStackArgLocation = 4;
  // Frame size in bytes of a callee-save frame for RefsAndArgs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_FrameSize =
      GetCalleeSaveFrameSize(kRuntimeISA, Runtime::kRefsAndArgs);
#if defined(__arm__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | ...        |    callee saves
  // | R3         |    arg3
  // | R2         |    arg2
  // | R1         |    arg1
  // | R0         |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm::ArmCalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm::ArmCalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm::ArmCalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__aarch64__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | LR         |
  // | X29        |
  // | :          |
  // | X20        |
  // | X7         |
  // | :          |
  // | X1         |
  // | D7         |
  // | :          |
  // | D0         |
  // |            |    padding
  // | Method*    |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 7;  // 7 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset =
      arm64::Arm64CalleeSaveFpr1Offset(Runtime::kRefsAndArgs);  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset =
      arm64::Arm64CalleeSaveGpr1Offset(Runtime::kRefsAndArgs);  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset =
      arm64::Arm64CalleeSaveLrOffset(Runtime::kRefsAndArgs);  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__mips__)
  // The callee save frame is pointed to by SP.
  // | argN       |  |
  // | ...        |  |
  // | arg4       |  |
  // | arg3 spill |  |  Caller's frame
  // | arg2 spill |  |
  // | arg1 spill |  |
  // | Method*    | ---
  // | RA         |
  // | ...        |    callee saves
  // | A3         |    arg3
  // | A2         |    arg2
  // | A1         |    arg1
  // | A0/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 60;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__i386__)
  // The callee save frame is pointed to by SP.
  // | argN        |  |
  // | ...         |  |
  // | arg4        |  |
  // | arg3 spill  |  |  Caller's frame
  // | arg2 spill  |  |
  // | arg1 spill  |  |
  // | Method*     | ---
  // | Return      |
  // | EBP,ESI,EDI |    callee saves
  // | EBX         |    arg3
  // | EDX         |    arg2
  // | ECX         |    arg1
  // | EAX/Method* |  <- sp
  static constexpr bool kQuickSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumQuickGprArgs = 3;  // 3 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 0;  // 0 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 0;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 4;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 28;  // Offset of return address.
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    return gpr_index * GetBytesPerGprSpillLocation(kRuntimeISA);
  }
#elif defined(__x86_64__)
  // The callee save frame is pointed to by SP.
  // | argN            |  |
  // | ...             |  |
  // | reg. arg spills |  |  Caller's frame
  // | Method*         | ---
  // | Return          |
  // | R15             |    callee save
  // | R14             |    callee save
  // | R13             |    callee save
  // | R12             |    callee save
  // | R9              |    arg5
  // | R8              |    arg4
  // | RSI/R6          |    arg1
  // | RBP/R5          |    callee save
  // | RBX/R3          |    callee save
  // | RDX/R2          |    arg2
  // | RCX/R1          |    arg3
  // | XMM7            |    float arg 8
  // | XMM6            |    float arg 7
  // | XMM5            |    float arg 6
  // | XMM4            |    float arg 5
  // | XMM3            |    float arg 4
  // | XMM2            |    float arg 3
  // | XMM1            |    float arg 2
  // | XMM0            |    float arg 1
  // | Padding         |
  // | RDI/Method*     |  <- sp
  static constexpr bool kQuickSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumQuickGprArgs = 5;  // 5 arguments passed in GPRs.
  static constexpr size_t kNumQuickFprArgs = 8;  // 8 arguments passed in FPRs.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset = 16;  // Offset of first FPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset = 80 + 4*8;  // Offset of first GPR arg.
  static constexpr size_t kQuickCalleeSaveFrame_RefAndArgs_LrOffset = 168 + 4*8;  // Offset of return address.
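  // Note: the argument GPRs (RSI, RDX, RCX, R8, R9) are not spilled in argument order (see the
  // frame layout above), so map the argument index to its slot relative to the first spilled
  // GPR (RCX).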
  static size_t GprIndexToGprOffset(uint32_t gpr_index) {
    switch (gpr_index) {
      case 0: return (4 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 1: return (1 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 2: return (0 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 3: return (5 * GetBytesPerGprSpillLocation(kRuntimeISA));
      case 4: return (6 * GetBytesPerGprSpillLocation(kRuntimeISA));
      default:
        LOG(FATAL) << "Unexpected GPR index: " << gpr_index;
        return 0;
    }
  }
#else
#error "Unsupported architecture"
#endif

 public:
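  // For the given quick ref and args quick frame, return the caller's method.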
  static mirror::ArtMethod* GetCallingMethod(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* previous_sp = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize;
    return reinterpret_cast<StackReference<mirror::ArtMethod>*>(previous_sp)->AsMirrorPtr();
  }

  // For the given quick ref and args quick frame, return the caller's PC.
  static uintptr_t GetCallingPc(StackReference<mirror::ArtMethod>* sp)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK(sp->AsMirrorPtr()->IsCalleeSaveMethod());
    byte* lr = reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_LrOffset;
    return *reinterpret_cast<uintptr_t*>(lr);
  }

  QuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static, const char* shorty,
                       uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) :
      is_static_(is_static), shorty_(shorty), shorty_len_(shorty_len),
      gpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Gpr1Offset),
      fpr_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_Fpr1Offset),
      stack_args_(reinterpret_cast<byte*>(sp) + kQuickCalleeSaveFrame_RefAndArgs_FrameSize
                  + StackArgumentStartFromShorty(is_static, shorty, shorty_len)),
      gpr_index_(0), fpr_index_(0), stack_index_(0), cur_type_(Primitive::kPrimVoid),
      is_split_long_or_double_(false) {}

  virtual ~QuickArgumentVisitor() {}

  virtual void Visit() = 0;

  Primitive::Type GetParamPrimitiveType() const {
    return cur_type_;
  }

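  // Returns the address where the current argument was spilled: an FPR slot for floating-point
  // arguments on hard-float targets, a GPR slot while GPRs remain, otherwise a slot in the
  // caller's out area.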
  byte* GetParamAddress() const {
    if (!kQuickSoftFloatAbi) {
      Primitive::Type type = GetParamPrimitiveType();
      if (UNLIKELY((type == Primitive::kPrimDouble) || (type == Primitive::kPrimFloat))) {
        if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
          return fpr_args_ + (fpr_index_ * GetBytesPerFprSpillLocation(kRuntimeISA));
        }
        return stack_args_ + (stack_index_ * kBytesStackArgLocation);
      }
    }
    if (gpr_index_ < kNumQuickGprArgs) {
      return gpr_args_ + GprIndexToGprOffset(gpr_index_);
    }
    return stack_args_ + (stack_index_ * kBytesStackArgLocation);
  }

  bool IsSplitLongOrDouble() const {
    if ((GetBytesPerGprSpillLocation(kRuntimeISA) == 4) || (GetBytesPerFprSpillLocation(kRuntimeISA) == 4)) {
      return is_split_long_or_double_;
    } else {
      return false;  // An optimization for when GPR and FPRs are 64bit.
    }
  }

  bool IsParamAReference() const {
    return GetParamPrimitiveType() == Primitive::kPrimNot;
  }

  bool IsParamALongOrDouble() const {
    Primitive::Type type = GetParamPrimitiveType();
    return type == Primitive::kPrimLong || type == Primitive::kPrimDouble;
  }

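  // Reassembles a 64-bit argument that was split, with its low half in the last argument
  // register and its high half in the first stack slot.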
  uint64_t ReadSplitLongParam() const {
    DCHECK(IsSplitLongOrDouble());
    uint64_t low_half = *reinterpret_cast<uint32_t*>(GetParamAddress());
    uint64_t high_half = *reinterpret_cast<uint32_t*>(stack_args_);
    return (low_half & 0xffffffffULL) | (high_half << 32);
  }

  void VisitArguments() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    // This implementation doesn't support a reg-spill area for hard float
    // ABI targets such as x86_64 and aarch64. So, for those targets whose
    // 'kQuickSoftFloatAbi' is 'false':
    // (a) 'stack_args_' should point to the first method argument
    // (b) whatever the argument type is, 'stack_index_' should
    //     be moved forward with every visit.
    gpr_index_ = 0;
    fpr_index_ = 0;
    stack_index_ = 0;
    if (!is_static_) {  // Handle the "this" argument.
      cur_type_ = Primitive::kPrimNot;
      is_split_long_or_double_ = false;
      Visit();
      if (!kQuickSoftFloatAbi || kNumQuickGprArgs == 0) {
        stack_index_++;
      }
      if (kNumQuickGprArgs > 0) {
        gpr_index_++;
      }
    }
    for (uint32_t shorty_index = 1; shorty_index < shorty_len_; ++shorty_index) {
      cur_type_ = Primitive::GetType(shorty_[shorty_index]);
      switch (cur_type_) {
        case Primitive::kPrimNot:
        case Primitive::kPrimBoolean:
        case Primitive::kPrimByte:
        case Primitive::kPrimChar:
        case Primitive::kPrimShort:
        case Primitive::kPrimInt:
          is_split_long_or_double_ = false;
          Visit();
          if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
            stack_index_++;
          }
          if (gpr_index_ < kNumQuickGprArgs) {
            gpr_index_++;
          }
          break;
        case Primitive::kPrimFloat:
          is_split_long_or_double_ = false;
          Visit();
          if (kQuickSoftFloatAbi) {
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
            } else {
              stack_index_++;
            }
          } else {
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
            }
            stack_index_++;
          }
          break;
        case Primitive::kPrimDouble:
        case Primitive::kPrimLong:
          if (kQuickSoftFloatAbi || (cur_type_ == Primitive::kPrimLong)) {
            is_split_long_or_double_ = (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) &&
                ((gpr_index_ + 1) == kNumQuickGprArgs);
            Visit();
            if (!kQuickSoftFloatAbi || kNumQuickGprArgs == gpr_index_) {
              if (kBytesStackArgLocation == 4) {
                stack_index_ += 2;
              } else {
                CHECK_EQ(kBytesStackArgLocation, 8U);
                stack_index_++;
              }
            }
            if (gpr_index_ < kNumQuickGprArgs) {
              gpr_index_++;
              if (GetBytesPerGprSpillLocation(kRuntimeISA) == 4) {
                if (gpr_index_ < kNumQuickGprArgs) {
                  gpr_index_++;
                } else if (kQuickSoftFloatAbi) {
                  stack_index_++;
                }
              }
            }
          } else {
            is_split_long_or_double_ = (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) &&
                ((fpr_index_ + 1) == kNumQuickFprArgs);
            Visit();
            if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
              fpr_index_++;
              if (GetBytesPerFprSpillLocation(kRuntimeISA) == 4) {
                if ((kNumQuickFprArgs != 0) && (fpr_index_ + 1 < kNumQuickFprArgs + 1)) {
                  fpr_index_++;
                }
              }
            }
            if (kBytesStackArgLocation == 4) {
              stack_index_ += 2;
            } else {
              CHECK_EQ(kBytesStackArgLocation, 8U);
              stack_index_++;
            }
          }
          break;
        default:
          LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty_;
      }
    }
  }

 private:
  static size_t StackArgumentStartFromShorty(bool is_static, const char* shorty,
                                             uint32_t shorty_len) {
    if (kQuickSoftFloatAbi) {
      CHECK_EQ(kNumQuickFprArgs, 0U);
      return (kNumQuickGprArgs * GetBytesPerGprSpillLocation(kRuntimeISA))
          + sizeof(StackReference<mirror::ArtMethod>) /* StackReference<ArtMethod> */;
    } else {
      // For now, there is no reg-spill area for the targets with
      // hard float ABI. So, the offset pointing to the first method's
      // parameter ('this' for non-static methods) should be returned.
      return sizeof(StackReference<mirror::ArtMethod>);  // Skip StackReference<ArtMethod>.
    }
  }

 protected:
  const bool is_static_;
  const char* const shorty_;
  const uint32_t shorty_len_;

 private:
  byte* const gpr_args_;  // Address of GPR arguments in callee save frame.
  byte* const fpr_args_;  // Address of FPR arguments in callee save frame.
  byte* const stack_args_;  // Address of stack arguments in caller's frame.
  uint32_t gpr_index_;  // Index into spilled GPRs.
  uint32_t fpr_index_;  // Index into spilled FPRs.
  uint32_t stack_index_;  // Index into arguments on the stack.
  // The current type of argument during VisitArguments.
  Primitive::Type cur_type_;
  // Does a 64bit parameter straddle the register and stack arguments?
  bool is_split_long_or_double_;
};

// Visits arguments on the stack placing them into the shadow frame.
class BuildQuickShadowFrameVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickShadowFrameVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len, ShadowFrame* sf,
                               size_t first_arg_reg) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), sf_(sf), cur_reg_(first_arg_reg) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

 private:
  ShadowFrame* const sf_;
  uint32_t cur_reg_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickShadowFrameVisitor);
};

void BuildQuickShadowFrameVisitor::Visit() {
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        sf_->SetVRegLong(cur_reg_, ReadSplitLongParam());
      } else {
        sf_->SetVRegLong(cur_reg_, *reinterpret_cast<jlong*>(GetParamAddress()));
      }
      ++cur_reg_;
      break;
    case Primitive::kPrimNot: {
        StackReference<mirror::Object>* stack_ref =
            reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
        sf_->SetVRegReference(cur_reg_, stack_ref->AsMirrorPtr());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      sf_->SetVReg(cur_reg_, *reinterpret_cast<jint*>(GetParamAddress()));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      break;
  }
  ++cur_reg_;
}

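// Entry point for methods that must run in the interpreter: build a shadow frame from the quick
// arguments, invoke the interpreter on it, and return the packed result.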
extern "C" uint64_t artQuickToInterpreterBridge(mirror::ArtMethod* method, Thread* self,
                                                StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // Ensure we don't get thread suspension until the object arguments are safely in the shadow
  // frame.
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);

  if (method->IsAbstract()) {
    ThrowAbstractMethodError(method);
    return 0;
  } else {
    DCHECK(!method->IsNative()) << PrettyMethod(method);
    const char* old_cause = self->StartAssertNoThreadSuspension(
        "Building interpreter shadow frame");
    const DexFile::CodeItem* code_item = method->GetCodeItem();
    DCHECK(code_item != nullptr) << PrettyMethod(method);
    uint16_t num_regs = code_item->registers_size_;
    void* memory = alloca(ShadowFrame::ComputeSize(num_regs));
    // No last shadow coming from quick.
    ShadowFrame* shadow_frame(ShadowFrame::Create(num_regs, nullptr, method, 0, memory));
    size_t first_arg_reg = code_item->registers_size_ - code_item->ins_size_;
    uint32_t shorty_len = 0;
    const char* shorty = method->GetShorty(&shorty_len);
    BuildQuickShadowFrameVisitor shadow_frame_builder(sp, method->IsStatic(), shorty, shorty_len,
                                                      shadow_frame, first_arg_reg);
    shadow_frame_builder.VisitArguments();
    // Push a transition back into managed code onto the linked list in thread.
    ManagedStack fragment;
    self->PushManagedStackFragment(&fragment);
    self->PushShadowFrame(shadow_frame);
    self->EndAssertNoThreadSuspension(old_cause);

    if (method->IsStatic() && !method->GetDeclaringClass()->IsInitialized()) {
      // Ensure static method's class is initialized.
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(method->GetDeclaringClass()));
      if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
        DCHECK(Thread::Current()->IsExceptionPending()) << PrettyMethod(method);
        self->PopManagedStackFragment(fragment);
        return 0;
      }
    }

    StackHandleScope<1> hs(self);
    MethodHelper mh(hs.NewHandle(method));
    JValue result = interpreter::EnterInterpreterFromStub(self, mh, code_item, *shadow_frame);
    // Pop transition.
    self->PopManagedStackFragment(fragment);
    // No need to restore the args since the method has already been run by the interpreter.
    return result.GetJ();
  }
}

// Visits arguments on the stack, placing them into the args vector; Object* arguments are
// converted to jobjects.
class BuildQuickArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  BuildQuickArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                            const char* shorty, uint32_t shorty_len,
                            ScopedObjectAccessUnchecked* soa, std::vector<jvalue>* args) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa), args_(args) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  std::vector<jvalue>* const args_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*>> references_;

  DISALLOW_COPY_AND_ASSIGN(BuildQuickArgumentVisitor);
};

void BuildQuickArgumentVisitor::Visit() {
  jvalue val;
  Primitive::Type type = GetParamPrimitiveType();
  switch (type) {
    case Primitive::kPrimNot: {
      StackReference<mirror::Object>* stack_ref =
          reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
      val.l = soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
      references_.push_back(std::make_pair(val.l, stack_ref));
      break;
    }
    case Primitive::kPrimLong:  // Fall-through.
    case Primitive::kPrimDouble:
      if (IsSplitLongOrDouble()) {
        val.j = ReadSplitLongParam();
      } else {
        val.j = *reinterpret_cast<jlong*>(GetParamAddress());
      }
      break;
    case Primitive::kPrimBoolean:  // Fall-through.
    case Primitive::kPrimByte:     // Fall-through.
    case Primitive::kPrimChar:     // Fall-through.
    case Primitive::kPrimShort:    // Fall-through.
    case Primitive::kPrimInt:      // Fall-through.
    case Primitive::kPrimFloat:
      val.i = *reinterpret_cast<jint*>(GetParamAddress());
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "UNREACHABLE";
      val.j = 0;
      break;
  }
  args_->push_back(val);
}

void BuildQuickArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Handler for invocation on proxy methods. On entry a frame will exist for the proxy object method
// which is responsible for recording callee save registers. We explicitly place into jobjects the
// incoming reference arguments (so they survive GC). We invoke the invocation handler, which is a
// field within the proxy object, which will box the primitive arguments and deal with error cases.
extern "C" uint64_t artQuickProxyInvokeHandler(mirror::ArtMethod* proxy_method,
                                               mirror::Object* receiver,
                                               Thread* self, StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  DCHECK(proxy_method->IsProxyMethod()) << PrettyMethod(proxy_method);
  DCHECK(receiver->GetClass()->IsProxyClass()) << PrettyMethod(proxy_method);
  // Ensure we don't get thread suspension until the object arguments are safely in jobjects.
  const char* old_cause =
      self->StartAssertNoThreadSuspension("Adding to IRT proxy object arguments");
  // Register the top of the managed stack, making the stack crawlable.
  DCHECK_EQ(sp->AsMirrorPtr(), proxy_method) << PrettyMethod(proxy_method);
  self->SetTopOfStack(sp, 0);
  DCHECK_EQ(proxy_method->GetFrameSizeInBytes(),
            Runtime::Current()->GetCalleeSaveMethod(Runtime::kRefsAndArgs)->GetFrameSizeInBytes())
      << PrettyMethod(proxy_method);
  self->VerifyStack();
  // Start new JNI local reference state.
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  // Create local ref. copies of proxy method and the receiver.
  jobject rcvr_jobj = soa.AddLocalReference<jobject>(receiver);

  // Place the arguments into the args vector and remove the receiver.
  mirror::ArtMethod* non_proxy_method = proxy_method->GetInterfaceMethodIfProxy();
  CHECK(!non_proxy_method->IsStatic()) << PrettyMethod(proxy_method) << " "
                                       << PrettyMethod(non_proxy_method);
  std::vector<jvalue> args;
  uint32_t shorty_len = 0;
  const char* shorty = proxy_method->GetShorty(&shorty_len);
  BuildQuickArgumentVisitor local_ref_visitor(sp, false, shorty, shorty_len, &soa, &args);

  local_ref_visitor.VisitArguments();
  DCHECK_GT(args.size(), 0U) << PrettyMethod(proxy_method);
  args.erase(args.begin());

  // Convert proxy method into expected interface method.
  mirror::ArtMethod* interface_method = proxy_method->FindOverriddenMethod();
  DCHECK(interface_method != NULL) << PrettyMethod(proxy_method);
  DCHECK(!interface_method->IsProxyMethod()) << PrettyMethod(interface_method);
  jobject interface_method_jobj = soa.AddLocalReference<jobject>(interface_method);

  // All naked Object*s should now be in jobjects, so it's safe to go into the main invoke code
  // that performs allocations.
  self->EndAssertNoThreadSuspension(old_cause);
  JValue result = InvokeProxyInvocationHandler(soa, shorty, rcvr_jobj, interface_method_jobj, args);
  // Restore references which might have moved.
  local_ref_visitor.FixupReferences();
  return result.GetJ();
}

// Read object references held in arguments from quick frames and place them in JNI local
// references, so they don't get garbage collected.
class RememberForGcArgumentVisitor FINAL : public QuickArgumentVisitor {
 public:
  RememberForGcArgumentVisitor(StackReference<mirror::ArtMethod>* sp, bool is_static,
                               const char* shorty, uint32_t shorty_len,
                               ScopedObjectAccessUnchecked* soa) :
      QuickArgumentVisitor(sp, is_static, shorty, shorty_len), soa_(soa) {}

  void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;

  void FixupReferences() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

 private:
  ScopedObjectAccessUnchecked* const soa_;
  // References which we must update when exiting in case the GC moved the objects.
  std::vector<std::pair<jobject, StackReference<mirror::Object>*> > references_;

  DISALLOW_COPY_AND_ASSIGN(RememberForGcArgumentVisitor);
};

void RememberForGcArgumentVisitor::Visit() {
  if (IsParamAReference()) {
    StackReference<mirror::Object>* stack_ref =
        reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
    jobject reference =
        soa_->AddLocalReference<jobject>(stack_ref->AsMirrorPtr());
    references_.push_back(std::make_pair(reference, stack_ref));
  }
}

void RememberForGcArgumentVisitor::FixupReferences() {
  // Fixup any references which may have changed.
  for (const auto& pair : references_) {
    pair.second->Assign(soa_->Decode<mirror::Object*>(pair.first));
    soa_->Env()->DeleteLocalRef(pair.first);
  }
}

// Lazily resolve a method for quick. Called by stub code.
extern "C" const void* artQuickResolutionTrampoline(mirror::ArtMethod* called,
                                                    mirror::Object* receiver,
                                                    Thread* self,
                                                    StackReference<mirror::ArtMethod>* sp)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
  // Start new JNI local reference state
  JNIEnvExt* env = self->GetJniEnv();
  ScopedObjectAccessUnchecked soa(env);
  ScopedJniEnvLocalRefState env_state(env);
  const char* old_cause = self->StartAssertNoThreadSuspension("Quick method resolution set up");

  // Compute details about the called method (avoid GCs)
  ClassLinker* linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* caller = QuickArgumentVisitor::GetCallingMethod(sp);
  InvokeType invoke_type;
  const DexFile* dex_file;
  uint32_t dex_method_idx;
  if (called->IsRuntimeMethod()) {
    uint32_t dex_pc = caller->ToDexPc(QuickArgumentVisitor::GetCallingPc(sp));
    const DexFile::CodeItem* code;
    dex_file = caller->GetDexFile();
    code = caller->GetCodeItem();
    CHECK_LT(dex_pc, code->insns_size_in_code_units_);
    const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
    Instruction::Code instr_code = instr->Opcode();
    bool is_range;
    switch (instr_code) {
      case Instruction::INVOKE_DIRECT:
        invoke_type = kDirect;
        is_range = false;
        break;
      case Instruction::INVOKE_DIRECT_RANGE:
        invoke_type = kDirect;
        is_range = true;
        break;
      case Instruction::INVOKE_STATIC:
        invoke_type = kStatic;
        is_range = false;
        break;
      case Instruction::INVOKE_STATIC_RANGE:
        invoke_type = kStatic;
        is_range = true;
        break;
      case Instruction::INVOKE_SUPER:
        invoke_type = kSuper;
        is_range = false;
        break;
      case Instruction::INVOKE_SUPER_RANGE:
        invoke_type = kSuper;
        is_range = true;
        break;
      case Instruction::INVOKE_VIRTUAL:
        invoke_type = kVirtual;
        is_range = false;
        break;
      case Instruction::INVOKE_VIRTUAL_RANGE:
        invoke_type = kVirtual;
        is_range = true;
        break;
      case Instruction::INVOKE_INTERFACE:
        invoke_type = kInterface;
        is_range = false;
        break;
      case Instruction::INVOKE_INTERFACE_RANGE:
        invoke_type = kInterface;
        is_range = true;
        break;
      default:
        LOG(FATAL) << "Unexpected call into trampoline: " << instr->DumpString(NULL);
        // Avoid 'used uninitialized' warnings.
        invoke_type = kDirect;
        is_range = false;
    }
    dex_method_idx = (is_range) ? instr->VRegB_3rc() : instr->VRegB_35c();
  } else {
    invoke_type = kStatic;
    dex_file = called->GetDexFile();
    dex_method_idx = called->GetDexMethodIndex();
  }
  uint32_t shorty_len;
  const char* shorty =
      dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx), &shorty_len);
  RememberForGcArgumentVisitor visitor(sp, invoke_type == kStatic, shorty, shorty_len, &soa);
  visitor.VisitArguments();
  self->EndAssertNoThreadSuspension(old_cause);
  bool virtual_or_interface = invoke_type == kVirtual || invoke_type == kInterface;
  // Resolve method filling in dex cache.
  if (UNLIKELY(called->IsRuntimeMethod())) {
    StackHandleScope<1> hs(self);
    mirror::Object* dummy = nullptr;
    HandleWrapper<mirror::Object> h_receiver(
        hs.NewHandleWrapper(virtual_or_interface ? &receiver : &dummy));
    called = linker->ResolveMethod(self, dex_method_idx, &caller, invoke_type);
  }
  const void* code = NULL;
  if (LIKELY(!self->IsExceptionPending())) {
    // Incompatible class change should have been handled in resolve method.
    CHECK(!called->CheckIncompatibleClassChange(invoke_type))
        << PrettyMethod(called) << " " << invoke_type;
    if (virtual_or_interface) {
      // Refine called method based on receiver.
      CHECK(receiver != nullptr) << invoke_type;

      mirror::ArtMethod* orig_called = called;
      if (invoke_type == kVirtual) {
        called = receiver->GetClass()->FindVirtualMethodForVirtual(called);
      } else {
        called = receiver->GetClass()->FindVirtualMethodForInterface(called);
      }

      CHECK(called != nullptr) << PrettyMethod(orig_called) << " "
                               << PrettyTypeOf(receiver) << " "
                               << invoke_type << " " << orig_called->GetVtableIndex();

      // We came here because of sharpening. Ensure the dex cache is up-to-date on the method index
      // of the sharpened method.
      if (called->GetDexCacheResolvedMethods() == caller->GetDexCacheResolvedMethods()) {
        caller->GetDexCacheResolvedMethods()->Set<false>(called->GetDexMethodIndex(), called);
      } else {
        // Calling from one dex file to another, need to compute the method index appropriate to
        // the caller's dex file. Since we get here only if the original called was a runtime
        // method, we've got the correct dex_file and a dex_method_idx from above.
        DCHECK_EQ(caller->GetDexFile(), dex_file);
        StackHandleScope<1> hs(self);
        MethodHelper mh(hs.NewHandle(called));
        uint32_t method_index = mh.FindDexMethodIndexInOtherDexFile(*dex_file, dex_method_idx);
        if (method_index != DexFile::kDexNoIndex) {
          caller->GetDexCacheResolvedMethods()->Set<false>(method_index, called);
        }
      }
    }
    // Ensure that the called method's class is initialized.
    StackHandleScope<1> hs(soa.Self());
    Handle<mirror::Class> called_class(hs.NewHandle(called->GetDeclaringClass()));
    linker->EnsureInitialized(called_class, true, true);
    if (LIKELY(called_class->IsInitialized())) {
      code = called->GetEntryPointFromQuickCompiledCode();
    } else if (called_class->IsInitializing()) {
      if (invoke_type == kStatic) {
        // Class is still initializing, go to oat and grab code (trampoline must be left in place
        // until class is initialized to stop races between threads).
        code = linker->GetQuickOatCodeFor(called);
      } else {
        // No trampoline for non-static methods.
        code = called->GetEntryPointFromQuickCompiledCode();
      }
    } else {
      DCHECK(called_class->IsErroneous());
    }
  }
  CHECK_EQ(code == NULL, self->IsExceptionPending());
  // Fix up any locally saved objects which may have moved during a GC.
  visitor.FixupReferences();
  // Place called method in callee-save frame to be placed as first argument to quick method.
  sp->Assign(called);
  return code;
}

/*
 * This class uses a couple of observations to unite the different calling conventions through
 * a few constants.
 *
 * 1) Number of registers used for passing is normally even, so counting down has no penalty for
 *    possible alignment.
 * 2) Known 64b architectures store 8B units on the stack, both for integral and floating point
 *    types, so using uintptr_t is OK. Also means that we can use kRegistersNeededX to denote
 *    when we have to split things
 * 3) The only soft-float, Arm, is 32b, so no widening needs to be taken into account for floats
 *    and we can use Int handling directly.
 * 4) Only 64b architectures widen, and their stack is aligned 8B anyways, so no padding code
 *    necessary when widening. Also, widening of Ints will take place implicitly, and the
 *    extension should be compatible with Aarch64, which mandates copying the available bits
 *    into LSB and leaving the rest unspecified.
 * 5) Aligning longs and doubles is necessary on arm only, and it's the same in registers and on
 *    the stack.
 * 6) There is only little endian.
 *
 *
 * Actual work is supposed to be done in a delegate of the template type. The interface is as
 * follows:
 *
 * void PushGpr(uintptr_t): Add a value for the next GPR
 *
 * void PushFpr4(float): Add a value for the next FPR of size 32b. Is only called if we need
 *                       padding, that is, think the architecture is 32b and aligns 64b.
 *
 * void PushFpr8(uint64_t): Push a double. We _will_ call this on 32b, it's the callee's job to
 *                          split this if necessary. The current state will have aligned, if
 *                          necessary.
 *
 * void PushStack(uintptr_t): Push a value to the stack.
 *
 * uintptr_t PushHandle(mirror::Object* ref): Add a reference to the HandleScope. This _will_ be
 *                                            called with nullptr, as this might be important for
 *                                            null initialization. Must return the jobject, that
 *                                            is, the reference to the entry in the HandleScope
 *                                            (nullptr if necessary).
 *
 */
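// For illustration only: the delegate type below is hypothetical, but a caller is expected to
// drive the state machine roughly like this (see the Advance* methods below):
//
//   MyDelegate delegate;
//   BuildNativeCallFrameStateMachine<MyDelegate> sm(&delegate);
//   sm.AdvancePointer(env);             // e.g. the JNIEnv* of a native call.
//   sm.AdvanceHandleScope(receiver);    // references are passed via the HandleScope.
//   sm.AdvanceInt(int_arg);             // remaining arguments follow the method's shorty.
//   sm.AdvanceDouble(double_arg_bits);  // doubles are passed as their uint64_t bit pattern.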
template<class T> class BuildNativeCallFrameStateMachine {
 public:
#if defined(__arm__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;
  static constexpr size_t kNumNativeGprArgs = 4;  // 4 arguments passed in GPRs, r0-r3
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = true;
  static constexpr bool kAlignDoubleOnStack = true;
#elif defined(__aarch64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 8;  // 8 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__mips__)
  // TODO: These are all dummy values!
  static constexpr bool kNativeSoftFloatAbi = true;  // This is a soft float ABI.
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs (dummy).
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs (dummy).

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = true;
  static constexpr bool kMultiRegistersWidened = true;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__i386__)
  // TODO: Check these!
  static constexpr bool kNativeSoftFloatAbi = false;  // Not using int registers for fp
  static constexpr size_t kNumNativeGprArgs = 0;  // 0 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 0;  // 0 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 2;
  static constexpr size_t kRegistersNeededForDouble = 2;
  static constexpr bool kMultiRegistersAligned = false;  // x86 not using regs, anyways
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#elif defined(__x86_64__)
  static constexpr bool kNativeSoftFloatAbi = false;  // This is a hard float ABI.
  static constexpr size_t kNumNativeGprArgs = 6;  // 6 arguments passed in GPRs.
  static constexpr size_t kNumNativeFprArgs = 8;  // 8 arguments passed in FPRs.

  static constexpr size_t kRegistersNeededForLong = 1;
  static constexpr size_t kRegistersNeededForDouble = 1;
  static constexpr bool kMultiRegistersAligned = false;
  static constexpr bool kMultiRegistersWidened = false;
  static constexpr bool kAlignLongOnStack = false;
  static constexpr bool kAlignDoubleOnStack = false;
#else
#error "Unsupported architecture"
#endif

 public:
  explicit BuildNativeCallFrameStateMachine(T* delegate)
      : gpr_index_(kNumNativeGprArgs),
        fpr_index_(kNumNativeFprArgs),
        stack_entries_(0),
        delegate_(delegate) {
    // For register alignment, we want to assume that counters (gpr_index_, fpr_index_) are even iff
    // the next register is even; counting down is just to make the compiler happy...
    CHECK_EQ(kNumNativeGprArgs % 2, 0U);
    CHECK_EQ(kNumNativeFprArgs % 2, 0U);
  }

  virtual ~BuildNativeCallFrameStateMachine() {}

  bool HavePointerGpr() {
    return gpr_index_ > 0;
  }

  void AdvancePointer(const void* val) {
    if (HavePointerGpr()) {
      gpr_index_--;
      PushGpr(reinterpret_cast<uintptr_t>(val));
    } else {
      stack_entries_++;  // TODO: have a field for pointer length as multiple of 32b
      PushStack(reinterpret_cast<uintptr_t>(val));
      gpr_index_ = 0;
    }
  }

  bool HaveHandleScopeGpr() {
    return gpr_index_ > 0;
  }

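  // Pass a reference: register it in the HandleScope first, then pass the resulting handle like
  // a pointer (in a GPR if one is left, otherwise on the stack).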
  void AdvanceHandleScope(mirror::Object* ptr) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    uintptr_t handle = PushHandle(ptr);
    if (HaveHandleScopeGpr()) {
      gpr_index_--;
      PushGpr(handle);
    } else {
      stack_entries_++;
      PushStack(handle);
      gpr_index_ = 0;
    }
  }

  bool HaveIntGpr() {
    return gpr_index_ > 0;
  }

  void AdvanceInt(uint32_t val) {
    if (HaveIntGpr()) {
      gpr_index_--;
      PushGpr(val);
    } else {
      stack_entries_++;
      PushStack(val);
      gpr_index_ = 0;
    }
  }

  bool HaveLongGpr() {
    return gpr_index_ >= kRegistersNeededForLong + (LongGprNeedsPadding() ? 1 : 0);
  }

  bool LongGprNeedsPadding() {
    return kRegistersNeededForLong > 1 &&     // only pad when using multiple registers
           kAlignLongOnStack &&               // and when it needs alignment
           (gpr_index_ & 1) == 1;             // counter is odd, see constructor
  }

  bool LongStackNeedsPadding() {
    return kRegistersNeededForLong > 1 &&     // only pad when using multiple registers
           kAlignLongOnStack &&               // and when it needs 8B alignment
           (stack_entries_ & 1) == 1;         // counter is odd
  }

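  // Pass a 64-bit integral value: one GPR on 64-bit targets, a (possibly padded) register pair on
  // 32-bit targets, spilling to the stack with the same splitting and alignment rules once the
  // registers are exhausted.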
  void AdvanceLong(uint64_t val) {
    if (HaveLongGpr()) {
      if (LongGprNeedsPadding()) {
        PushGpr(0);
        gpr_index_--;
      }
      if (kRegistersNeededForLong == 1) {
        PushGpr(static_cast<uintptr_t>(val));
      } else {
        PushGpr(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushGpr(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
      }
      gpr_index_ -= kRegistersNeededForLong;
    } else {
      if (LongStackNeedsPadding()) {
        PushStack(0);
        stack_entries_++;
      }
      if (kRegistersNeededForLong == 1) {
        PushStack(static_cast<uintptr_t>(val));
        stack_entries_++;
      } else {
        PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
        PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
        stack_entries_ += 2;
      }
      gpr_index_ = 0;
    }
  }

  bool HaveFloatFpr() {
    return fpr_index_ > 0;
  }

Andreas Gampec147b002014-03-06 18:11:06 -08001050 void AdvanceFloat(float val) {
1051 if (kNativeSoftFloatAbi) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001052 AdvanceInt(bit_cast<float, uint32_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001053 } else {
1054 if (HaveFloatFpr()) {
1055 fpr_index_--;
1056 if (kRegistersNeededForDouble == 1) {
1057 if (kMultiRegistersWidened) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001058 PushFpr8(bit_cast<double, uint64_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001059 } else {
1060 // No widening, just use the bits.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001061 PushFpr8(bit_cast<float, uint64_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001062 }
1063 } else {
1064 PushFpr4(val);
1065 }
1066 } else {
1067 stack_entries_++;
1068 if (kRegistersNeededForDouble == 1 && kMultiRegistersWidened) {
1069 // Need to widen before storing: Note the "double" in the template instantiation.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001070 // Note: We need to jump through those hoops to make the compiler happy.
1071 DCHECK_EQ(sizeof(uintptr_t), sizeof(uint64_t));
1072 PushStack(static_cast<uintptr_t>(bit_cast<double, uint64_t>(val)));
Andreas Gampec147b002014-03-06 18:11:06 -08001073 } else {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001074 PushStack(bit_cast<float, uintptr_t>(val));
Andreas Gampec147b002014-03-06 18:11:06 -08001075 }
1076 fpr_index_ = 0;
1077 }
1078 }
1079 }
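  // Note on the widened path above: when kMultiRegistersWidened is set, the float argument is
  // numerically converted to double (via the implicit float-to-double conversion at the bit_cast
  // call) before its 64-bit pattern is pushed; otherwise only the raw 32-bit float bits are used,
  // as the "No widening" comment says.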
1080
Andreas Gampec147b002014-03-06 18:11:06 -08001081 bool HaveDoubleFpr() {
1082 return fpr_index_ >= kRegistersNeededForDouble + (DoubleFprNeedsPadding() ? 1 : 0);
1083 }
1084
1085 bool DoubleFprNeedsPadding() {
1086 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1087 kAlignDoubleOnStack && // and when it needs alignment
1088 (fpr_index_ & 1) == 1; // counter is odd, see constructor
1089 }
1090
1091 bool DoubleStackNeedsPadding() {
1092 return kRegistersNeededForDouble > 1 && // only pad when using multiple registers
1093 kAlignDoubleOnStack && // and when it needs 8B alignment
1094 (stack_entries_ & 1) == 1; // counter is odd
1095 }
1096
1097 void AdvanceDouble(uint64_t val) {
1098 if (kNativeSoftFloatAbi) {
1099 AdvanceLong(val);
1100 } else {
1101 if (HaveDoubleFpr()) {
1102 if (DoubleFprNeedsPadding()) {
1103 PushFpr4(0);
1104 fpr_index_--;
1105 }
1106 PushFpr8(val);
1107 fpr_index_ -= kRegistersNeededForDouble;
1108 } else {
1109 if (DoubleStackNeedsPadding()) {
1110 PushStack(0);
1111 stack_entries_++;
1112 }
1113 if (kRegistersNeededForDouble == 1) {
1114 PushStack(static_cast<uintptr_t>(val));
1115 stack_entries_++;
1116 } else {
1117 PushStack(static_cast<uintptr_t>(val & 0xFFFFFFFF));
1118 PushStack(static_cast<uintptr_t>((val >> 32) & 0xFFFFFFFF));
1119 stack_entries_ += 2;
1120 }
1121 fpr_index_ = 0;
1122 }
1123 }
1124 }
1125
1126 uint32_t getStackEntries() {
1127 return stack_entries_;
1128 }
1129
1130 uint32_t getNumberOfUsedGprs() {
1131 return kNumNativeGprArgs - gpr_index_;
1132 }
1133
1134 uint32_t getNumberOfUsedFprs() {
1135 return kNumNativeFprArgs - fpr_index_;
1136 }
1137
1138 private:
1139 void PushGpr(uintptr_t val) {
1140 delegate_->PushGpr(val);
1141 }
1142 void PushFpr4(float val) {
1143 delegate_->PushFpr4(val);
1144 }
1145 void PushFpr8(uint64_t val) {
1146 delegate_->PushFpr8(val);
1147 }
1148 void PushStack(uintptr_t val) {
1149 delegate_->PushStack(val);
1150 }
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001151 uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1152 return delegate_->PushHandle(ref);
Andreas Gampec147b002014-03-06 18:11:06 -08001153 }
1154
1155 uint32_t gpr_index_; // Number of free GPRs
1156 uint32_t fpr_index_; // Number of free FPRs
1157 uint32_t stack_entries_; // Stack entries are in multiples of 32b, as floats are usually not
1158 // extended
1159 T* delegate_; // What Push implementation gets called
1160};
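// The delegate type T only has to provide the five hooks forwarded to above: PushGpr, PushFpr4,
// PushFpr8, PushStack and PushHandle. A minimal counting delegate, purely illustrative and not
// used anywhere in the runtime, could look like this:
//
//   class CountingDelegate {
//    public:
//     void PushGpr(uintptr_t) { gprs_++; }
//     void PushFpr4(float) { fprs_++; }
//     void PushFpr8(uint64_t) { fprs_++; }
//     void PushStack(uintptr_t) { stack_++; }
//     uintptr_t PushHandle(mirror::Object*) { return 0u; }
//    private:
//     size_t gprs_ = 0, fprs_ = 0, stack_ = 0;
//   };
//
//   CountingDelegate counter;
//   BuildNativeCallFrameStateMachine<CountingDelegate> sm(&counter);
//
// ComputeNativeCallFrameSize below is the delegate actually used for sizing; FillNativeCall
// further down is the one used to populate the frame.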
1161
Andreas Gampec200a4a2014-06-16 18:39:09 -07001162// Computes the sizes of register stacks and call stack area. Handling of references can be extended
1163// in subclasses.
1164//
1165// To handle native pointers, use "L" in the shorty for an object reference, which simulates
1166// them with handles.
1167class ComputeNativeCallFrameSize {
Andreas Gampec147b002014-03-06 18:11:06 -08001168 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001169 ComputeNativeCallFrameSize() : num_stack_entries_(0) {}
1170
1171 virtual ~ComputeNativeCallFrameSize() {}
Andreas Gampec147b002014-03-06 18:11:06 -08001172
Andreas Gampec147b002014-03-06 18:11:06 -08001173 uint32_t GetStackSize() {
1174 return num_stack_entries_ * sizeof(uintptr_t);
1175 }
1176
Andreas Gampec200a4a2014-06-16 18:39:09 -07001177 uint8_t* LayoutCallStack(uint8_t* sp8) {
Andreas Gampec147b002014-03-06 18:11:06 -08001178 sp8 -= GetStackSize();
Andreas Gampe779f8c92014-06-09 18:29:38 -07001179 // Align by kStackAlignment.
1180 sp8 = reinterpret_cast<uint8_t*>(RoundDown(reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
Andreas Gampec200a4a2014-06-16 18:39:09 -07001181 return sp8;
Andreas Gampec147b002014-03-06 18:11:06 -08001182 }
1183
Andreas Gampec200a4a2014-06-16 18:39:09 -07001184 uint8_t* LayoutCallRegisterStacks(uint8_t* sp8, uintptr_t** start_gpr, uint32_t** start_fpr) {
1185 // Assumption is OK right now, as we have soft-float arm
1186 size_t fregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeFprArgs;
1187 sp8 -= fregs * sizeof(uintptr_t);
1188 *start_fpr = reinterpret_cast<uint32_t*>(sp8);
1189 size_t iregs = BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>::kNumNativeGprArgs;
1190 sp8 -= iregs * sizeof(uintptr_t);
1191 *start_gpr = reinterpret_cast<uintptr_t*>(sp8);
1192 return sp8;
1193 }
Andreas Gampec147b002014-03-06 18:11:06 -08001194
Andreas Gampec200a4a2014-06-16 18:39:09 -07001195 uint8_t* LayoutNativeCall(uint8_t* sp8, uintptr_t** start_stack, uintptr_t** start_gpr,
1196 uint32_t** start_fpr) {
1197 // Native call stack.
1198 sp8 = LayoutCallStack(sp8);
1199 *start_stack = reinterpret_cast<uintptr_t*>(sp8);
Andreas Gampec147b002014-03-06 18:11:06 -08001200
Andreas Gampec200a4a2014-06-16 18:39:09 -07001201 // Put fprs and gprs below.
1202 sp8 = LayoutCallRegisterStacks(sp8, start_gpr, start_fpr);
Andreas Gampec147b002014-03-06 18:11:06 -08001203
Andreas Gampec200a4a2014-06-16 18:39:09 -07001204 // Return the new bottom.
1205 return sp8;
1206 }
1207
1208 virtual void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm)
1209 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {}
1210
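  // Walks the method's shorty, simulating each argument so the state machine can account for the
  // space it needs. Reminder: a dex shorty lists the return type first and then one character per
  // argument, with every reference collapsed to 'L'; e.g. a method taking (long, Object, float)
  // and returning int has the shorty "IJLF". That is why the loop below starts at index 1.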
1211 void Walk(const char* shorty, uint32_t shorty_len) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1212 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize> sm(this);
1213
1214 WalkHeader(&sm);
Andreas Gampec147b002014-03-06 18:11:06 -08001215
1216 for (uint32_t i = 1; i < shorty_len; ++i) {
1217 Primitive::Type cur_type_ = Primitive::GetType(shorty[i]);
1218 switch (cur_type_) {
1219 case Primitive::kPrimNot:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001220 sm.AdvanceHandleScope(
1221 reinterpret_cast<mirror::Object*>(0x12345678));
Andreas Gampec147b002014-03-06 18:11:06 -08001222 break;
1223
1224 case Primitive::kPrimBoolean:
1225 case Primitive::kPrimByte:
1226 case Primitive::kPrimChar:
1227 case Primitive::kPrimShort:
1228 case Primitive::kPrimInt:
1229 sm.AdvanceInt(0);
1230 break;
1231 case Primitive::kPrimFloat:
1232 sm.AdvanceFloat(0);
1233 break;
1234 case Primitive::kPrimDouble:
1235 sm.AdvanceDouble(0);
1236 break;
1237 case Primitive::kPrimLong:
1238 sm.AdvanceLong(0);
1239 break;
1240 default:
1241 LOG(FATAL) << "Unexpected type: " << cur_type_ << " in " << shorty;
1242 }
1243 }
1244
1245 num_stack_entries_ = sm.getStackEntries();
1246 }
1247
1248 void PushGpr(uintptr_t /* val */) {
1249 // not optimizing registers, yet
1250 }
1251
1252 void PushFpr4(float /* val */) {
1253 // not optimizing registers, yet
1254 }
1255
1256 void PushFpr8(uint64_t /* val */) {
1257 // not optimizing registers, yet
1258 }
1259
1260 void PushStack(uintptr_t /* val */) {
1261    // Counting is already done in the state machine (see Walk above).
1262 }
1263
Andreas Gampec200a4a2014-06-16 18:39:09 -07001264 virtual uintptr_t PushHandle(mirror::Object* /* ptr */) {
Andreas Gampec147b002014-03-06 18:11:06 -08001265 return reinterpret_cast<uintptr_t>(nullptr);
1266 }
1267
Andreas Gampec200a4a2014-06-16 18:39:09 -07001268 protected:
Andreas Gampec147b002014-03-06 18:11:06 -08001269 uint32_t num_stack_entries_;
1270};
1271
Andreas Gampec200a4a2014-06-16 18:39:09 -07001272class ComputeGenericJniFrameSize FINAL : public ComputeNativeCallFrameSize {
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001273 public:
Andreas Gampec200a4a2014-06-16 18:39:09 -07001274 ComputeGenericJniFrameSize() : num_handle_scope_references_(0) {}
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001275
Andreas Gampec200a4a2014-06-16 18:39:09 -07001276 // Lays out the callee-save frame. Assumes that the incorrect frame corresponding to RefsAndArgs
1277  // is at *m = sp. Updates *m to point to the bottom of the fixed-up save frame.
1278 //
1279  // Note: assumes Walk() has been run beforehand, so num_handle_scope_references_ is final.
1280 void LayoutCalleeSaveFrame(StackReference<mirror::ArtMethod>** m, void* sp, HandleScope** table,
1281 uint32_t* handle_scope_entries)
1282 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1283 mirror::ArtMethod* method = (*m)->AsMirrorPtr();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001284
Andreas Gampec200a4a2014-06-16 18:39:09 -07001285 uint8_t* sp8 = reinterpret_cast<uint8_t*>(sp);
1286
1287 // First, fix up the layout of the callee-save frame.
1288 // We have to squeeze in the HandleScope, and relocate the method pointer.
1289
1290 // "Free" the slot for the method.
1291 sp8 += kPointerSize; // In the callee-save frame we use a full pointer.
1292
1293 // Under the callee saves put handle scope and new method stack reference.
1294 *handle_scope_entries = num_handle_scope_references_;
1295
1296 size_t handle_scope_size = HandleScope::SizeOf(num_handle_scope_references_);
1297 size_t scope_and_method = handle_scope_size + sizeof(StackReference<mirror::ArtMethod>);
1298
1299 sp8 -= scope_and_method;
1300 // Align by kStackAlignment.
1301 sp8 = reinterpret_cast<uint8_t*>(RoundDown(
1302 reinterpret_cast<uintptr_t>(sp8), kStackAlignment));
1303
1304 uint8_t* sp8_table = sp8 + sizeof(StackReference<mirror::ArtMethod>);
1305 *table = reinterpret_cast<HandleScope*>(sp8_table);
1306 (*table)->SetNumberOfReferences(num_handle_scope_references_);
1307
1308 // Add a slot for the method pointer, and fill it. Fix the pointer-pointer given to us.
1309 uint8_t* method_pointer = sp8;
1310 StackReference<mirror::ArtMethod>* new_method_ref =
1311 reinterpret_cast<StackReference<mirror::ArtMethod>*>(method_pointer);
1312 new_method_ref->Assign(method);
1313 *m = new_method_ref;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001314 }
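  // After the fix-up above, the frame looks roughly like this (higher addresses at the top; a
  // sketch only, the exact padding depends on kStackAlignment):
  //
  //   | rest of the original RefsAndArgs callee-save frame             |
  //   | alignment padding                                              |
  //   | HandleScope (num_handle_scope_references_ entries)             | <- *table
  //   | StackReference<ArtMethod> (relocated method pointer)           | <- *m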
1315
Andreas Gampec200a4a2014-06-16 18:39:09 -07001316 // Adds space for the cookie. Note: may leave stack unaligned.
1317 void LayoutCookie(uint8_t** sp) {
1318 // Reference cookie and padding
1319 *sp -= 8;
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001320 }
1321
Andreas Gampec200a4a2014-06-16 18:39:09 -07001322 // Re-layout the callee-save frame (insert a handle-scope). Then add space for the cookie.
1323 // Returns the new bottom. Note: this may be unaligned.
1324 uint8_t* LayoutJNISaveFrame(StackReference<mirror::ArtMethod>** m, void* sp, HandleScope** table,
1325 uint32_t* handle_scope_entries)
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001326 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001327 // First, fix up the layout of the callee-save frame.
1328 // We have to squeeze in the HandleScope, and relocate the method pointer.
1329 LayoutCalleeSaveFrame(m, sp, table, handle_scope_entries);
1330
1331 // The bottom of the callee-save frame is now where the method is, *m.
1332 uint8_t* sp8 = reinterpret_cast<uint8_t*>(*m);
1333
1334 // Add space for cookie.
1335 LayoutCookie(&sp8);
1336
1337 return sp8;
1338 }
1339
1340 // WARNING: After this, *sp won't be pointing to the method anymore!
1341 uint8_t* ComputeLayout(StackReference<mirror::ArtMethod>** m, bool is_static, const char* shorty,
1342 uint32_t shorty_len, HandleScope** table, uint32_t* handle_scope_entries,
1343 uintptr_t** start_stack, uintptr_t** start_gpr, uint32_t** start_fpr)
1344 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1345 Walk(shorty, shorty_len);
1346
1347 // JNI part.
1348 uint8_t* sp8 = LayoutJNISaveFrame(m, reinterpret_cast<void*>(*m), table, handle_scope_entries);
1349
1350 sp8 = LayoutNativeCall(sp8, start_stack, start_gpr, start_fpr);
1351
1352 // Return the new bottom.
1353 return sp8;
1354 }
1355
1356 uintptr_t PushHandle(mirror::Object* /* ptr */) OVERRIDE;
1357
1358 // Add JNIEnv* and jobj/jclass before the shorty-derived elements.
1359 void WalkHeader(BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) OVERRIDE
1360 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1361
1362 private:
1363 uint32_t num_handle_scope_references_;
1364};
1365
1366uintptr_t ComputeGenericJniFrameSize::PushHandle(mirror::Object* /* ptr */) {
1367 num_handle_scope_references_++;
1368 return reinterpret_cast<uintptr_t>(nullptr);
1369}
1370
1371void ComputeGenericJniFrameSize::WalkHeader(
1372 BuildNativeCallFrameStateMachine<ComputeNativeCallFrameSize>* sm) {
1373 // JNIEnv
1374 sm->AdvancePointer(nullptr);
1375
1376 // Class object or this as first argument
1377 sm->AdvanceHandleScope(reinterpret_cast<mirror::Object*>(0x12345678));
1378}
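// Every generic JNI call therefore gets two extra leading arguments ahead of the declared ones,
// per the JNI calling convention: the JNIEnv* and the jclass (for static methods) or jobject
// receiver (for instance methods). The 0x12345678 value is just a dummy non-null pointer used
// while sizing the frame; nothing dereferences it.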
1379
1380// Class to push values to three separate regions. Used to fill the native call part. Adheres to
1381// the template requirements of BuildGenericJniFrameStateMachine.
1382class FillNativeCall {
1383 public:
1384 FillNativeCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) :
1385 cur_gpr_reg_(gpr_regs), cur_fpr_reg_(fpr_regs), cur_stack_arg_(stack_args) {}
1386
1387 virtual ~FillNativeCall() {}
1388
1389 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args) {
1390 cur_gpr_reg_ = gpr_regs;
1391 cur_fpr_reg_ = fpr_regs;
1392 cur_stack_arg_ = stack_args;
Andreas Gampec147b002014-03-06 18:11:06 -08001393 }
1394
1395 void PushGpr(uintptr_t val) {
1396 *cur_gpr_reg_ = val;
1397 cur_gpr_reg_++;
1398 }
1399
1400 void PushFpr4(float val) {
1401 *cur_fpr_reg_ = val;
1402 cur_fpr_reg_++;
1403 }
1404
1405 void PushFpr8(uint64_t val) {
1406 uint64_t* tmp = reinterpret_cast<uint64_t*>(cur_fpr_reg_);
1407 *tmp = val;
1408 cur_fpr_reg_ += 2;
1409 }
1410
1411 void PushStack(uintptr_t val) {
1412 *cur_stack_arg_ = val;
1413 cur_stack_arg_++;
1414 }
1415
Andreas Gampec200a4a2014-06-16 18:39:09 -07001416 virtual uintptr_t PushHandle(mirror::Object* ref) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1417 LOG(FATAL) << "(Non-JNI) Native call does not use handles.";
1418 return 0U;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001419 }
1420
1421 private:
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001422 uintptr_t* cur_gpr_reg_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001423 uint32_t* cur_fpr_reg_;
1424 uintptr_t* cur_stack_arg_;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001425};
Andreas Gampec147b002014-03-06 18:11:06 -08001426
Andreas Gampec200a4a2014-06-16 18:39:09 -07001427// Visits arguments on the stack placing them into a region lower down the stack for the benefit
1428// of transitioning into native code.
1429class BuildGenericJniFrameVisitor FINAL : public QuickArgumentVisitor {
1430 public:
1431 BuildGenericJniFrameVisitor(StackReference<mirror::ArtMethod>** sp, bool is_static,
1432 const char* shorty, uint32_t shorty_len, Thread* self)
1433 : QuickArgumentVisitor(*sp, is_static, shorty, shorty_len),
1434 jni_call_(nullptr, nullptr, nullptr, nullptr), sm_(&jni_call_) {
1435 ComputeGenericJniFrameSize fsc;
1436 uintptr_t* start_gpr_reg;
1437 uint32_t* start_fpr_reg;
1438 uintptr_t* start_stack_arg;
1439 uint32_t handle_scope_entries;
1440 bottom_of_used_area_ = fsc.ComputeLayout(sp, is_static, shorty, shorty_len, &handle_scope_,
1441 &handle_scope_entries, &start_stack_arg,
1442 &start_gpr_reg, &start_fpr_reg);
1443
1444 handle_scope_->SetNumberOfReferences(handle_scope_entries);
1445 jni_call_.Reset(start_gpr_reg, start_fpr_reg, start_stack_arg, handle_scope_);
1446
1447    // The JNI environment is always the first argument.
1448 sm_.AdvancePointer(self->GetJniEnv());
1449
1450 if (is_static) {
1451 sm_.AdvanceHandleScope((*sp)->AsMirrorPtr()->GetDeclaringClass());
1452 }
1453 }
1454
1455 void Visit() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) OVERRIDE;
1456
1457 void FinalizeHandleScope(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1458
1459 StackReference<mirror::Object>* GetFirstHandleScopeEntry()
1460 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1461 return handle_scope_->GetHandle(0).GetReference();
1462 }
1463
1464 jobject GetFirstHandleScopeJObject() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1465 return handle_scope_->GetHandle(0).ToJObject();
1466 }
1467
1468 void* GetBottomOfUsedArea() {
1469 return bottom_of_used_area_;
1470 }
1471
1472 private:
1473 // A class to fill a JNI call. Adds reference/handle-scope management to FillNativeCall.
1474 class FillJniCall FINAL : public FillNativeCall {
1475 public:
1476 FillJniCall(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args,
1477 HandleScope* handle_scope) : FillNativeCall(gpr_regs, fpr_regs, stack_args),
1478 handle_scope_(handle_scope), cur_entry_(0) {}
1479
1480 uintptr_t PushHandle(mirror::Object* ref) OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
1481
1482 void Reset(uintptr_t* gpr_regs, uint32_t* fpr_regs, uintptr_t* stack_args, HandleScope* scope) {
1483 FillNativeCall::Reset(gpr_regs, fpr_regs, stack_args);
1484 handle_scope_ = scope;
1485 cur_entry_ = 0U;
1486 }
1487
1488 void ResetRemainingScopeSlots() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1489 // Initialize padding entries.
1490 size_t expected_slots = handle_scope_->NumberOfReferences();
1491 while (cur_entry_ < expected_slots) {
1492 handle_scope_->GetHandle(cur_entry_++).Assign(nullptr);
1493 }
1494 DCHECK_NE(cur_entry_, 0U);
1495 }
1496
1497 private:
1498 HandleScope* handle_scope_;
1499 size_t cur_entry_;
1500 };
1501
1502 HandleScope* handle_scope_;
1503 FillJniCall jni_call_;
1504 void* bottom_of_used_area_;
1505
1506 BuildNativeCallFrameStateMachine<FillJniCall> sm_;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001507
1508 DISALLOW_COPY_AND_ASSIGN(BuildGenericJniFrameVisitor);
1509};
1510
Andreas Gampec200a4a2014-06-16 18:39:09 -07001511uintptr_t BuildGenericJniFrameVisitor::FillJniCall::PushHandle(mirror::Object* ref) {
1512 uintptr_t tmp;
1513 Handle<mirror::Object> h = handle_scope_->GetHandle(cur_entry_);
1514 h.Assign(ref);
1515 tmp = reinterpret_cast<uintptr_t>(h.ToJObject());
1516 cur_entry_++;
1517 return tmp;
1518}
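// Note: what gets pushed into the native frame for a reference argument is not the raw
// mirror::Object* but the jobject obtained for the HandleScope slot above (h.ToJObject()),
// which is what the native code ultimately receives.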
1519
Ian Rogers9758f792014-03-13 09:02:55 -07001520void BuildGenericJniFrameVisitor::Visit() {
1521 Primitive::Type type = GetParamPrimitiveType();
1522 switch (type) {
1523 case Primitive::kPrimLong: {
1524 jlong long_arg;
1525 if (IsSplitLongOrDouble()) {
1526 long_arg = ReadSplitLongParam();
1527 } else {
1528 long_arg = *reinterpret_cast<jlong*>(GetParamAddress());
1529 }
1530 sm_.AdvanceLong(long_arg);
1531 break;
1532 }
1533 case Primitive::kPrimDouble: {
1534 uint64_t double_arg;
1535 if (IsSplitLongOrDouble()) {
1536        // Read the raw 64-bit value so that we don't cast to a double.
1537 double_arg = ReadSplitLongParam();
1538 } else {
1539 double_arg = *reinterpret_cast<uint64_t*>(GetParamAddress());
1540 }
1541 sm_.AdvanceDouble(double_arg);
1542 break;
1543 }
1544 case Primitive::kPrimNot: {
1545 StackReference<mirror::Object>* stack_ref =
1546 reinterpret_cast<StackReference<mirror::Object>*>(GetParamAddress());
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001547 sm_.AdvanceHandleScope(stack_ref->AsMirrorPtr());
Ian Rogers9758f792014-03-13 09:02:55 -07001548 break;
1549 }
1550 case Primitive::kPrimFloat:
1551 sm_.AdvanceFloat(*reinterpret_cast<float*>(GetParamAddress()));
1552 break;
1553 case Primitive::kPrimBoolean: // Fall-through.
1554 case Primitive::kPrimByte: // Fall-through.
1555 case Primitive::kPrimChar: // Fall-through.
1556 case Primitive::kPrimShort: // Fall-through.
1557 case Primitive::kPrimInt: // Fall-through.
1558 sm_.AdvanceInt(*reinterpret_cast<jint*>(GetParamAddress()));
1559 break;
1560 case Primitive::kPrimVoid:
1561 LOG(FATAL) << "UNREACHABLE";
1562 break;
1563 }
1564}
1565
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001566void BuildGenericJniFrameVisitor::FinalizeHandleScope(Thread* self) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001567 // Clear out rest of the scope.
1568 jni_call_.ResetRemainingScopeSlots();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001569 // Install HandleScope.
1570 self->PushHandleScope(handle_scope_);
Ian Rogers9758f792014-03-13 09:02:55 -07001571}
1572
Ian Rogers04c31d22014-07-07 21:44:06 -07001573#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001574extern "C" void* artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001575#else
1576extern "C" void* artFindNativeMethod(Thread* self);
1577#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001578
Andreas Gampead615172014-04-04 16:20:13 -07001579uint64_t artQuickGenericJniEndJNIRef(Thread* self, uint32_t cookie, jobject l, jobject lock) {
1580 if (lock != nullptr) {
1581 return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceSynchronized(l, cookie, lock, self));
1582 } else {
1583 return reinterpret_cast<uint64_t>(JniMethodEndWithReference(l, cookie, self));
1584 }
1585}
1586
1587void artQuickGenericJniEndJNINonRef(Thread* self, uint32_t cookie, jobject lock) {
1588 if (lock != nullptr) {
1589 JniMethodEndSynchronized(cookie, lock, self);
1590 } else {
1591 JniMethodEnd(cookie, self);
1592 }
1593}
1594
Andreas Gampec147b002014-03-06 18:11:06 -08001595/*
1596 * Initializes an alloca region assumed to be directly below sp for a native call:
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001597 * creates a HandleScope and a call stack, and fills a mini-stack with values to be pushed to registers.
Andreas Gampec147b002014-03-06 18:11:06 -08001598 * The final element on the stack is a pointer to the native code.
1599 *
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001600 * On entry, the stack has a standard callee-save frame above sp, and an alloca below it.
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001601 * We need to fix this, as the handle scope needs to go into the callee-save frame.
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001602 *
Andreas Gampec147b002014-03-06 18:11:06 -08001603 * The return of this function is a TwoWordReturn that denotes:
1604 * 1) on success, the bottom of the used alloca area and the native code to branch to;
1605 * 2) on failure, GetTwoWordFailureValue(), with an exception pending on the thread.
1606 */
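// A rough picture of what the trampoline builds inside the alloca region below the incoming
// callee-save frame (a sketch; exact padding depends on alignment):
//
//   | callee-save frame, with the HandleScope and relocated method reference squeezed in |
//   | JNI reference cookie (+ padding)                                                   |
//   | stack arguments that do not fit into registers                                     |
//   | FPR mini-stack                                                                     |
//   | GPR mini-stack                                                                     | <- returned bottom
//
// The assembly stub then moves the mini-stacks into the argument registers, releases that part
// of the alloca and branches to the returned native code.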
Andreas Gampec200a4a2014-06-16 18:39:09 -07001607extern "C" TwoWordReturn artQuickGenericJniTrampoline(Thread* self,
1608 StackReference<mirror::ArtMethod>* sp)
Andreas Gampe2da88232014-02-27 12:26:20 -08001609 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampecf4035a2014-05-28 22:43:01 -07001610 mirror::ArtMethod* called = sp->AsMirrorPtr();
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001611 DCHECK(called->IsNative()) << PrettyMethod(called, true);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001612 uint32_t shorty_len = 0;
1613 const char* shorty = called->GetShorty(&shorty_len);
Andreas Gampec200a4a2014-06-16 18:39:09 -07001614
1615 // Run the visitor.
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001616 BuildGenericJniFrameVisitor visitor(&sp, called->IsStatic(), shorty, shorty_len, self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001617 visitor.VisitArguments();
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001618 visitor.FinalizeHandleScope(self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001619
Andreas Gampec200a4a2014-06-16 18:39:09 -07001620 // Fix up managed-stack things in Thread.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001621 self->SetTopOfStack(sp, 0);
1622
Ian Rogerse0dcd462014-03-08 15:21:04 -08001623 self->VerifyStack();
1624
Andreas Gampe90546832014-03-12 18:07:19 -07001625 // Start JNI, save the cookie.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001626 uint32_t cookie;
1627 if (called->IsSynchronized()) {
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001628 cookie = JniMethodStartSynchronized(visitor.GetFirstHandleScopeJObject(), self);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001629 if (self->IsExceptionPending()) {
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001630 self->PopHandleScope();
Andreas Gampec147b002014-03-06 18:11:06 -08001631      // The failure value denotes an error; the exception is pending for the stub to deliver.
Andreas Gampec200a4a2014-06-16 18:39:09 -07001632 return GetTwoWordFailureValue();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001633 }
1634 } else {
1635 cookie = JniMethodStart(self);
1636 }
Andreas Gampe36fea8d2014-03-10 13:37:40 -07001637 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Ian Rogerse0dcd462014-03-08 15:21:04 -08001638 *(sp32 - 1) = cookie;
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001639
Andreas Gampe90546832014-03-12 18:07:19 -07001640 // Retrieve the stored native code.
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001641 const void* nativeCode = called->GetNativeMethod();
Andreas Gampe90546832014-03-12 18:07:19 -07001642
Andreas Gampe9a6a99a2014-03-14 07:52:20 -07001643 // There are two cases for the content of nativeCode:
1644 // 1) Pointer to the native function.
1645 // 2) Pointer to the trampoline for native code binding.
1646 // In the second case, we need to execute the binding and continue with the actual native function
1647 // pointer.
Andreas Gampe90546832014-03-12 18:07:19 -07001648 DCHECK(nativeCode != nullptr);
1649 if (nativeCode == GetJniDlsymLookupStub()) {
Ian Rogers04c31d22014-07-07 21:44:06 -07001650#if defined(__arm__) || defined(__aarch64__)
Andreas Gampe90546832014-03-12 18:07:19 -07001651 nativeCode = artFindNativeMethod();
Ian Rogers04c31d22014-07-07 21:44:06 -07001652#else
1653 nativeCode = artFindNativeMethod(self);
1654#endif
Andreas Gampe90546832014-03-12 18:07:19 -07001655
1656 if (nativeCode == nullptr) {
1657 DCHECK(self->IsExceptionPending()); // There should be an exception pending now.
Andreas Gampead615172014-04-04 16:20:13 -07001658
1659 // End JNI, as the assembly will move to deliver the exception.
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001660 jobject lock = called->IsSynchronized() ? visitor.GetFirstHandleScopeJObject() : nullptr;
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001661 if (shorty[0] == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001662 artQuickGenericJniEndJNIRef(self, cookie, nullptr, lock);
1663 } else {
1664 artQuickGenericJniEndJNINonRef(self, cookie, lock);
1665 }
1666
Andreas Gampec200a4a2014-06-16 18:39:09 -07001667 return GetTwoWordFailureValue();
Andreas Gampe90546832014-03-12 18:07:19 -07001668 }
1669 // Note that the native code pointer will be automatically set by artFindNativeMethod().
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001670 }
1671
Andreas Gampec200a4a2014-06-16 18:39:09 -07001672 // Return native code addr(lo) and bottom of alloca address(hi).
1673 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(visitor.GetBottomOfUsedArea()),
1674 reinterpret_cast<uintptr_t>(nativeCode));
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001675}
1676
1677/*
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001678 * Is called after the native JNI code. Responsible for cleanup (handle scope, saved state) and
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001679 * unlocking.
1680 */
Andreas Gampec200a4a2014-06-16 18:39:09 -07001681extern "C" uint64_t artQuickGenericJniEndTrampoline(Thread* self, jvalue result, uint64_t result_f)
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001682 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001683 StackReference<mirror::ArtMethod>* sp = self->GetManagedStack()->GetTopQuickFrame();
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001684 uint32_t* sp32 = reinterpret_cast<uint32_t*>(sp);
Andreas Gampecf4035a2014-05-28 22:43:01 -07001685 mirror::ArtMethod* called = sp->AsMirrorPtr();
Ian Rogerse0dcd462014-03-08 15:21:04 -08001686 uint32_t cookie = *(sp32 - 1);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001687
Andreas Gampead615172014-04-04 16:20:13 -07001688 jobject lock = nullptr;
1689 if (called->IsSynchronized()) {
Andreas Gampec200a4a2014-06-16 18:39:09 -07001690 HandleScope* table = reinterpret_cast<HandleScope*>(reinterpret_cast<uint8_t*>(sp)
1691 + sizeof(StackReference<mirror::ArtMethod>));
Mathieu Chartiereb8167a2014-05-07 15:43:14 -07001692 lock = table->GetHandle(0).ToJObject();
Andreas Gampead615172014-04-04 16:20:13 -07001693 }
1694
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001695 char return_shorty_char = called->GetShorty()[0];
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001696
1697 if (return_shorty_char == 'L') {
Andreas Gampead615172014-04-04 16:20:13 -07001698 return artQuickGenericJniEndJNIRef(self, cookie, result.l, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001699 } else {
Andreas Gampead615172014-04-04 16:20:13 -07001700 artQuickGenericJniEndJNINonRef(self, cookie, lock);
Andreas Gampebf6b92a2014-03-05 16:11:04 -08001701
1702 switch (return_shorty_char) {
1703 case 'F': // Fall-through.
1704 case 'D':
1705 return result_f;
1706 case 'Z':
1707 return result.z;
1708 case 'B':
1709 return result.b;
1710 case 'C':
1711 return result.c;
1712 case 'S':
1713 return result.s;
1714 case 'I':
1715 return result.i;
1716 case 'J':
1717 return result.j;
1718 case 'V':
1719 return 0;
1720 default:
1721 LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
1722 return 0;
1723 }
1724 }
Andreas Gampe2da88232014-02-27 12:26:20 -08001725}
1726
Andreas Gamped58342c2014-06-05 14:18:08 -07001727// We use TwoWordReturn to optimize scalar returns. We use the hi value for code, and the lo value
1728// for the method pointer.
Andreas Gampe51f76352014-05-21 08:28:48 -07001729//
Andreas Gamped58342c2014-06-05 14:18:08 -07001730// It is valid to use this, as at the usage points here (returns from C functions) we are assuming
1731// to hold the mutator lock (see SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) annotations).
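// Note, derived from the call sites in this file (below and in artQuickGenericJniTrampoline
// above): GetTwoWordSuccessValue is passed the hi word first and the lo word second.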
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001732
1733template<InvokeType type, bool access_check>
Andreas Gamped58342c2014-06-05 14:18:08 -07001734static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
Andreas Gampe51f76352014-05-21 08:28:48 -07001735 mirror::ArtMethod* caller_method,
Andreas Gampecf4035a2014-05-28 22:43:01 -07001736 Thread* self, StackReference<mirror::ArtMethod>* sp);
Andreas Gampe51f76352014-05-21 08:28:48 -07001737
1738template<InvokeType type, bool access_check>
Andreas Gamped58342c2014-06-05 14:18:08 -07001739static TwoWordReturn artInvokeCommon(uint32_t method_idx, mirror::Object* this_object,
Andreas Gampe51f76352014-05-21 08:28:48 -07001740 mirror::ArtMethod* caller_method,
Andreas Gampecf4035a2014-05-28 22:43:01 -07001741 Thread* self, StackReference<mirror::ArtMethod>* sp) {
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001742 mirror::ArtMethod* method = FindMethodFast(method_idx, this_object, caller_method, access_check,
1743 type);
1744 if (UNLIKELY(method == nullptr)) {
1745 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
1746 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()->GetDexFile();
1747 uint32_t shorty_len;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001748 const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(method_idx), &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001749 {
1750 // Remember the args in case a GC happens in FindMethodFromCode.
1751 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
1752 RememberForGcArgumentVisitor visitor(sp, type == kStatic, shorty, shorty_len, &soa);
1753 visitor.VisitArguments();
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001754 method = FindMethodFromCode<type, access_check>(method_idx, &this_object, &caller_method,
1755 self);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001756 visitor.FixupReferences();
1757 }
1758
1759    if (UNLIKELY(method == nullptr)) {
1760 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07001761 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001762 }
1763 }
1764 DCHECK(!self->IsExceptionPending());
1765 const void* code = method->GetEntryPointFromQuickCompiledCode();
1766
1767 // When we return, the caller will branch to this address, so it had better not be 0!
Andreas Gampec200a4a2014-06-16 18:39:09 -07001768 DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method)
1769 << " location: "
1770 << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07001771
Andreas Gamped58342c2014-06-05 14:18:08 -07001772 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
1773 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001774}
1775
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001776// Explicit artInvokeCommon template function declarations to please analysis tool.
1777#define EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(type, access_check) \
1778 template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) \
Andreas Gamped58342c2014-06-05 14:18:08 -07001779 TwoWordReturn artInvokeCommon<type, access_check>(uint32_t method_idx, \
Andreas Gampe51f76352014-05-21 08:28:48 -07001780 mirror::Object* this_object, \
1781 mirror::ArtMethod* caller_method, \
Andreas Gampecf4035a2014-05-28 22:43:01 -07001782 Thread* self, \
1783 StackReference<mirror::ArtMethod>* sp) \
Nicolas Geoffray8689a0a2014-04-04 09:26:24 +01001784
1785EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, false);
1786EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kVirtual, true);
1787EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, false);
1788EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kInterface, true);
1789EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, false);
1790EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kDirect, true);
1791EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, false);
1792EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kStatic, true);
1793EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, false);
1794EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL(kSuper, true);
1795#undef EXPLICIT_INVOKE_COMMON_TEMPLATE_DECL
1796
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001797// See comments in runtime_support_asm.S
Andreas Gampec200a4a2014-06-16 18:39:09 -07001798extern "C" TwoWordReturn artInvokeInterfaceTrampolineWithAccessCheck(
1799 uint32_t method_idx, mirror::Object* this_object,
1800 mirror::ArtMethod* caller_method, Thread* self,
1801 StackReference<mirror::ArtMethod>* sp)
1802 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1803 return artInvokeCommon<kInterface, true>(method_idx, this_object,
1804 caller_method, self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001805}
1806
Andreas Gampec200a4a2014-06-16 18:39:09 -07001807extern "C" TwoWordReturn artInvokeDirectTrampolineWithAccessCheck(
1808 uint32_t method_idx, mirror::Object* this_object,
1809 mirror::ArtMethod* caller_method, Thread* self,
1810 StackReference<mirror::ArtMethod>* sp)
1811 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1812 return artInvokeCommon<kDirect, true>(method_idx, this_object, caller_method,
1813 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001814}
1815
Andreas Gampec200a4a2014-06-16 18:39:09 -07001816extern "C" TwoWordReturn artInvokeStaticTrampolineWithAccessCheck(
1817 uint32_t method_idx, mirror::Object* this_object,
1818 mirror::ArtMethod* caller_method, Thread* self,
1819 StackReference<mirror::ArtMethod>* sp)
1820 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1821 return artInvokeCommon<kStatic, true>(method_idx, this_object, caller_method,
1822 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001823}
1824
Andreas Gampec200a4a2014-06-16 18:39:09 -07001825extern "C" TwoWordReturn artInvokeSuperTrampolineWithAccessCheck(
1826 uint32_t method_idx, mirror::Object* this_object,
1827 mirror::ArtMethod* caller_method, Thread* self,
1828 StackReference<mirror::ArtMethod>* sp)
1829 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1830 return artInvokeCommon<kSuper, true>(method_idx, this_object, caller_method,
1831 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001832}
1833
Andreas Gampec200a4a2014-06-16 18:39:09 -07001834extern "C" TwoWordReturn artInvokeVirtualTrampolineWithAccessCheck(
1835 uint32_t method_idx, mirror::Object* this_object,
1836 mirror::ArtMethod* caller_method, Thread* self,
1837 StackReference<mirror::ArtMethod>* sp)
1838 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1839 return artInvokeCommon<kVirtual, true>(method_idx, this_object, caller_method,
1840 self, sp);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001841}
1842
1843// Determine target of interface dispatch. This object is known non-null.
Andreas Gamped58342c2014-06-05 14:18:08 -07001844extern "C" TwoWordReturn artInvokeInterfaceTrampoline(mirror::ArtMethod* interface_method,
Andreas Gampe51f76352014-05-21 08:28:48 -07001845 mirror::Object* this_object,
1846 mirror::ArtMethod* caller_method,
Andreas Gampecf4035a2014-05-28 22:43:01 -07001847 Thread* self,
1848 StackReference<mirror::ArtMethod>* sp)
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001849 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
1850 mirror::ArtMethod* method;
1851 if (LIKELY(interface_method->GetDexMethodIndex() != DexFile::kDexNoIndex)) {
1852 method = this_object->GetClass()->FindVirtualMethodForInterface(interface_method);
1853    if (UNLIKELY(method == nullptr)) {
1854 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
1855 ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(interface_method, this_object,
1856 caller_method);
Andreas Gamped58342c2014-06-05 14:18:08 -07001857 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001858 }
1859 } else {
1860 FinishCalleeSaveFrameSetup(self, sp, Runtime::kRefsAndArgs);
1861 DCHECK(interface_method == Runtime::Current()->GetResolutionMethod());
Alexei Zavjalov41c507a2014-05-15 16:02:46 +07001862
1863 // Find the caller PC.
1864 constexpr size_t pc_offset = GetCalleeSavePCOffset(kRuntimeISA, Runtime::kRefsAndArgs);
1865 uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(reinterpret_cast<byte*>(sp) + pc_offset);
1866
1867 // Map the caller PC to a dex PC.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001868 uint32_t dex_pc = caller_method->ToDexPc(caller_pc);
Mathieu Chartierbfd9a432014-05-21 17:43:44 -07001869 const DexFile::CodeItem* code = caller_method->GetCodeItem();
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001870 CHECK_LT(dex_pc, code->insns_size_in_code_units_);
1871 const Instruction* instr = Instruction::At(&code->insns_[dex_pc]);
1872 Instruction::Code instr_code = instr->Opcode();
1873 CHECK(instr_code == Instruction::INVOKE_INTERFACE ||
1874 instr_code == Instruction::INVOKE_INTERFACE_RANGE)
1875        << "Unexpected call into interface trampoline: " << instr->DumpString(nullptr);
1876 uint32_t dex_method_idx;
1877 if (instr_code == Instruction::INVOKE_INTERFACE) {
1878 dex_method_idx = instr->VRegB_35c();
1879 } else {
1880 DCHECK_EQ(instr_code, Instruction::INVOKE_INTERFACE_RANGE);
1881 dex_method_idx = instr->VRegB_3rc();
1882 }
1883
Andreas Gampec200a4a2014-06-16 18:39:09 -07001884 const DexFile* dex_file = caller_method->GetDeclaringClass()->GetDexCache()
1885 ->GetDexFile();
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001886 uint32_t shorty_len;
Andreas Gampec200a4a2014-06-16 18:39:09 -07001887 const char* shorty = dex_file->GetMethodShorty(dex_file->GetMethodId(dex_method_idx),
1888 &shorty_len);
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001889 {
1890 // Remember the args in case a GC happens in FindMethodFromCode.
1891 ScopedObjectAccessUnchecked soa(self->GetJniEnv());
1892 RememberForGcArgumentVisitor visitor(sp, false, shorty, shorty_len, &soa);
1893 visitor.VisitArguments();
Mathieu Chartier0cd81352014-05-22 16:48:55 -07001894 method = FindMethodFromCode<kInterface, false>(dex_method_idx, &this_object, &caller_method,
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001895 self);
1896 visitor.FixupReferences();
1897 }
1898
1899 if (UNLIKELY(method == nullptr)) {
1900 CHECK(self->IsExceptionPending());
Andreas Gamped58342c2014-06-05 14:18:08 -07001901 return GetTwoWordFailureValue(); // Failure.
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001902 }
1903 }
1904 const void* code = method->GetEntryPointFromQuickCompiledCode();
1905
1906 // When we return, the caller will branch to this address, so it had better not be 0!
Andreas Gampec200a4a2014-06-16 18:39:09 -07001907 DCHECK(code != nullptr) << "Code was NULL in method: " << PrettyMethod(method)
1908 << " location: " << method->GetDexFile()->GetLocation();
Andreas Gampe51f76352014-05-21 08:28:48 -07001909
Andreas Gamped58342c2014-06-05 14:18:08 -07001910 return GetTwoWordSuccessValue(reinterpret_cast<uintptr_t>(code),
1911 reinterpret_cast<uintptr_t>(method));
Mathieu Chartier5f3ded42014-04-03 15:25:30 -07001912}
1913
Ian Rogers848871b2013-08-05 10:56:33 -07001914} // namespace art