/*
 * Copyright (C) 2016 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "method_handles-inl.h"

#include "android-base/stringprintf.h"

#include "common_dex_operations.h"
#include "jvalue-inl.h"
#include "jvalue.h"
#include "mirror/emulated_stack_frame.h"
#include "mirror/method_handle_impl-inl.h"
#include "mirror/method_type.h"
#include "reflection-inl.h"
#include "reflection.h"
#include "well_known_classes.h"

namespace art {

using android::base::StringPrintf;

namespace {

#define PRIMITIVES_LIST(V) \
  V(Primitive::kPrimBoolean, Boolean, Boolean, Z) \
  V(Primitive::kPrimByte, Byte, Byte, B)          \
  V(Primitive::kPrimChar, Char, Character, C)     \
  V(Primitive::kPrimShort, Short, Short, S)       \
  V(Primitive::kPrimInt, Int, Integer, I)         \
  V(Primitive::kPrimLong, Long, Long, J)          \
  V(Primitive::kPrimFloat, Float, Float, F)       \
  V(Primitive::kPrimDouble, Double, Double, D)

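// Illustrative note (informal): each PRIMITIVES_LIST entry supplies
// V(primitive enum, field accessor suffix, boxed java.lang class name,
// JValue shorty character). For example, the LOOKUP_PRIMITIVE user below
// expands the kPrimInt entry roughly to:
//   if (strcmp(descriptor, "Integer" ";") == 0) {
//     *type = Primitive::kPrimInt;
//     return true;
//   }
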
// Assigns |type| to the primitive type associated with |klass|. Returns
// true iff |klass| is a boxed primitive type (Integer, Long, etc.), false otherwise.
bool GetUnboxedPrimitiveType(ObjPtr<mirror::Class> klass, Primitive::Type* type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  std::string storage;
  const char* descriptor = klass->GetDescriptor(&storage);
  static const char kJavaLangPrefix[] = "Ljava/lang/";
  static const size_t kJavaLangPrefixSize = sizeof(kJavaLangPrefix) - 1;
  if (strncmp(descriptor, kJavaLangPrefix, kJavaLangPrefixSize) != 0) {
    return false;
  }

  descriptor += kJavaLangPrefixSize;
#define LOOKUP_PRIMITIVE(primitive, _, java_name, ___) \
  if (strcmp(descriptor, #java_name ";") == 0) {       \
    *type = primitive;                                 \
    return true;                                       \
  }

  PRIMITIVES_LIST(LOOKUP_PRIMITIVE);
#undef LOOKUP_PRIMITIVE
  return false;
}

ObjPtr<mirror::Class> GetBoxedPrimitiveClass(Primitive::Type type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  jmethodID m = nullptr;
  switch (type) {
#define CASE_PRIMITIVE(primitive, _, java_name, __)              \
    case primitive:                                              \
      m = WellKnownClasses::java_lang_ ## java_name ## _valueOf; \
      break;
    PRIMITIVES_LIST(CASE_PRIMITIVE);
#undef CASE_PRIMITIVE
    case Primitive::Type::kPrimNot:
    case Primitive::Type::kPrimVoid:
      return nullptr;
  }
  return jni::DecodeArtMethod(m)->GetDeclaringClass();
}

bool GetUnboxedTypeAndValue(ObjPtr<mirror::Object> o, Primitive::Type* type, JValue* value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ScopedAssertNoThreadSuspension ants(__FUNCTION__);
  ObjPtr<mirror::Class> klass = o->GetClass();
  ArtField* primitive_field = &klass->GetIFieldsPtr()->At(0);
#define CASE_PRIMITIVE(primitive, abbrev, _, shorthand)          \
  if (klass == GetBoxedPrimitiveClass(primitive)) {              \
    *type = primitive;                                           \
    value->Set ## shorthand(primitive_field->Get ## abbrev(o));  \
    return true;                                                 \
  }
  PRIMITIVES_LIST(CASE_PRIMITIVE)
#undef CASE_PRIMITIVE
  return false;
}

inline bool IsReferenceType(Primitive::Type type) {
  return type == Primitive::kPrimNot;
}

inline bool IsPrimitiveType(Primitive::Type type) {
  return !IsReferenceType(type);
}

}  // namespace

bool IsParameterTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // This function returns true if there's any conceivable conversion
  // between |from| and |to|. It's expected this method will be used
  // to determine if a WrongMethodTypeException should be raised. The
  // decision logic follows the documentation for MethodType.asType().
  if (from == to) {
    return true;
  }

  Primitive::Type from_primitive = from->GetPrimitiveType();
  Primitive::Type to_primitive = to->GetPrimitiveType();
  DCHECK(from_primitive != Primitive::Type::kPrimVoid);
  DCHECK(to_primitive != Primitive::Type::kPrimVoid);

  // If |to| and |from| are references.
  if (IsReferenceType(from_primitive) && IsReferenceType(to_primitive)) {
    // Assignability is determined during parameter conversion when
    // invoking the associated method handle.
    return true;
  }

  // If |to| and |from| are primitives and a widening conversion exists.
  if (Primitive::IsWidenable(from_primitive, to_primitive)) {
    return true;
  }

  // If |to| is a reference and |from| is a primitive, then boxing conversion.
  if (IsReferenceType(to_primitive) && IsPrimitiveType(from_primitive)) {
    return to->IsAssignableFrom(GetBoxedPrimitiveClass(from_primitive));
  }

  // If |from| is a reference and |to| is a primitive, then unboxing conversion.
  if (IsPrimitiveType(to_primitive) && IsReferenceType(from_primitive)) {
    if (from->DescriptorEquals("Ljava/lang/Object;")) {
      // Object might be converted into a primitive during unboxing.
      return true;
    }

    if (Primitive::IsNumericType(to_primitive) && from->DescriptorEquals("Ljava/lang/Number;")) {
      // Number might be unboxed into any of the number primitive types.
      return true;
    }

    Primitive::Type unboxed_type;
    if (GetUnboxedPrimitiveType(from, &unboxed_type)) {
      if (unboxed_type == to_primitive) {
        // Straightforward unboxing conversion such as Boolean => boolean.
        return true;
      }

      // Check if widening operations for numeric primitives would work,
      // such as Byte => byte => long.
      return Primitive::IsWidenable(unboxed_type, to_primitive);
    }
  }

  return false;
}
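
// Illustrative examples of the rules above (a non-exhaustive sketch):
//   int     -> long   : primitive widening, convertible.
//   int     -> Number : boxes to Integer, which Number is assignable from.
//   Integer -> long   : unboxes to int, which then widens to long.
//   Object  -> int    : may unbox to a primitive at invocation time.
//   String  -> int    : no conversion exists, not convertible.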

bool IsReturnTypeConvertible(ObjPtr<mirror::Class> from, ObjPtr<mirror::Class> to)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (to->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
    // Result will be ignored.
    return true;
  } else if (from->GetPrimitiveType() == Primitive::Type::kPrimVoid) {
    // Returned value will be 0 / null.
    return true;
  } else {
    // Otherwise apply usual parameter conversion rules.
    return IsParameterTypeConvertible(from, to);
  }
}

bool ConvertJValueCommon(
    Handle<mirror::MethodType> callsite_type,
    Handle<mirror::MethodType> callee_type,
    ObjPtr<mirror::Class> from,
    ObjPtr<mirror::Class> to,
    JValue* value) {
  // The reader may be concerned about the safety of the heap object
  // that may be in |value|. There is only one case where allocation
  // is obviously needed and that's for boxing. However, in the case
  // of boxing |value| contains a non-reference type.

  const Primitive::Type from_type = from->GetPrimitiveType();
  const Primitive::Type to_type = to->GetPrimitiveType();

  // Put the incoming value into |src_value| and set the return value to 0.
  // Errors and conversions from void require the return value to be 0.
  const JValue src_value(*value);
  value->SetJ(0);

  // A conversion from void just sets the result to zero.
  if (from_type == Primitive::kPrimVoid) {
    return true;
  }

  // This method must be called only when the types don't match.
  DCHECK(from != to);

  if (IsPrimitiveType(from_type) && IsPrimitiveType(to_type)) {
    // The source and target types are both primitives.
    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(from_type, to_type, src_value, value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }
    return true;
  } else if (IsReferenceType(from_type) && IsReferenceType(to_type)) {
    // They're both reference types. If "from" is null, we can pass it
    // through unchanged. If not, we must generate a cast exception if
    // |to| is not assignable from the dynamic type of |ref|.
    //
    // Playing it safe with StackHandleScope here, not expecting any allocation
    // in mirror::Class::IsAssignable().
    StackHandleScope<2> hs(Thread::Current());
    Handle<mirror::Class> h_to(hs.NewHandle(to));
    Handle<mirror::Object> h_obj(hs.NewHandle(src_value.GetL()));
    if (h_obj != nullptr && !to->IsAssignableFrom(h_obj->GetClass())) {
      ThrowClassCastException(h_to.Get(), h_obj->GetClass());
      return false;
    }
    value->SetL(h_obj.Get());
    return true;
  } else if (IsReferenceType(to_type)) {
    DCHECK(IsPrimitiveType(from_type));
    // The source type is a primitive and the target type is a reference, so we must box.
    // The target type may be a superclass of the boxed source type, for example,
    // if the source type is int, its boxed type is java.lang.Integer, and the target
    // type could be java.lang.Number.
    Primitive::Type type;
    if (!GetUnboxedPrimitiveType(to, &type)) {
      ObjPtr<mirror::Class> boxed_from_class = GetBoxedPrimitiveClass(from_type);
      if (boxed_from_class->IsSubClass(to)) {
        type = from_type;
      } else {
        ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
        return false;
      }
    }

    if (UNLIKELY(from_type != type)) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    if (!ConvertPrimitiveValueNoThrow(from_type, type, src_value, value)) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    // Then perform the actual boxing, and then set the reference.
    ObjPtr<mirror::Object> boxed = BoxPrimitive(type, src_value);
    value->SetL(boxed.Ptr());
    return true;
  } else {
    // The source type is a reference and the target type is a primitive, so we must unbox.
    DCHECK(IsReferenceType(from_type));
    DCHECK(IsPrimitiveType(to_type));

    ObjPtr<mirror::Object> from_obj(src_value.GetL());
    if (UNLIKELY(from_obj == nullptr)) {
      ThrowNullPointerException(
          StringPrintf("Expected to unbox a '%s' primitive type but was returned null",
                       from->PrettyDescriptor().c_str()).c_str());
      return false;
    }

    Primitive::Type unboxed_type;
    JValue unboxed_value;
    if (UNLIKELY(!GetUnboxedTypeAndValue(from_obj, &unboxed_type, &unboxed_value))) {
      ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
      return false;
    }

    if (UNLIKELY(!ConvertPrimitiveValueNoThrow(unboxed_type, to_type, unboxed_value, value))) {
      ThrowClassCastException(from, to);
      return false;
    }

    return true;
  }
}
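
// Illustrative sketch of the conversions above (informal, non-exhaustive):
//   int -> Number   : |value| holds an int; it is boxed to an Integer and
//                     |value| is updated to hold the reference.
//   Integer -> long : |value| holds an Integer reference; it is unboxed to
//                     an int and then widened, leaving a long in |value|.
//   Float -> double : unboxed to float, then widened to double.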

namespace {

template <bool is_range>
inline void CopyArgumentsFromCallerFrame(const ShadowFrame& caller_frame,
                                         ShadowFrame* callee_frame,
                                         const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                         uint32_t first_arg,
                                         const size_t first_dst_reg,
                                         const size_t num_regs)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  for (size_t i = 0; i < num_regs; ++i) {
    size_t dst_reg = first_dst_reg + i;
    size_t src_reg = is_range ? (first_arg + i) : args[i];
    // Uint required, so that sign extension does not make this wrong on 64-bit systems.
    uint32_t src_value = caller_frame.GetVReg(src_reg);
    ObjPtr<mirror::Object> o = caller_frame.GetVRegReference<kVerifyNone>(src_reg);
    // If both register locations contain the same value, the register probably holds a reference.
    // Note: As an optimization, non-moving collectors leave a stale reference value
    // in the references array even after the original vreg was overwritten to a non-reference.
    if (src_value == reinterpret_cast<uintptr_t>(o.Ptr())) {
      callee_frame->SetVRegReference(dst_reg, o.Ptr());
    } else {
      callee_frame->SetVReg(dst_reg, src_value);
    }
  }
}

template <bool is_range>
inline bool ConvertAndCopyArgumentsFromCallerFrame(
    Thread* self,
    Handle<mirror::MethodType> callsite_type,
    Handle<mirror::MethodType> callee_type,
    const ShadowFrame& caller_frame,
    const uint32_t (&args)[Instruction::kMaxVarArgRegs],
    uint32_t first_arg,
    uint32_t first_dst_reg,
    ShadowFrame* callee_frame)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ObjectArray<mirror::Class>> from_types(callsite_type->GetPTypes());
  ObjPtr<mirror::ObjectArray<mirror::Class>> to_types(callee_type->GetPTypes());

  const int32_t num_method_params = from_types->GetLength();
  if (to_types->GetLength() != num_method_params) {
    ThrowWrongMethodTypeException(callee_type.Get(), callsite_type.Get());
    return false;
  }

  ShadowFrameGetter<is_range> getter(first_arg, args, caller_frame);
  ShadowFrameSetter setter(callee_frame, first_dst_reg);

  return PerformConversions<ShadowFrameGetter<is_range>, ShadowFrameSetter>(self,
                                                                            callsite_type,
                                                                            callee_type,
                                                                            &getter,
                                                                            &setter,
                                                                            num_method_params);
}

inline bool IsInvoke(const mirror::MethodHandle::Kind handle_kind) {
  return handle_kind <= mirror::MethodHandle::Kind::kLastInvokeKind;
}

inline bool IsInvokeTransform(const mirror::MethodHandle::Kind handle_kind) {
  return (handle_kind == mirror::MethodHandle::Kind::kInvokeTransform
          || handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform);
}

inline bool IsFieldAccess(mirror::MethodHandle::Kind handle_kind) {
  return (handle_kind >= mirror::MethodHandle::Kind::kFirstAccessorKind
          && handle_kind <= mirror::MethodHandle::Kind::kLastAccessorKind);
}

// Calculate the number of ins for a proxy or native method, where we
// can't just look at the code item.
static inline size_t GetInsForProxyOrNativeMethod(ArtMethod* method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(method->IsNative() || method->IsProxyMethod());
  method = method->GetInterfaceMethodIfProxy(kRuntimePointerSize);
  uint32_t shorty_length = 0;
  const char* shorty = method->GetShorty(&shorty_length);

  // Static methods do not have a receiver. The receiver is not counted in
  // shorty_length, but the return type is, so the two cancel out for
  // instance methods.
  size_t num_ins = method->IsStatic() ? shorty_length - 1 : shorty_length;
  for (const char* c = shorty + 1; *c != '\0'; ++c) {
    if (*c == 'J' || *c == 'D') {
      ++num_ins;
    }
  }
  return num_ins;
}
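
// Illustrative example (hypothetical shorty): a non-static native method with
// shorty "DIJ" returns a double and takes (int, long). shorty_length is 3, so
// num_ins starts at 3 (receiver plus two arguments); the trailing 'J' then
// adds one extra vreg for the wide argument, giving 4 ins in total.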

// Returns true iff the callsite type for a polymorphic invoke is
// transformer-like, i.e. it has a single input argument whose type is
// dalvik.system.EmulatedStackFrame.
static inline bool IsCallerTransformer(Handle<mirror::MethodType> callsite_type)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  ObjPtr<mirror::ObjectArray<mirror::Class>> param_types(callsite_type->GetPTypes());
  if (param_types->GetLength() == 1) {
    ObjPtr<mirror::Class> param(param_types->GetWithoutChecks(0));
    // NB Comparing descriptor here as it appears faster in cycle simulation than using:
    //   param == WellKnownClasses::ToClass(WellKnownClasses::dalvik_system_EmulatedStackFrame)
    // Costs are 98 vs 173 cycles per invocation.
    return param->DescriptorEquals("Ldalvik/system/EmulatedStackFrame;");
  }

  return false;
}

template <bool is_range>
static inline bool MethodHandleInvokeMethod(ArtMethod* called_method,
                                            Handle<mirror::MethodType> callsite_type,
                                            Handle<mirror::MethodType> target_type,
                                            Thread* self,
                                            ShadowFrame& shadow_frame,
                                            const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                            uint32_t first_arg,
                                            JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  // Compute method information.
  const DexFile::CodeItem* code_item = called_method->GetCodeItem();

  // Number of registers for the callee's call frame. Note that for non-exact
  // invokes, we always derive this information from the callee method. We
  // cannot guarantee during verification that the number of registers encoded
  // in the invoke is equal to the number of ins for the callee. This is because
  // some transformations (such as boxing a long -> Long or widening an
  // int -> long) will change that number.
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(code_item != nullptr)) {
    num_regs = code_item->registers_size_;
    first_dest_reg = num_regs - code_item->ins_size_;
    num_input_regs = code_item->ins_size_;
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack.
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  // Whether this polymorphic invoke was issued by a transformer method.
  bool is_caller_transformer = false;
  // Thread might be suspended during PerformArgumentConversions due to the
  // allocations performed during boxing.
  {
    ScopedStackedShadowFramePusher pusher(
        self, new_shadow_frame, StackedShadowFrameType::kShadowFrameUnderConstruction);
    if (callsite_type->IsExactMatch(target_type.Get())) {
      // This is an exact invoke, we can take the fast path of just copying all
      // registers without performing any argument conversions.
      CopyArgumentsFromCallerFrame<is_range>(shadow_frame,
                                             new_shadow_frame,
                                             args,
                                             first_arg,
                                             first_dest_reg,
                                             num_input_regs);
    } else {
      // This includes the case where we're entering this invoke-polymorphic
      // from a transformer method. In that case, the callsite_type will contain
      // a single argument of type dalvik.system.EmulatedStackFrame. In that
      // case, we'll have to unmarshal the EmulatedStackFrame into the
      // new_shadow_frame and perform argument conversions on it.
      if (IsCallerTransformer(callsite_type)) {
        is_caller_transformer = true;
        // The emulated stack frame is the first and only argument when we're coming
        // through from a transformer.
        size_t first_arg_register = (is_range) ? first_arg : args[0];
        ObjPtr<mirror::EmulatedStackFrame> emulated_stack_frame(
            reinterpret_cast<mirror::EmulatedStackFrame*>(
                shadow_frame.GetVRegReference(first_arg_register)));
        if (!emulated_stack_frame->WriteToShadowFrame(self,
                                                      target_type,
                                                      first_dest_reg,
                                                      new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(0);
          return false;
        }
      } else {
        if (!callsite_type->IsConvertible(target_type.Get())) {
          ThrowWrongMethodTypeException(target_type.Get(), callsite_type.Get());
          return false;
        }
        if (!ConvertAndCopyArgumentsFromCallerFrame<is_range>(self,
                                                              callsite_type,
                                                              target_type,
                                                              shadow_frame,
                                                              args,
                                                              first_arg,
                                                              first_dest_reg,
                                                              new_shadow_frame)) {
          DCHECK(self->IsExceptionPending());
          result->SetL(0);
          return false;
        }
      }
    }
  }

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the caller of this signature polymorphic method was a transformer,
  // we need to copy the result back out to the emulated stack frame.
  if (is_caller_transformer) {
    StackHandleScope<2> hs(self);
    size_t first_callee_register = is_range ? (first_arg) : args[0];
    Handle<mirror::EmulatedStackFrame> emulated_stack_frame(
        hs.NewHandle(reinterpret_cast<mirror::EmulatedStackFrame*>(
            shadow_frame.GetVRegReference(first_callee_register))));
    Handle<mirror::MethodType> emulated_stack_type(hs.NewHandle(emulated_stack_frame->GetType()));
    JValue local_result;
    local_result.SetJ(result->GetJ());

    if (ConvertReturnValue(emulated_stack_type, target_type, &local_result)) {
      emulated_stack_frame->SetReturnValue(self, local_result);
      return true;
    }

    DCHECK(self->IsExceptionPending());
    return false;
  }

  return ConvertReturnValue(callsite_type, target_type, result);
}

template <bool is_range>
static inline bool MethodHandleInvokeTransform(ArtMethod* called_method,
                                               Handle<mirror::MethodType> callsite_type,
                                               Handle<mirror::MethodType> callee_type,
                                               Thread* self,
                                               ShadowFrame& shadow_frame,
                                               Handle<mirror::MethodHandle> receiver,
                                               const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                               uint32_t first_arg,
                                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // The frame size can be fixed at two because the method we're calling here
  // (MethodHandle.transformInternal) doesn't have any locals and its signature
  // is known:
  //
  //   private MethodHandle.transformInternal(EmulatedStackFrame sf);
  //
  // This means we need only two vregs:
  // - One for the receiver object.
  // - One for the only method argument (an EmulatedStackFrame).
  static constexpr size_t kNumRegsForTransform = 2;

  const DexFile::CodeItem* code_item = called_method->GetCodeItem();
  DCHECK(code_item != nullptr);
  DCHECK_EQ(kNumRegsForTransform, code_item->registers_size_);
  DCHECK_EQ(kNumRegsForTransform, code_item->ins_size_);

  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(kNumRegsForTransform, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();

  StackHandleScope<1> hs(self);
  MutableHandle<mirror::EmulatedStackFrame> sf(hs.NewHandle<mirror::EmulatedStackFrame>(nullptr));
  if (IsCallerTransformer(callsite_type)) {
    // If we're entering this transformer from another transformer, we can pass
    // through the handle directly to the callee, instead of having to
    // instantiate a new stack frame based on the shadow frame.
    size_t first_callee_register = is_range ? first_arg : args[0];
    sf.Assign(reinterpret_cast<mirror::EmulatedStackFrame*>(
        shadow_frame.GetVRegReference(first_callee_register)));
  } else {
    sf.Assign(mirror::EmulatedStackFrame::CreateFromShadowFrameAndArgs<is_range>(self,
                                                                                 callsite_type,
                                                                                 callee_type,
                                                                                 shadow_frame,
                                                                                 first_arg,
                                                                                 args));

    // Something went wrong while creating the emulated stack frame, we should
    // throw the pending exception.
    if (sf == nullptr) {
      DCHECK(self->IsExceptionPending());
      return false;
    }
  }

  new_shadow_frame->SetVRegReference(0, receiver.Get());
  new_shadow_frame->SetVRegReference(1, sf.Get());

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              0 /* first destination register */,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }

  // If the transformer method we called has returned a value, then we
  // need to copy it back to |result|.
  sf->GetReturnValue(self, result);
  return ConvertReturnValue(callsite_type, callee_type, result);
}
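
// Informal summary (not normative): at the Java level, a transformer's
// transformInternal(EmulatedStackFrame) receives the caller's arguments packed
// into the emulated frame, performs its adaptation (for example, a
// VarargsCollector gathering trailing arguments into an array), and writes its
// return value back into the frame, which the code above then copies into
// |result|.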

inline static ObjPtr<mirror::Class> GetAndInitializeDeclaringClass(Thread* self, ArtField* field)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // Method handle invocations on static fields should ensure the class is
  // initialized. This usually happens when an instance is constructed
  // or class members are referenced, but it is not guaranteed when
  // looking up method handles.
  ObjPtr<mirror::Class> klass = field->GetDeclaringClass();
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Class> h(hs.NewHandleWrapper(&klass));
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;
    }
  }
  return klass;
}

ArtMethod* RefineTargetMethod(Thread* self,
                              ShadowFrame& shadow_frame,
                              const mirror::MethodHandle::Kind& handle_kind,
                              Handle<mirror::MethodType> handle_type,
                              Handle<mirror::MethodType> callsite_type,
                              const uint32_t receiver_reg,
                              ArtMethod* target_method)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (handle_kind == mirror::MethodHandle::Kind::kInvokeVirtual ||
      handle_kind == mirror::MethodHandle::Kind::kInvokeInterface) {
    // For virtual and interface methods ensure target_method points to
    // the actual method to invoke.
    ObjPtr<mirror::Object> receiver(shadow_frame.GetVRegReference(receiver_reg));
    if (IsCallerTransformer(callsite_type)) {
      // The current receiver is an emulated stack frame, the method's
      // receiver needs to be fetched from there as the emulated frame
      // will be unpacked into a new frame.
      receiver = ObjPtr<mirror::EmulatedStackFrame>::DownCast(receiver)->GetReceiver();
    }

    ObjPtr<mirror::Class> declaring_class(target_method->GetDeclaringClass());
    if (receiver == nullptr || receiver->GetClass() != declaring_class) {
      // Verify that _vRegC is an object reference of the type expected for
      // the receiver.
      if (!VerifyObjectIsClass(receiver, declaring_class)) {
        DCHECK(self->IsExceptionPending());
        return nullptr;
      }
      return receiver->GetClass()->FindVirtualMethodForVirtualOrInterface(
          target_method, kRuntimePointerSize);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeDirect) {
    // String constructors are a special case, they are replaced with
    // StringFactory methods.
    if (target_method->IsConstructor() && target_method->GetDeclaringClass()->IsStringClass()) {
      DCHECK(handle_type->GetRType()->IsStringClass());
      return WellKnownClasses::StringInitToStringFactory(target_method);
    }
  } else if (handle_kind == mirror::MethodHandle::Kind::kInvokeSuper) {
    ObjPtr<mirror::Class> declaring_class = target_method->GetDeclaringClass();

    // Note that we're not dynamically dispatching on the type of the receiver
    // here. We use the static type of the "receiver" object that we've
    // recorded in the method handle's type, which will be the same as the
    // special caller that was specified at the point of lookup.
    ObjPtr<mirror::Class> referrer_class = handle_type->GetPTypes()->Get(0);
    if (!declaring_class->IsInterface()) {
      ObjPtr<mirror::Class> super_class = referrer_class->GetSuperClass();
      uint16_t vtable_index = target_method->GetMethodIndex();
      DCHECK(super_class != nullptr);
      DCHECK(super_class->HasVTable());
      // Note that super_class is a super of referrer_class and target_method
      // will always be declared by super_class (or one of its super classes).
      DCHECK_LT(vtable_index, super_class->GetVTableLength());
      return super_class->GetVTableEntry(vtable_index, kRuntimePointerSize);
    } else {
      return referrer_class->FindVirtualMethodForInterfaceSuper(target_method, kRuntimePointerSize);
    }
  }
  return target_method;
}

template <bool is_range>
bool DoInvokePolymorphicMethod(Thread* self,
                               ShadowFrame& shadow_frame,
                               Handle<mirror::MethodHandle> method_handle,
                               Handle<mirror::MethodType> callsite_type,
                               const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                               uint32_t first_arg,
                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  DCHECK(IsInvoke(handle_kind));

  // Get the method we're actually invoking along with the kind of
  // invoke that is desired. We don't need to perform access checks at this
  // point because they would have been performed on our behalf at the point
  // of creation of the method handle.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = is_range ? first_arg : args[0];
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                handle_kind,
                                                handle_type,
                                                callsite_type,
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  if (IsInvokeTransform(handle_kind)) {
    // There are two cases here - method handles representing regular
    // transforms and those representing call site transforms. Method
    // handles for call site transforms adapt their MethodType to match
    // the call site. For these, the |callee_type| is the same as the
    // |callsite_type|. The VarargsCollector is such a transform, its
    // method type depends on the call site, i.e. x(a) or x(a, b), or
    // x(a, b, c). The VarargsCollector invokes a variable arity method
    // with the arity arguments in an array.
    Handle<mirror::MethodType> callee_type =
        (handle_kind == mirror::MethodHandle::Kind::kInvokeCallSiteTransform) ? callsite_type
                                                                              : handle_type;
    return MethodHandleInvokeTransform<is_range>(called_method,
                                                 callsite_type,
                                                 callee_type,
                                                 self,
                                                 shadow_frame,
                                                 method_handle /* receiver */,
                                                 args,
                                                 first_arg,
                                                 result);
  } else {
    return MethodHandleInvokeMethod<is_range>(called_method,
                                              callsite_type,
                                              handle_type,
                                              self,
                                              shadow_frame,
                                              args,
                                              first_arg,
                                              result);
  }
}

// Helper for getters in invoke-polymorphic.
inline static void MethodHandleFieldGet(Thread* self,
                                        const ShadowFrame& shadow_frame,
                                        ObjPtr<mirror::Object>& obj,
                                        ArtField* field,
                                        Primitive::Type field_type,
                                        JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  switch (field_type) {
    case Primitive::kPrimBoolean:
      DoFieldGetCommon<Primitive::kPrimBoolean>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimByte:
      DoFieldGetCommon<Primitive::kPrimByte>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimChar:
      DoFieldGetCommon<Primitive::kPrimChar>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimShort:
      DoFieldGetCommon<Primitive::kPrimShort>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimInt:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimLong:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimFloat:
      DoFieldGetCommon<Primitive::kPrimInt>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimDouble:
      DoFieldGetCommon<Primitive::kPrimLong>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimNot:
      DoFieldGetCommon<Primitive::kPrimNot>(self, shadow_frame, obj, field, result);
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

// Helper for setters in invoke-polymorphic.
inline bool MethodHandleFieldPut(Thread* self,
                                 ShadowFrame& shadow_frame,
                                 ObjPtr<mirror::Object>& obj,
                                 ArtField* field,
                                 Primitive::Type field_type,
                                 JValue& value)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  DCHECK(!Runtime::Current()->IsActiveTransaction());
  static const bool kTransaction = false;         // Not in a transaction.
  static const bool kAssignabilityCheck = false;  // No access check.
  switch (field_type) {
    case Primitive::kPrimBoolean:
      return DoFieldPutCommon<Primitive::kPrimBoolean, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimByte:
      return DoFieldPutCommon<Primitive::kPrimByte, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimChar:
      return DoFieldPutCommon<Primitive::kPrimChar, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimShort:
      return DoFieldPutCommon<Primitive::kPrimShort, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      return DoFieldPutCommon<Primitive::kPrimInt, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      return DoFieldPutCommon<Primitive::kPrimLong, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimNot:
      return DoFieldPutCommon<Primitive::kPrimNot, kAssignabilityCheck, kTransaction>(
          self, shadow_frame, obj, field, value);
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
}

static JValue GetValueFromShadowFrame(const ShadowFrame& shadow_frame,
                                      Primitive::Type field_type,
                                      uint32_t vreg)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JValue field_value;
  switch (field_type) {
    case Primitive::kPrimBoolean:
      field_value.SetZ(static_cast<uint8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimByte:
      field_value.SetB(static_cast<int8_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimChar:
      field_value.SetC(static_cast<uint16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimShort:
      field_value.SetS(static_cast<int16_t>(shadow_frame.GetVReg(vreg)));
      break;
    case Primitive::kPrimInt:
    case Primitive::kPrimFloat:
      field_value.SetI(shadow_frame.GetVReg(vreg));
      break;
    case Primitive::kPrimLong:
    case Primitive::kPrimDouble:
      field_value.SetJ(shadow_frame.GetVRegLong(vreg));
      break;
    case Primitive::kPrimNot:
      field_value.SetL(shadow_frame.GetVRegReference(vreg));
      break;
    case Primitive::kPrimVoid:
      LOG(FATAL) << "Unreachable: " << field_type;
      UNREACHABLE();
  }
  return field_value;
}

template <bool is_range, bool do_conversions>
bool MethodHandleFieldAccess(Thread* self,
                             ShadowFrame& shadow_frame,
                             Handle<mirror::MethodHandle> method_handle,
                             Handle<mirror::MethodType> callsite_type,
                             const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                             uint32_t first_arg,
                             JValue* result) REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> handle_type(hs.NewHandle(method_handle->GetMethodType()));
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  ArtField* field = method_handle->GetTargetField();
  Primitive::Type field_type = field->GetTypeAsPrimitiveType();

  switch (handle_kind) {
    case mirror::MethodHandle::kInstanceGet: {
      size_t obj_reg = is_range ? first_arg : args[0];
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
      if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return true;
    }
    case mirror::MethodHandle::kStaticGet: {
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      MethodHandleFieldGet(self, shadow_frame, obj, field, field_type, result);
      if (do_conversions && !ConvertReturnValue(callsite_type, handle_type, result)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return true;
    }
    case mirror::MethodHandle::kInstancePut: {
      size_t obj_reg = is_range ? first_arg : args[0];
      size_t value_reg = is_range ? (first_arg + 1) : args[1];
      const size_t kPTypeIndex = 1;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      if (do_conversions && !ConvertArgumentValue(callsite_type,
                                                  handle_type,
                                                  kPTypeIndex,
                                                  &value)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      ObjPtr<mirror::Object> obj = shadow_frame.GetVRegReference(obj_reg);
      return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
    }
    case mirror::MethodHandle::kStaticPut: {
      ObjPtr<mirror::Object> obj = GetAndInitializeDeclaringClass(self, field);
      if (obj == nullptr) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      size_t value_reg = is_range ? first_arg : args[0];
      const size_t kPTypeIndex = 0;
      // Use ptypes instead of field type since we may be unboxing a reference for a primitive
      // field. The field type is incorrect for this case.
      JValue value = GetValueFromShadowFrame(
          shadow_frame,
          callsite_type->GetPTypes()->Get(kPTypeIndex)->GetPrimitiveType(),
          value_reg);
      if (do_conversions && !ConvertArgumentValue(callsite_type,
                                                  handle_type,
                                                  kPTypeIndex,
                                                  &value)) {
        DCHECK(self->IsExceptionPending());
        return false;
      }
      return MethodHandleFieldPut(self, shadow_frame, obj, field, field_type, value);
    }
    default:
      LOG(FATAL) << "Unreachable: " << handle_kind;
      UNREACHABLE();
  }
}

template <bool is_range>
static inline bool MethodHandleInvokeInternal(Thread* self,
                                              ShadowFrame& shadow_frame,
                                              Handle<mirror::MethodHandle> method_handle,
                                              Handle<mirror::MethodType> callsite_type,
                                              const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                                              uint32_t first_arg,
                                              JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  if (IsFieldAccess(handle_kind)) {
    ObjPtr<mirror::MethodType> handle_type(method_handle->GetMethodType());
    DCHECK(!callsite_type->IsExactMatch(handle_type.Ptr()));
    if (!callsite_type->IsConvertible(handle_type.Ptr())) {
      ThrowWrongMethodTypeException(handle_type.Ptr(), callsite_type.Get());
      return false;
    }
    const bool do_convert = true;
    return MethodHandleFieldAccess<is_range, do_convert>(
        self,
        shadow_frame,
        method_handle,
        callsite_type,
        args,
        first_arg,
        result);
  }
  return DoInvokePolymorphicMethod<is_range>(self,
                                             shadow_frame,
                                             method_handle,
                                             callsite_type,
                                             args,
                                             first_arg,
                                             result);
}

template <bool is_range>
static inline bool MethodHandleInvokeExactInternal(
    Thread* self,
    ShadowFrame& shadow_frame,
    Handle<mirror::MethodHandle> method_handle,
    Handle<mirror::MethodType> callsite_type,
    const uint32_t (&args)[Instruction::kMaxVarArgRegs],
    uint32_t first_arg,
    JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  StackHandleScope<1> hs(self);
  Handle<mirror::MethodType> method_handle_type(hs.NewHandle(method_handle->GetMethodType()));
  if (!callsite_type->IsExactMatch(method_handle_type.Get())) {
    ThrowWrongMethodTypeException(method_handle_type.Get(), callsite_type.Get());
    return false;
  }

  const mirror::MethodHandle::Kind handle_kind = method_handle->GetHandleKind();
  if (IsFieldAccess(handle_kind)) {
    const bool do_convert = false;
    return MethodHandleFieldAccess<is_range, do_convert>(self,
                                                         shadow_frame,
                                                         method_handle,
                                                         callsite_type,
                                                         args,
                                                         first_arg,
                                                         result);
  }

  // Slow-path check.
  if (IsInvokeTransform(handle_kind) || IsCallerTransformer(callsite_type)) {
    return DoInvokePolymorphicMethod<is_range>(self,
                                               shadow_frame,
                                               method_handle,
                                               callsite_type,
                                               args,
                                               first_arg,
                                               result);
  }

  // On the fast path. This is equivalent to the non-exact invoke path
  // (MethodHandleInvokeMethod) without the conversion steps.
  ArtMethod* target_method = method_handle->GetTargetMethod();
  uint32_t receiver_reg = is_range ? first_arg : args[0];
  ArtMethod* called_method = RefineTargetMethod(self,
                                                shadow_frame,
                                                handle_kind,
                                                method_handle_type,
                                                callsite_type,
                                                receiver_reg,
                                                target_method);
  if (called_method == nullptr) {
    DCHECK(self->IsExceptionPending());
    return false;
  }

  // Compute method information.
  const DexFile::CodeItem* code_item = called_method->GetCodeItem();
  uint16_t num_regs;
  size_t num_input_regs;
  size_t first_dest_reg;
  if (LIKELY(code_item != nullptr)) {
    num_regs = code_item->registers_size_;
    first_dest_reg = num_regs - code_item->ins_size_;
    num_input_regs = code_item->ins_size_;
    // Parameter registers go at the end of the shadow frame.
    DCHECK_NE(first_dest_reg, (size_t)-1);
  } else {
    // No local regs for proxy and native methods.
    DCHECK(called_method->IsNative() || called_method->IsProxyMethod());
    num_regs = num_input_regs = GetInsForProxyOrNativeMethod(called_method);
    first_dest_reg = 0;
  }

  // Allocate shadow frame on the stack.
  const char* old_cause = self->StartAssertNoThreadSuspension("DoCallCommon");
  ShadowFrameAllocaUniquePtr shadow_frame_unique_ptr =
      CREATE_SHADOW_FRAME(num_regs, &shadow_frame, called_method, /* dex pc */ 0);
  ShadowFrame* new_shadow_frame = shadow_frame_unique_ptr.get();
  CopyArgumentsFromCallerFrame<is_range>(shadow_frame,
                                         new_shadow_frame,
                                         args,
                                         first_arg,
                                         first_dest_reg,
                                         num_input_regs);
  self->EndAssertNoThreadSuspension(old_cause);

  bool use_interpreter_entrypoint = ClassLinker::ShouldUseInterpreterEntrypoint(
      called_method, called_method->GetEntryPointFromQuickCompiledCode());
  PerformCall(self,
              code_item,
              shadow_frame.GetMethod(),
              first_dest_reg,
              new_shadow_frame,
              result,
              use_interpreter_entrypoint);
  if (self->IsExceptionPending()) {
    return false;
  }
  return true;
}

}  // namespace

template <bool is_range>
inline bool MethodHandleInvoke(Thread* self,
                               ShadowFrame& shadow_frame,
                               Handle<mirror::MethodHandle> method_handle,
                               Handle<mirror::MethodType> callsite_type,
                               const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                               uint32_t first_arg,
                               JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  if (UNLIKELY(callsite_type->IsExactMatch(method_handle->GetMethodType()))) {
    // A non-exact invoke that can be invoked exactly.
    return MethodHandleInvokeExactInternal<is_range>(self,
                                                     shadow_frame,
                                                     method_handle,
                                                     callsite_type,
                                                     args,
                                                     first_arg,
                                                     result);
  } else {
    return MethodHandleInvokeInternal<is_range>(self,
                                                shadow_frame,
                                                method_handle,
                                                callsite_type,
                                                args,
                                                first_arg,
                                                result);
  }
}

template <bool is_range>
bool MethodHandleInvokeExact(Thread* self,
                             ShadowFrame& shadow_frame,
                             Handle<mirror::MethodHandle> method_handle,
                             Handle<mirror::MethodType> callsite_type,
                             const uint32_t (&args)[Instruction::kMaxVarArgRegs],
                             uint32_t first_arg,
                             JValue* result)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  // We need to check the nominal type of the handle in addition to the
  // real type. The "nominal" type is present when MethodHandle.asType is
  // called on a handle, and results in the declared type of the handle
  // changing.
  ObjPtr<mirror::MethodType> nominal_type(method_handle->GetNominalType());
  if (UNLIKELY(nominal_type != nullptr)) {
    if (UNLIKELY(!callsite_type->IsExactMatch(nominal_type.Ptr()))) {
      ThrowWrongMethodTypeException(nominal_type.Ptr(), callsite_type.Get());
      return false;
    }
    if (LIKELY(!nominal_type->IsExactMatch(method_handle->GetMethodType()))) {
      // Different nominal type means we have to treat as non-exact.
      return MethodHandleInvokeInternal<is_range>(self,
                                                  shadow_frame,
                                                  method_handle,
                                                  callsite_type,
                                                  args,
                                                  first_arg,
                                                  result);
    }
  }
  return MethodHandleInvokeExactInternal<is_range>(self,
                                                   shadow_frame,
                                                   method_handle,
                                                   callsite_type,
                                                   args,
                                                   first_arg,
                                                   result);
}
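
// Illustrative sketch of the nominal type check above (assumed Java-level
// view): given a handle mh of type (int)int, calling
// mh.asType(MethodType.methodType(long.class, int.class)) yields a handle
// whose nominal type is (int)long while its underlying type stays (int)int.
// invokeExact on the adapted handle must match (int)long exactly; since the
// nominal and underlying types differ, the non-exact path above then performs
// the int -> long return conversion.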

#define EXPLICIT_DO_METHOD_HANDLE_METHOD(_name, _is_range)                                 \
  template REQUIRES_SHARED(Locks::mutator_lock_)                                           \
  bool MethodHandle##_name<_is_range>(                                                     \
      Thread* self,                                                                        \
      ShadowFrame& shadow_frame,                                                           \
      Handle<mirror::MethodHandle> method_handle,                                          \
      Handle<mirror::MethodType> callsite_type,                                            \
      const uint32_t (&args)[Instruction::kMaxVarArgRegs],                                 \
      uint32_t first_arg,                                                                  \
      JValue* result)

EXPLICIT_DO_METHOD_HANDLE_METHOD(Invoke, true);
EXPLICIT_DO_METHOD_HANDLE_METHOD(Invoke, false);
EXPLICIT_DO_METHOD_HANDLE_METHOD(InvokeExact, true);
EXPLICIT_DO_METHOD_HANDLE_METHOD(InvokeExact, false);
#undef EXPLICIT_DO_METHOD_HANDLE_METHOD

}  // namespace art