blob: 7c73c7924408d48394c8f462831e8b7a469a8237 [file] [log] [blame]
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
18#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
19
20#include "entrypoint_utils.h"
21
22#include "class_linker-inl.h"
23#include "common_throws.h"
24#include "dex_file.h"
25#include "indirect_reference_table.h"
26#include "invoke_type.h"
27#include "jni_internal.h"
28#include "mirror/art_method.h"
29#include "mirror/array.h"
30#include "mirror/class-inl.h"
31#include "mirror/object-inl.h"
32#include "mirror/throwable.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070033#include "handle_scope-inl.h"
34#include "thread.h"
35
36namespace art {
37
38// TODO: Fix no thread safety analysis when GCC can handle template specialization.
39template <const bool kAccessCheck>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -070040ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -070041static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
42 mirror::ArtMethod* method,
43 Thread* self, bool* slow_path) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -070044 mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
45 if (UNLIKELY(klass == NULL)) {
46 klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
47 *slow_path = true;
48 if (klass == NULL) {
49 DCHECK(self->IsExceptionPending());
50 return nullptr; // Failure
51 }
52 }
53 if (kAccessCheck) {
54 if (UNLIKELY(!klass->IsInstantiable())) {
55 ThrowLocation throw_location = self->GetCurrentLocationForThrow();
56 self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
57 PrettyDescriptor(klass).c_str());
58 *slow_path = true;
59 return nullptr; // Failure
60 }
61 mirror::Class* referrer = method->GetDeclaringClass();
62 if (UNLIKELY(!referrer->CanAccess(klass))) {
63 ThrowIllegalAccessErrorClass(referrer, klass);
64 *slow_path = true;
65 return nullptr; // Failure
66 }
67 }
68 if (UNLIKELY(!klass->IsInitialized())) {
69 StackHandleScope<1> hs(self);
70 Handle<mirror::Class> h_klass(hs.NewHandle(klass));
71 // EnsureInitialized (the class initializer) might cause a GC.
72 // may cause us to suspend meaning that another thread may try to
73 // change the allocator while we are stuck in the entrypoints of
74 // an old allocator. Also, the class initialization may fail. To
75 // handle these cases we mark the slow path boolean as true so
76 // that the caller knows to check the allocator type to see if it
77 // has changed and to null-check the return value in case the
78 // initialization fails.
79 *slow_path = true;
80 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_klass, true, true)) {
81 DCHECK(self->IsExceptionPending());
82 return nullptr; // Failure
83 }
84 return h_klass.Get();
85 }
86 return klass;
87}
88
89// TODO: Fix no thread safety analysis when annotalysis is smarter.
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -070090ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -070091static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
92 Thread* self,
93 bool* slow_path) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -070094 if (UNLIKELY(!klass->IsInitialized())) {
95 StackHandleScope<1> hs(self);
96 Handle<mirror::Class> h_class(hs.NewHandle(klass));
97 // EnsureInitialized (the class initializer) might cause a GC.
98 // may cause us to suspend meaning that another thread may try to
99 // change the allocator while we are stuck in the entrypoints of
100 // an old allocator. Also, the class initialization may fail. To
101 // handle these cases we mark the slow path boolean as true so
102 // that the caller knows to check the allocator type to see if it
103 // has changed and to null-check the return value in case the
104 // initialization fails.
105 *slow_path = true;
106 if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
107 DCHECK(self->IsExceptionPending());
108 return nullptr; // Failure
109 }
110 return h_class.Get();
111 }
112 return klass;
113}
114
115// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
116// cannot be resolved, throw an error. If it can, use it to create an instance.
117// When verification/compiler hasn't been able to verify access, optionally perform an access
118// check.
119// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
120template <bool kAccessCheck, bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700121ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -0700122static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
123 mirror::ArtMethod* method,
124 Thread* self,
125 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700126 bool slow_path = false;
127 mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
128 if (UNLIKELY(slow_path)) {
129 if (klass == nullptr) {
130 return nullptr;
131 }
132 return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
133 }
134 DCHECK(klass != nullptr);
135 return klass->Alloc<kInstrumented>(self, allocator_type);
136}
137
138// Given the context of a calling Method and a resolved class, create an instance.
139// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
140template <bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700141ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -0700142static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
143 mirror::ArtMethod* method,
144 Thread* self,
145 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700146 DCHECK(klass != nullptr);
147 bool slow_path = false;
148 klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
149 if (UNLIKELY(slow_path)) {
150 if (klass == nullptr) {
151 return nullptr;
152 }
153 gc::Heap* heap = Runtime::Current()->GetHeap();
154 // Pass in false since the object can not be finalizable.
155 return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
156 }
157 // Pass in false since the object can not be finalizable.
158 return klass->Alloc<kInstrumented, false>(self, allocator_type);
159}
160
161// Given the context of a calling Method and an initialized class, create an instance.
162// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
163template <bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700164ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -0700165static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
166 mirror::ArtMethod* method,
167 Thread* self,
168 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700169 DCHECK(klass != nullptr);
170 // Pass in false since the object can not be finalizable.
171 return klass->Alloc<kInstrumented, false>(self, allocator_type);
172}
173
174
175// TODO: Fix no thread safety analysis when GCC can handle template specialization.
176template <bool kAccessCheck>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700177ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -0700178static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
179 mirror::ArtMethod* method,
180 int32_t component_count,
181 bool* slow_path) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700182 if (UNLIKELY(component_count < 0)) {
183 ThrowNegativeArraySizeException(component_count);
184 *slow_path = true;
185 return nullptr; // Failure
186 }
187 mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
188 if (UNLIKELY(klass == nullptr)) { // Not in dex cache so try to resolve
189 klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
190 *slow_path = true;
191 if (klass == nullptr) { // Error
192 DCHECK(Thread::Current()->IsExceptionPending());
193 return nullptr; // Failure
194 }
195 CHECK(klass->IsArrayClass()) << PrettyClass(klass);
196 }
197 if (kAccessCheck) {
198 mirror::Class* referrer = method->GetDeclaringClass();
199 if (UNLIKELY(!referrer->CanAccess(klass))) {
200 ThrowIllegalAccessErrorClass(referrer, klass);
201 *slow_path = true;
202 return nullptr; // Failure
203 }
204 }
205 return klass;
206}
207
208// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
209// it cannot be resolved, throw an error. If it can, use it to create an array.
210// When verification/compiler hasn't been able to verify access, optionally perform an access
211// check.
212// TODO: Fix no thread safety analysis when GCC can handle template specialization.
213template <bool kAccessCheck, bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700214ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -0700215static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
216 mirror::ArtMethod* method,
217 int32_t component_count,
218 Thread* self,
219 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700220 bool slow_path = false;
221 mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
222 &slow_path);
223 if (UNLIKELY(slow_path)) {
224 if (klass == nullptr) {
225 return nullptr;
226 }
227 gc::Heap* heap = Runtime::Current()->GetHeap();
228 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
229 klass->GetComponentSize(),
230 heap->GetCurrentAllocator());
231 }
232 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
233 klass->GetComponentSize(), allocator_type);
234}
235
236template <bool kAccessCheck, bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700237ALWAYS_INLINE
Ian Rogerse5877a12014-07-16 12:06:35 -0700238static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
239 mirror::ArtMethod* method,
240 int32_t component_count,
241 Thread* self,
242 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700243 DCHECK(klass != nullptr);
244 if (UNLIKELY(component_count < 0)) {
245 ThrowNegativeArraySizeException(component_count);
246 return nullptr; // Failure
247 }
248 if (kAccessCheck) {
249 mirror::Class* referrer = method->GetDeclaringClass();
250 if (UNLIKELY(!referrer->CanAccess(klass))) {
251 ThrowIllegalAccessErrorClass(referrer, klass);
252 return nullptr; // Failure
253 }
254 }
255 // No need to retry a slow-path allocation as the above code won't cause a GC or thread
256 // suspension.
257 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
258 klass->GetComponentSize(), allocator_type);
259}
260
// Slow-path field resolution. Resolves |field_idx| relative to |referrer|, optionally
// performing the access checks the verifier could not prove statically, and (for static
// fields) ensuring the declaring class is initialized. Returns the resolved field, or
// nullptr with a pending exception on any failure.
template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size) {
  // Decode the compile-time FindFieldType into the three orthogonal properties checked below.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite:  // Keep GCC happy by having a default handler, fall-through.
    default: is_primitive = true; is_set = true; is_static = true; break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    // Static-ness of the field must match the access type used by the bytecode.
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Writing a final field is only legal from within its own declaring class.
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      // Primitive-ness and width must match what the bytecode expects.
      // expected_size * (32 / sizeof(int32_t)) converts bytes to bits for the message.
      // NOTE(review): the message says "read" even for write accesses — confirm intended.
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // instance fields must be being accessed on an initialized class
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(h_class, true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind
      return nullptr;  // Failure.
    }
  }
}
332
// Explicit template declarations of FindFieldFromCode for all field access types.
// Each TYPED macro expands to both the access_check = false and access_check = true variants.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

// The macros are only needed for the instantiations above.
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL
355
// Slow-path method resolution for an invoke of the given |type|. Resolves |method_idx|
// relative to |*referrer|, optionally performs access checks, then selects the actual
// dispatch target (direct method, vtable entry, super's vtable entry, or IMT/interface
// lookup). Returns nullptr with a pending exception on failure. |this_object| and
// |referrer| are double pointers so they can be updated if a GC moves the objects.
template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                                    mirror::Object** this_object,
                                                    mirror::ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer, type);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    // Wrap the receiver in a handle so it is updated if ResolveMethod triggers a GC.
    // Static calls have no receiver, so wrap a local null instead.
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK_EQ(*referrer, throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  // Resolution and checks passed; select the actual dispatch target per invoke type.
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      // Dispatch through the receiver's vtable.
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index);
    }
    case kSuper: {
      // Super calls dispatch through the superclass of the referrer's declaring class.
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check) {
        // Check existence of super class.
        if (super_class == nullptr || !super_class->HasVTable() ||
            vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
      }
      DCHECK(super_class->HasVTable());
      return super_class->GetVTableEntry(vtable_index);
    }
    case kInterface: {
      // Try the embedded interface method table (IMT) first; fall back to a full interface
      // lookup when the IMT slot holds the shared conflict method.
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index);
      if (!imt_method->IsImtConflictMethod()) {
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}
454
// Explicit template declarations of FindMethodFromCode for all invoke types.
// Each TYPED macro expands to both the access_check = false and access_check = true variants.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check)                 \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE                       \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx,         \
                                                              mirror::Object** this_object, \
                                                              mirror::ArtMethod** referrer, \
                                                              Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false);   \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

// The macros are only needed for the instantiations above.
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL
474
// Fast path field resolution that can't initialize classes or throw exceptions.
// Only consults the referrer's dex cache; any condition that would require resolution,
// class initialization, or throwing returns nullptr so the caller falls back to the
// slow path (FindFieldFromCode).
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                              mirror::ArtMethod* referrer,
                                              FindFieldType type, size_t expected_size) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == nullptr)) {
    // Not yet in the dex cache; slow path must resolve it.
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break;
    default:
      LOG(FATAL) << "UNREACHABLE";  // Assignment below to avoid GCC warnings.
      is_primitive = true;
      is_set = true;
      is_static = true;
      break;
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check class is initialized else fail so that we can contend to initialize the class with
    // other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  // Primitive-ness and field width must match what the caller's bytecode expects.
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}
530
531// Fast path method resolution that can't throw exceptions.
532static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
533 mirror::Object* this_object,
534 mirror::ArtMethod* referrer,
Ian Rogerse5877a12014-07-16 12:06:35 -0700535 bool access_check, InvokeType type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700536 bool is_direct = type == kStatic || type == kDirect;
537 if (UNLIKELY(this_object == NULL && !is_direct)) {
538 return NULL;
539 }
540 mirror::ArtMethod* resolved_method =
541 referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
542 if (UNLIKELY(resolved_method == NULL)) {
543 return NULL;
544 }
545 if (access_check) {
546 // Check for incompatible class change errors and access.
547 bool icce = resolved_method->CheckIncompatibleClassChange(type);
548 if (UNLIKELY(icce)) {
549 return NULL;
550 }
551 mirror::Class* methods_class = resolved_method->GetDeclaringClass();
552 mirror::Class* referring_class = referrer->GetDeclaringClass();
553 if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
554 !referring_class->CanAccessMember(methods_class,
555 resolved_method->GetAccessFlags()))) {
556 // Potential illegal access, may need to refine the method's class.
557 return NULL;
558 }
559 }
560 if (type == kInterface) { // Most common form of slow path dispatch.
561 return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
562 } else if (is_direct) {
563 return resolved_method;
564 } else if (type == kSuper) {
Mingyao Yang2cdbad72014-07-16 10:44:41 -0700565 return referrer->GetDeclaringClass()->GetSuperClass()
566 ->GetVTableEntry(resolved_method->GetMethodIndex());
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700567 } else {
568 DCHECK(type == kVirtual);
Mingyao Yang2cdbad72014-07-16 10:44:41 -0700569 return this_object->GetClass()->GetVTableEntry(resolved_method->GetMethodIndex());
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700570 }
571}
572
// Resolves |type_idx| to a class, optionally verifies that |referrer| may access it, and
// (when |can_run_clinit|) ensures the class is initialized. Returns nullptr with a pending
// exception on failure, signaling the caller to deliver it.
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  // Hold the class in a handle: EnsureInitialized can trigger GC, which may move it.
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}
608
609static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
Ian Rogerse5877a12014-07-16 12:06:35 -0700610 uint32_t string_idx) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700611 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
612 return class_linker->ResolveString(string_idx, referrer);
613}
614
Ian Rogerse5877a12014-07-16 12:06:35 -0700615static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700616 // Save any pending exception over monitor exit call.
617 mirror::Throwable* saved_exception = NULL;
618 ThrowLocation saved_throw_location;
619 bool is_exception_reported = self->IsExceptionReportedToInstrumentation();
620 if (UNLIKELY(self->IsExceptionPending())) {
621 saved_exception = self->GetException(&saved_throw_location);
622 self->ClearException();
623 }
624 // Decode locked object and unlock, before popping local references.
625 self->DecodeJObject(locked)->MonitorExit(self);
626 if (UNLIKELY(self->IsExceptionPending())) {
627 LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
628 << saved_exception->Dump()
629 << "\nEncountered second exception during implicit MonitorExit:\n"
630 << self->GetException(NULL)->Dump();
631 }
632 // Restore pending exception.
633 if (saved_exception != NULL) {
634 self->SetException(saved_throw_location, saved_exception);
635 self->SetExceptionReportedToInstrumentation(is_exception_reported);
636 }
637}
638
Ian Rogerse5877a12014-07-16 12:06:35 -0700639static inline void CheckSuspend(Thread* thread) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700640 for (;;) {
641 if (thread->ReadFlag(kCheckpointRequest)) {
642 thread->RunCheckpointFunction();
643 } else if (thread->ReadFlag(kSuspendRequest)) {
644 thread->FullSuspendCheck();
645 } else {
646 break;
647 }
648 }
649}
650
// Converts a floating-point value to an integral type with Java-style saturation:
// NaN maps to 0, values at or beyond the integral range clamp to min/max, and everything
// else truncates toward zero.
template <typename INT_TYPE, typename FLOAT_TYPE>
static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (f != f) {  // f != f implies NaN.
    return 0;
  }
  if (f <= kMinIntAsFloat) {
    return kMinInt;  // Saturate low (includes -infinity).
  }
  if (f >= kMaxIntAsFloat) {
    return kMaxInt;  // Saturate high (includes +infinity).
  }
  return static_cast<INT_TYPE>(f);
}
667
668} // namespace art
669
670#endif // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_