/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include <limits>

#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/art_method.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "object_utils.h"
#include "handle_scope-inl.h"
#include "thread.h"

namespace art {

// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <const bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                                            mirror::ArtMethod* method,
                                                            Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
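  // Note: every path below that may resolve a type, throw, or suspend also sets *slow_path to
  // true, so the caller knows to re-check the current allocator and to null-check the result
  // (see the comment on class initialization further down).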
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == nullptr)) {
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      ThrowLocation throw_location = self->GetCurrentLocationForThrow();
      self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
                              PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (which runs the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_klass.Get();
  }
  return klass;
}

// TODO: Fix no thread safety analysis when annotalysis is smarter.
ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                                               Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (which runs the class initializer) might cause a GC and
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When the verifier/compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                                                mirror::ArtMethod* method,
                                                                Thread* self,
                                                                gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
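    // The slow path may have suspended (see CheckObjectAlloc), so the allocator could have been
    // switched in the meantime; re-read the current allocator rather than using allocator_type.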
    return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and a resolved class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                                        mirror::ArtMethod* method,
                                                                        Thread* self,
                                                                        gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object can not be finalizable.
    return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object can not be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                                           mirror::ArtMethod* method,
                                                                           Thread* self,
                                                                           gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  // Pass in false since the object can not be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}


// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                                           mirror::ArtMethod* method,
                                                           int32_t component_count,
                                                           bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier/compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                                              mirror::ArtMethod* method,
                                                              int32_t component_count,
                                                              Thread* self,
                                                              gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               klass->GetComponentSize(),
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSize(), allocator_type);
}

template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                                      mirror::ArtMethod* method,
                                                                      int32_t component_count,
                                                                      Thread* self,
                                                                      gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSize(), allocator_type);
}

template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
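  // For example, StaticObjectWrite decodes to is_primitive = false, is_set = true,
  // is_static = true; these flags drive the resolution and access checks below.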
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
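        // (32 / sizeof(int32_t)) == 8, so the message below reports expected_size (a byte count)
        // in bits.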
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields can only be accessed via an existing instance, so the class is already
    // initialized.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(h_class, true, true))) {
        // The class is now initialized, so the resolved field can be used.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

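// Each line below instantiates both access_check variants; for example,
// EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead) expands to explicit
// instantiations of FindFieldFromCode<InstanceObjectRead, false> and
// FindFieldFromCode<InstanceObjectRead, true>.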
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                                    mirror::Object** this_object,
                                                    mirror::ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer, type);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK_EQ(*referrer, throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::ObjectArray<mirror::ArtMethod>* vtable = (*this_object)->GetClass()->GetVTable();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kSuper: {
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      mirror::ObjectArray<mirror::ArtMethod>* vtable;
      if (access_check) {
        // Check existence of super class.
        vtable = (super_class != nullptr) ? super_class->GetVTable() : nullptr;
        if (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
        vtable = super_class->GetVTable();
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kInterface: {
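      // Interface dispatch: first probe the embedded interface method table (IMT), which is
      // indexed by the method's dex index modulo kImtSize; on a conflict entry, fall back to
      // searching the receiver's class for the interface method.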
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index);
      if (!imt_method->IsImtConflictMethod()) {
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                              mirror::Object** this_object, \
                                                              mirror::ArtMethod** referrer, \
                                                              Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
  EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                              mirror::ArtMethod* referrer,
                                              FindFieldType type, size_t expected_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";  // Assignment below to avoid GCC warnings.
      is_primitive = true;
      is_set = true;
      is_static = true;
      break;
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check that the class is initialized; otherwise fail so that the slow path can contend with
    // other threads that may be racing to initialize it.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
                                                mirror::Object* this_object,
                                                mirror::ArtMethod* referrer,
                                                bool access_check, InvokeType type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  bool is_direct = type == kStatic || type == kDirect;
  if (UNLIKELY(this_object == NULL && !is_direct)) {
    return NULL;
  }
  mirror::ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
  if (UNLIKELY(resolved_method == NULL)) {
    return NULL;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return NULL;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return NULL;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
  } else if (is_direct) {
    return resolved_method;
  } else if (type == kSuper) {
    return referrer->GetDeclaringClass()->GetSuperClass()->GetVTable()->
        Get(resolved_method->GetMethodIndex());
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTable()->Get(resolved_method->GetMethodIndex());
  }
}

static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
                                                    uint32_t string_idx)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
    NO_THREAD_SAFETY_ANALYSIS /* SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) */ {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = NULL;
  ThrowLocation saved_throw_location;
  bool is_exception_reported = self->IsExceptionReportedToInstrumentation();
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException(&saved_throw_location);
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException(NULL)->Dump();
  }
  // Restore pending exception.
  if (saved_exception != NULL) {
    self->SetException(saved_throw_location, saved_exception);
    self->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

static inline void CheckReferenceResult(mirror::Object* o, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  if (o == NULL) {
    return;
  }
  mirror::ArtMethod* m = self->GetCurrentMethod(NULL);
  if (o == kInvalidIndirectRefObject) {
    JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str());
  }
  // Make sure that the result is an instance of the type this method was expected to return.
  StackHandleScope<1> hs(self);
  Handle<mirror::ArtMethod> h_m(hs.NewHandle(m));
  mirror::Class* return_type = MethodHelper(h_m).GetReturnType();

  if (!o->InstanceOf(return_type)) {
    JniAbortF(NULL, "attempt to return an instance of %s from %s", PrettyTypeOf(o).c_str(),
              PrettyMethod(h_m.Get()).c_str());
  }
}

static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
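  // Keep re-reading the flags: servicing a checkpoint or a suspend request may leave the other
  // flag set, so only fall through once neither request is pending.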
  for (;;) {
    if (thread->ReadFlag(kCheckpointRequest)) {
      thread->RunCheckpointFunction();
    } else if (thread->ReadFlag(kSuspendRequest)) {
      thread->FullSuspendCheck();
    } else {
      break;
    }
  }
}

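// Converts a floating point value to an integral type, saturating at the integral type's min/max
// and mapping NaN to 0 (matching Java's narrowing conversion rules). For example, assuming a
// 32-bit int, art_float_to_integral<int32_t, float>(1e10f) yields kMaxInt and a NaN input
// yields 0.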
template <typename INT_TYPE, typename FLOAT_TYPE>
static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_