/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "handle_scope-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/art_method.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "thread.h"

namespace art {

template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       mirror::ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      ThrowLocation throw_location = self->GetCurrentLocationForThrow();
      self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
                              PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}
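
// Note on the contract above: CheckObjectAlloc reports *slow_path = true whenever it had to leave
// the fast path -- resolving the type, failing a check, or initializing the class. Resolution and
// initialization can suspend the thread, so a caller must then re-read the heap's current
// allocator (see AllocObjectFromCode below) instead of trusting the allocator type it was compiled
// against, and must null-check the returned class.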

ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When the verifier or compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                           mirror::ArtMethod* method,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}
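
// Illustrative only: a runtime allocation entrypoint is expected to be a thin wrapper around the
// helper above, instantiated for the checks it needs. The name below is hypothetical; the real
// entrypoints live in the quick entrypoint files.
//
//   extern "C" mirror::Object* artAllocObjectFromCodeExample(
//       uint32_t type_idx, mirror::ArtMethod* method, Thread* self)
//       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
//     // kAccessCheck = true (not verified), kInstrumented = false (no allocation instrumentation).
//     return AllocObjectFromCode<true, false>(type_idx, method, self,
//                                             Runtime::Current()->GetHeap()->GetCurrentAllocator());
//   }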

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}


template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                      int32_t component_count,
                                      mirror::ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve.
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error.
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier or compiler hasn't been able to verify access, optionally perform an access
// check.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                         int32_t component_count,
                                         mirror::ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc may have suspended, so re-read the current allocator.
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               klass->GetComponentSizeShift(),
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}
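
// Illustrative only: GetComponentSizeShift() is log2 of the component size, so the array
// allocation above sizes the array by shifting rather than multiplying. For example, a resolved
// int[] class has 4-byte components and a shift of 2, while a long[] class has 8-byte components
// and a shift of 3.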

template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 mirror::ArtMethod* method,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template<FindFieldType type, bool access_check>
inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                           Thread* self, size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite:  // Keep GCC happy by having a default handler, fall-through.
    default: is_primitive = true; is_set = true; is_static = true; break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zu-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields must be accessed on an already initialized class.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}
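
// Illustrative only: a field-access entrypoint would instantiate the helper with the access type
// it was generated for and the byte size it expects, e.g. a 32-bit primitive static read:
//
//   mirror::ArtField* field = FindFieldFromCode<StaticPrimitiveRead, true>(
//       field_idx, referrer, self, sizeof(int32_t));
//   if (LIKELY(field != nullptr)) {
//     return field->Get32(field->GetDeclaringClass());
//   }
//
// The sketch only demonstrates the template parameters and the expected_size contract (a size in
// bytes); the actual read performed by the real entrypoints is not shown here.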

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size)

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

template<InvokeType type, bool access_check>
inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                             mirror::Object** this_object,
                                             mirror::ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK_EQ(*referrer, throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index);
    }
    case kSuper: {
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check) {
        // Check existence of super class.
        if (super_class == nullptr || !super_class->HasVTable() ||
            vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
      }
      DCHECK(super_class->HasVTable());
      return super_class->GetVTableEntry(vtable_index);
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index);
      if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) {
        if (kIsDebugBuild) {
          mirror::Class* klass = (*this_object)->GetClass();
          mirror::ArtMethod* method = klass->FindVirtualMethodForInterface(resolved_method);
          CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
              PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
              PrettyClass(klass);
        }
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}
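
// Illustrative only: for an interface call the helper indexes the embedded interface method table
// (IMT) with the resolved method's dex method index modulo mirror::Class::kImtSize. Two distinct
// interface methods can therefore hash to the same slot; such a slot holds the IMT conflict
// method, and dispatch then falls back to the slower FindVirtualMethodForInterface lookup above.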

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                              mirror::Object** this_object, \
                                                              mirror::ArtMethod** referrer, \
                                                              Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                       mirror::ArtMethod* referrer,
                                       FindFieldType type, size_t expected_size) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check the class is initialized, else fail so that we can contend to initialize the class
    // with other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}
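
// Illustrative only: a nullptr result from FindFieldFast is not an error, it simply means "take
// the slow path". Callers are expected to fall back to FindFieldFromCode, which can resolve the
// field, initialize its class, and throw the appropriate exception if the access is invalid.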

// Fast path method resolution that can't throw exceptions.
inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
                                         mirror::Object* this_object,
                                         mirror::ArtMethod* referrer,
                                         bool access_check, InvokeType type) {
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  mirror::ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return nullptr;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return nullptr;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    return referrer->GetDeclaringClass()->GetSuperClass()
        ->GetVTableEntry(resolved_method->GetMethodIndex());
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(resolved_method->GetMethodIndex());
  }
}
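
// Illustrative only: as with FindFieldFast, a nullptr here just signals that the caller should
// fall back to the FindMethodFromCode slow path, which performs resolution and throws
// NullPointerException, IncompatibleClassChangeError, or NoSuchMethodError as appropriate.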

inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                             mirror::ArtMethod* referrer,
                                             Thread* self, bool can_run_clinit,
                                             bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}
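
// Illustrative only: the two flags select the caller's semantics. A const-class style use passes
// can_run_clinit = false because merely naming a type must not run <clinit> (see the comment
// above), while a caller that is about to touch the class's static storage passes true so the
// class is initialized first; verify_access is false when the verifier has already proven the
// reference legal.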

inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
                                             uint32_t string_idx) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over the monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  ThrowLocation saved_throw_location;
  bool is_exception_reported = self->IsExceptionReportedToInstrumentation();
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException(&saved_throw_location);
    self->ClearException();
  }
  // Decode the locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException(nullptr)->Dump();
  }
  // Restore the pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_throw_location, saved_exception);
    self->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}
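
// Illustrative only: this mirrors the Java semantics of a synchronized method. The monitor must be
// released even when the method is exiting with a pending exception, and the unlock itself can
// raise an exception (e.g. IllegalMonitorStateException), so the original exception is saved, the
// unlock is performed, and the original exception is restored afterwards; a second exception
// during the implicit MonitorExit is treated as fatal.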

template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN.
  }
}
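
// Illustrative only: this reproduces the Java floating-point-to-integral conversion rules rather
// than relying on the raw C++ cast, whose behavior is undefined for out-of-range values. For
// example, with INT_TYPE = int32_t: art_float_to_integral<int32_t, float>(1e10f) yields INT32_MAX,
// art_float_to_integral<int32_t, float>(-1e10f) yields INT32_MIN, and a NaN input yields 0,
// matching the dex f2i semantics.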

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_