blob: 526fb8d11f5354aa7a4fbcb4396ebb8816a10465 [file] [log] [blame]
Mingyao Yang98d1cc82014-05-15 17:02:16 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
18#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
19
20#include "entrypoint_utils.h"
21
22#include "class_linker-inl.h"
23#include "common_throws.h"
24#include "dex_file.h"
Vladimir Marko5ea536a2015-04-20 20:11:30 +010025#include "entrypoints/quick/callee_save_frame.h"
26#include "handle_scope-inl.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070027#include "indirect_reference_table.h"
28#include "invoke_type.h"
29#include "jni_internal.h"
30#include "mirror/art_method.h"
31#include "mirror/array.h"
32#include "mirror/class-inl.h"
33#include "mirror/object-inl.h"
34#include "mirror/throwable.h"
Vladimir Marko5ea536a2015-04-20 20:11:30 +010035#include "nth_caller_visitor.h"
36#include "runtime.h"
Mingyao Yang98d1cc82014-05-15 17:02:16 -070037#include "thread.h"
38
39namespace art {
40
// Returns the ArtMethod referenced by |method_index| from |outer_method|'s dex cache,
// resolving it through the class linker when the cache still holds a runtime method
// (i.e. the resolution stub). May return nullptr if resolution fails, with an
// exception pending on the current thread.
inline mirror::ArtMethod* GetResolvedMethod(mirror::ArtMethod* outer_method,
                                            uint32_t method_index,
                                            InvokeType invoke_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtMethod* caller = outer_method->GetDexCacheResolvedMethod(method_index);
  if (!caller->IsRuntimeMethod()) {
    // Fast path: the dex cache already holds the real method.
    return caller;
  }

  // The method in the dex cache can be the runtime method responsible for invoking
  // the stub that will then update the dex cache. Therefore, we need to do the
  // resolution ourselves.

  // ResolveMethod may allocate and therefore trigger a GC: keep everything we
  // still need afterwards in handles so the references survive a collection.
  StackHandleScope<3> hs(Thread::Current());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  Handle<mirror::ArtMethod> outer(hs.NewHandle(outer_method));
  Handle<mirror::ClassLoader> class_loader(hs.NewHandle(outer->GetClassLoader()));
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(outer->GetDexCache()));
  Handle<mirror::ArtMethod> referrer;  // Deliberately left null.
  return class_linker->ResolveMethod(
      *outer->GetDexFile(), method_index, dex_cache, class_loader, referrer, invoke_type);
}
63
// Given the stack pointer |sp| of a callee-save frame of kind |type|, returns the ArtMethod
// of the Java caller. If the caller was compiled by the optimizing compiler and the call site
// was inlined, the innermost inlined method is returned instead of the outer method.
// With |do_caller_check| set, debug builds cross-check the result against a real stack walk.
inline mirror::ArtMethod* GetCalleeSaveMethodCaller(StackReference<mirror::ArtMethod>* sp,
                                                    Runtime::CalleeSaveType type,
                                                    bool do_caller_check = false)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  // |sp| must point at the callee-save method for this frame kind.
  DCHECK_EQ(sp->AsMirrorPtr(), Runtime::Current()->GetCalleeSaveMethod(type));

  // Skip over the callee-save frame to reach the caller's frame.
  const size_t callee_frame_size = GetCalleeSaveFrameSize(kRuntimeISA, type);
  auto* caller_sp = reinterpret_cast<StackReference<mirror::ArtMethod>*>(
      reinterpret_cast<uintptr_t>(sp) + callee_frame_size);
  mirror::ArtMethod* outer_method = caller_sp->AsMirrorPtr();
  mirror::ArtMethod* caller = outer_method;

  if ((outer_method != nullptr) && outer_method->IsOptimized(sizeof(void*))) {
    // Read the return PC stored in the callee-save frame to locate the call site
    // inside the caller's compiled code.
    const size_t callee_return_pc_offset = GetCalleeSaveReturnPcOffset(kRuntimeISA, type);
    uintptr_t caller_pc = *reinterpret_cast<uintptr_t*>(
        (reinterpret_cast<uint8_t*>(sp) + callee_return_pc_offset));
    uintptr_t native_pc_offset = outer_method->NativeQuickPcOffset(caller_pc);
    CodeInfo code_info = outer_method->GetOptimizedCodeInfo();
    StackMap stack_map = code_info.GetStackMapForNativePcOffset(native_pc_offset);
    DCHECK(stack_map.IsValid());
    if (stack_map.HasInlineInfo(code_info)) {
      // The call site was inlined: the true caller is the deepest inlined method.
      InlineInfo inline_info = code_info.GetInlineInfoOf(stack_map);
      uint32_t method_index = inline_info.GetMethodIndexAtDepth(inline_info.GetDepth() - 1);
      InvokeType invoke_type = static_cast<InvokeType>(
          inline_info.GetInvokeTypeAtDepth(inline_info.GetDepth() - 1));
      caller = GetResolvedMethod(outer_method, method_index, invoke_type);
    }
  }

  if (kIsDebugBuild && do_caller_check) {
    // Note that do_caller_check is optional, as this method can be called by
    // stubs, and tests without a proper call stack.
    NthCallerVisitor visitor(Thread::Current(), 1, true);
    visitor.WalkStack();
    CHECK_EQ(caller, visitor.caller);
  }

  return caller;
}
103
Nicolas Geoffray7ea6a172015-05-19 18:58:54 +0100104inline mirror::ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
105 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
106 return GetCalleeSaveMethodCaller(
107 self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */);
108}
109
// Resolves (and, if needed, initializes) the class referenced by |type_idx| so that an
// instance can be allocated. Returns the class on success; returns nullptr on failure with
// an exception pending (in which case *slow_path is also set). *slow_path is set to true
// whenever resolution or initialization work happened, because the thread may have suspended
// and the caller must re-check the allocator type and null-check the result.
template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       mirror::ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {
    // Not in the dex cache yet: resolve through the class linker (may suspend or throw).
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    // Verifier/compiler could not prove access statically; check at runtime.
    if (UNLIKELY(!klass->IsInstantiable())) {
      self->ThrowNewException("Ljava/lang/InstantiationError;", PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC.
    // may cause us to suspend meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    // Re-read through the handle in case the GC moved the class object.
    return h_klass.Get();
  }
  return klass;
}
161
// Ensures an already-resolved |klass| is initialized so an instance can be allocated.
// Returns the class on success, or nullptr on failure with an exception pending.
// Sets *slow_path when initialization work was needed, because the thread may have
// suspended and the caller must re-check the allocator type and null-check the result.
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC.
    // may cause us to suspend meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    // Re-read through the handle in case the GC moved the class object.
    return h_class.Get();
  }
  return klass;
}
186
187// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
188// cannot be resolved, throw an error. If it can, use it to create an instance.
189// When verification/compiler hasn't been able to verify access, optionally perform an access
190// check.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700191template <bool kAccessCheck, bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700192ALWAYS_INLINE
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800193inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
194 mirror::ArtMethod* method,
195 Thread* self,
196 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700197 bool slow_path = false;
198 mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
199 if (UNLIKELY(slow_path)) {
200 if (klass == nullptr) {
201 return nullptr;
202 }
203 return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
204 }
205 DCHECK(klass != nullptr);
206 return klass->Alloc<kInstrumented>(self, allocator_type);
207}
208
209// Given the context of a calling Method and a resolved class, create an instance.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700210template <bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700211ALWAYS_INLINE
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800212inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
213 Thread* self,
214 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700215 DCHECK(klass != nullptr);
216 bool slow_path = false;
217 klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
218 if (UNLIKELY(slow_path)) {
219 if (klass == nullptr) {
220 return nullptr;
221 }
222 gc::Heap* heap = Runtime::Current()->GetHeap();
223 // Pass in false since the object can not be finalizable.
224 return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
225 }
226 // Pass in false since the object can not be finalizable.
227 return klass->Alloc<kInstrumented, false>(self, allocator_type);
228}
229
230// Given the context of a calling Method and an initialized class, create an instance.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700231template <bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700232ALWAYS_INLINE
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800233inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
234 Thread* self,
235 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700236 DCHECK(klass != nullptr);
237 // Pass in false since the object can not be finalizable.
238 return klass->Alloc<kInstrumented, false>(self, allocator_type);
239}
240
241
// Resolves the array class referenced by |type_idx| and checks that an array of
// |component_count| elements may be allocated. Returns the array class on success, or
// nullptr on failure with an exception pending (in which case *slow_path is also set).
// *slow_path is set whenever resolution happened, since it may have suspended the thread.
template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                      int32_t component_count,
                                      mirror::ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    // Verifier/compiler could not prove access statically; check at runtime.
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}
273
274// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
275// it cannot be resolved, throw an error. If it can, use it to create an array.
276// When verification/compiler hasn't been able to verify access, optionally perform an access
277// check.
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700278template <bool kAccessCheck, bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700279ALWAYS_INLINE
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800280inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800281 int32_t component_count,
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800282 mirror::ArtMethod* method,
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800283 Thread* self,
284 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700285 bool slow_path = false;
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800286 mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700287 &slow_path);
288 if (UNLIKELY(slow_path)) {
289 if (klass == nullptr) {
290 return nullptr;
291 }
292 gc::Heap* heap = Runtime::Current()->GetHeap();
293 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
Hiroshi Yamauchif0edfc32014-09-25 11:46:46 -0700294 klass->GetComponentSizeShift(),
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700295 heap->GetCurrentAllocator());
296 }
297 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
Hiroshi Yamauchif0edfc32014-09-25 11:46:46 -0700298 klass->GetComponentSizeShift(), allocator_type);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700299}
300
301template <bool kAccessCheck, bool kInstrumented>
Hiroshi Yamauchieb1e9292014-08-06 12:41:15 -0700302ALWAYS_INLINE
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800303inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800304 int32_t component_count,
Andreas Gampe1cc7dba2014-12-17 18:43:01 -0800305 mirror::ArtMethod* method,
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800306 Thread* self,
307 gc::AllocatorType allocator_type) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700308 DCHECK(klass != nullptr);
309 if (UNLIKELY(component_count < 0)) {
310 ThrowNegativeArraySizeException(component_count);
311 return nullptr; // Failure
312 }
313 if (kAccessCheck) {
314 mirror::Class* referrer = method->GetDeclaringClass();
315 if (UNLIKELY(!referrer->CanAccess(klass))) {
316 ThrowIllegalAccessErrorClass(referrer, klass);
317 return nullptr; // Failure
318 }
319 }
320 // No need to retry a slow-path allocation as the above code won't cause a GC or thread
321 // suspension.
322 return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
Hiroshi Yamauchif0edfc32014-09-25 11:46:46 -0700323 klass->GetComponentSizeShift(), allocator_type);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700324}
325
// Slow-path field resolution for the quick entrypoints. Resolves |field_idx| relative to
// |referrer|, optionally performing access/compatibility checks, and (for static fields)
// ensures the declaring class is initialized. Returns the field on success, or nullptr on
// failure with an exception pending.
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                   Thread* self, size_t expected_size) {
  // Decompose the compile-time access type into its three orthogonal properties.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: // Keep GCC happy by having a default handler, fall-through.
    default: is_primitive = true; is_set = true; is_static = true; break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    // Static/instance mismatch is an incompatible class change.
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Writes to final fields are only allowed from the declaring class itself.
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      // The access site's primitive-ness and width must match the resolved field.
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // instance fields must be being accessed on an initialized class
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind
      return nullptr;  // Failure.
    }
  }
}
395
// Explicit template declarations of FindFieldFromCode for all field access types.
// These force an instantiation to be emitted for every (FindFieldType, access_check)
// combination, so out-of-line callers in other translation units can link against them.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  mirror::ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size) \

// Instantiate both the checked and unchecked variant for a given access type.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL
418
// Slow-path method resolution for the quick entrypoints. Resolves |method_idx| relative to
// |*referrer|, optionally performs access/compatibility checks, and dispatches according to
// the static invoke |type| (vtable lookup for virtual/super, IMT/interface-table lookup for
// interface calls). Returns the target method, or nullptr on failure with an exception
// pending. |this_object| and |referrer| are passed by pointer so they can be updated if a GC
// occurs during resolution.
template<InvokeType type, bool access_check>
inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                             mirror::Object** this_object,
                                             mirror::ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer);
  if (resolved_method == nullptr) {
    // Not yet resolved: protect |this_object| across the (possibly GC-triggering)
    // resolution. Static calls have no receiver, so wrap a dummy null instead.
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowNullPointerExceptionForMethodAccess(method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  // Dispatch on the (compile-time constant) invoke type.
  switch (type) {
    case kStatic:
    case kDirect:
      // No further lookup needed: the resolved method is the target.
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index);
    }
    case kSuper: {
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check) {
        // Check existence of super class.
        if (super_class == nullptr || !super_class->HasVTable() ||
            vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
      }
      DCHECK(super_class->HasVTable());
      return super_class->GetVTableEntry(vtable_index);
    }
    case kInterface: {
      // Try the interface method table (IMT) first; fall back to a full interface
      // table search when the IMT slot is a conflict or unimplemented marker.
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      mirror::ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(imt_index);
      if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) {
        if (kIsDebugBuild) {
          // Cross-check the IMT hit against a full interface-table lookup.
          mirror::Class* klass = (*this_object)->GetClass();
          mirror::ArtMethod* method = klass->FindVirtualMethodForInterface(resolved_method);
          CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
              PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
              PrettyClass(klass);
        }
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}
522
// Explicit template declarations of FindMethodFromCode for all invoke types.
// These force an instantiation to be emitted for every (InvokeType, access_check)
// combination, so out-of-line callers in other translation units can link against them.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                              mirror::Object** this_object, \
                                                              mirror::ArtMethod** referrer, \
                                                              Thread* self)
// Instantiate both the checked and unchecked variant for a given invoke type.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL
542
// Fast path field resolution that can't initialize classes or throw exceptions.
// Returns the field only if it is already resolved in the referrer's dex cache and every
// check (static-ness, initialization for statics, access, primitive-ness, width) passes;
// otherwise returns nullptr so the caller falls back to the slow path.
inline ArtField* FindFieldFast(uint32_t field_idx,
                               mirror::ArtMethod* referrer,
                               FindFieldType type, size_t expected_size) {
  ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*));
  if (UNLIKELY(resolved_field == nullptr)) {
    // Not resolved yet; resolution may throw, so defer to the slow path.
    return nullptr;
  }
  // Check for incompatible class change.
  // Decompose the access type into its three orthogonal properties.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check class is initialized else fail so that we can contend to initialize the class with
    // other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  // The access site's primitive-ness and width must match the resolved field.
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}
595
596// Fast path method resolution that can't throw exceptions.
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800597inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
598 mirror::Object* this_object,
599 mirror::ArtMethod* referrer,
600 bool access_check, InvokeType type) {
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700601 if (UNLIKELY(this_object == nullptr && type != kStatic)) {
602 return nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700603 }
604 mirror::ArtMethod* resolved_method =
605 referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700606 if (UNLIKELY(resolved_method == nullptr)) {
607 return nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700608 }
609 if (access_check) {
610 // Check for incompatible class change errors and access.
611 bool icce = resolved_method->CheckIncompatibleClassChange(type);
612 if (UNLIKELY(icce)) {
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700613 return nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700614 }
615 mirror::Class* methods_class = resolved_method->GetDeclaringClass();
616 mirror::Class* referring_class = referrer->GetDeclaringClass();
617 if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
618 !referring_class->CanAccessMember(methods_class,
619 resolved_method->GetAccessFlags()))) {
620 // Potential illegal access, may need to refine the method's class.
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700621 return nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700622 }
623 }
624 if (type == kInterface) { // Most common form of slow path dispatch.
625 return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
Jeff Hao207a37d2014-10-29 17:24:25 -0700626 } else if (type == kStatic || type == kDirect) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700627 return resolved_method;
628 } else if (type == kSuper) {
Mingyao Yang2cdbad72014-07-16 10:44:41 -0700629 return referrer->GetDeclaringClass()->GetSuperClass()
630 ->GetVTableEntry(resolved_method->GetMethodIndex());
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700631 } else {
632 DCHECK(type == kVirtual);
Mingyao Yang2cdbad72014-07-16 10:44:41 -0700633 return this_object->GetClass()->GetVTableEntry(resolved_method->GetMethodIndex());
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700634 }
635}
636
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800637inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700638 mirror::ArtMethod* referrer,
639 Thread* self, bool can_run_clinit,
Ian Rogerse5877a12014-07-16 12:06:35 -0700640 bool verify_access) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700641 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
642 mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
643 if (UNLIKELY(klass == nullptr)) {
644 CHECK(self->IsExceptionPending());
645 return nullptr; // Failure - Indicate to caller to deliver exception
646 }
647 // Perform access check if necessary.
648 mirror::Class* referring_class = referrer->GetDeclaringClass();
649 if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
650 ThrowIllegalAccessErrorClass(referring_class, klass);
651 return nullptr; // Failure - Indicate to caller to deliver exception
652 }
653 // If we're just implementing const-class, we shouldn't call <clinit>.
654 if (!can_run_clinit) {
655 return klass;
656 }
657 // If we are the <clinit> of this class, just return our storage.
658 //
659 // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
660 // running.
661 if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
662 return klass;
663 }
664 StackHandleScope<1> hs(self);
665 Handle<mirror::Class> h_class(hs.NewHandle(klass));
Ian Rogers7b078e82014-09-10 14:44:24 -0700666 if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700667 CHECK(self->IsExceptionPending());
668 return nullptr; // Failure - Indicate to caller to deliver exception
669 }
670 return h_class.Get();
671}
672
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800673inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
674 uint32_t string_idx) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700675 ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
676 return class_linker->ResolveString(string_idx, referrer);
677}
678
Andreas Gampe9f612ff2014-11-24 13:42:22 -0800679inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700680 // Save any pending exception over monitor exit call.
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700681 mirror::Throwable* saved_exception = nullptr;
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700682 if (UNLIKELY(self->IsExceptionPending())) {
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000683 saved_exception = self->GetException();
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700684 self->ClearException();
685 }
686 // Decode locked object and unlock, before popping local references.
687 self->DecodeJObject(locked)->MonitorExit(self);
688 if (UNLIKELY(self->IsExceptionPending())) {
689 LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
690 << saved_exception->Dump()
691 << "\nEncountered second exception during implicit MonitorExit:\n"
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000692 << self->GetException()->Dump();
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700693 }
694 // Restore pending exception.
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700695 if (saved_exception != nullptr) {
Nicolas Geoffray14691c52015-03-05 10:40:17 +0000696 self->SetException(saved_exception);
Mingyao Yang98d1cc82014-05-15 17:02:16 -0700697 }
698}
699
// Converts a floating point value to the integral type, saturating at the
// integral type's min/max and mapping NaN to zero.
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (f != f) {
    return 0;  // NaN compares unequal to itself.
  }
  if (!(f > kMinIntAsFloat)) {
    return kMinInt;  // Saturate at the low end (includes -infinity).
  }
  if (!(f < kMaxIntAsFloat)) {
    return kMaxInt;  // Saturate at the high end (includes +infinity).
  }
  return static_cast<INT_TYPE>(f);  // In range: plain truncation.
}
716
717} // namespace art
718
719#endif // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_