/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_

#include "entrypoint_utils.h"

#include <limits>  // For std::numeric_limits, used by art_float_to_integral().

#include "art_method.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "entrypoints/quick/callee_save_frame.h"
#include "handle_scope-inl.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "nth_caller_visitor.h"
#include "runtime.h"
#include "stack_map.h"
#include "thread.h"

namespace art {

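// Walks the inline info of the outermost frame and returns the ArtMethod executing at the given
// inlining depth. If kResolve is false and the method is not already in the outer method's dex
// cache, null is returned instead of resolving it through the class linker.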
template <bool kResolve = true>
inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method,
                                    const InlineInfo& inline_info,
                                    uint8_t inlining_depth)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  uint32_t method_index = inline_info.GetMethodIndexAtDepth(inlining_depth);
  InvokeType invoke_type = static_cast<InvokeType>(
      inline_info.GetInvokeTypeAtDepth(inlining_depth));
  ArtMethod* caller = outer_method->GetDexCacheResolvedMethod(method_index, sizeof(void*));
  if (!caller->IsRuntimeMethod()) {
    return caller;
  }
  if (!kResolve) {
    return nullptr;
  }

  // The method in the dex cache can be the runtime method responsible for invoking
  // the stub that will then update the dex cache. Therefore, we need to do the
  // resolution ourselves.

  // We first find the class loader of our caller. If the caller is the outer method, we can
  // directly use its class loader. Otherwise, we also need to resolve our caller.
  StackHandleScope<2> hs(Thread::Current());
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  MutableHandle<mirror::ClassLoader> class_loader(hs.NewHandle<mirror::ClassLoader>(nullptr));
  Handle<mirror::DexCache> dex_cache(hs.NewHandle(outer_method->GetDexCache()));
  if (inlining_depth == 0) {
    class_loader.Assign(outer_method->GetClassLoader());
  } else {
    caller = GetResolvedMethod<kResolve>(outer_method, inline_info, inlining_depth - 1);
    class_loader.Assign(caller->GetClassLoader());
  }

  return class_linker->ResolveMethod<ClassLinker::kNoICCECheckForCache>(
      *outer_method->GetDexFile(), method_index, dex_cache, class_loader, nullptr, invoke_type);
}

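// Returns the caller of the callee-save frame that is currently on top of the given thread's
// quick stack, with the do_caller_check sanity check enabled.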
inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type)
    SHARED_REQUIRES(Locks::mutator_lock_) {
  return GetCalleeSaveMethodCaller(
      self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */);
}

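// Resolves and checks the class for a new-instance allocation: resolves the type if it is not
// yet in the dex cache, optionally performs instantiability and access checks, and ensures the
// class is initialized. Sets *slow_path whenever the caller must re-check the allocator type
// and null-check the returned class.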
template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      self->ThrowNewException("Ljava/lang/InstantiationError;", PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC. It may also
    // cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    return h_klass.Get();
  }
  return klass;
}

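// Ensures that an already-resolved class is initialized so that an instance can be allocated.
// Sets *slow_path when initialization had to be attempted; returns null with a pending
// exception if the initialization fails.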
ALWAYS_INLINE
inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                          Thread* self,
                                                          bool* slow_path) {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC. It may also
    // cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When the verifier/compiler hasn't been able to verify access, optionally perform an access
// check.
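//
// Illustrative sketch only (the surrounding entrypoint and the caller_method variable are
// assumptions, not declarations from this header): a new-instance quick entrypoint would
// typically drive this helper roughly as
//
//   mirror::Object* obj = AllocObjectFromCode</*kAccessCheck=*/true, /*kInstrumented=*/false>(
//       type_idx, caller_method, self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
//   if (UNLIKELY(obj == nullptr)) {
//     // An exception is pending on self; the caller must deliver it.
//   }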
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                           ArtMethod* method,
                                           Thread* self,
                                           gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    // CheckObjectAlloc can cause thread suspension which means we may now be instrumented.
    return klass->Alloc</*kInstrumented*/true>(
        self,
        Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and a resolved class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                   Thread* self,
                                                   gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object cannot be finalizable.
    // CheckClassInitializedForObjectAlloc can cause thread suspension which means we may now be
    // instrumented.
    return klass->Alloc</*kInstrumented*/true, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
template <bool kInstrumented>
ALWAYS_INLINE
inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                      Thread* self,
                                                      gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  // Pass in false since the object cannot be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

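// Resolves and checks the array class for a new-array allocation: verifies that the component
// count is non-negative, resolves the type if it is not yet in the dex cache, and optionally
// performs an access check. Sets *slow_path whenever the caller must re-check the allocator
// type and null-check the returned class.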
template <bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                      int32_t component_count,
                                      ArtMethod* method,
                                      bool* slow_path) {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve.
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class.
// If it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier/compiler hasn't been able to verify access, optionally perform an access
// check.
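//
// Illustrative sketch only (the surrounding entrypoint and the caller_method variable are
// assumptions): a new-array quick entrypoint could call this roughly as
//
//   mirror::Array* array = AllocArrayFromCode</*kAccessCheck=*/true, /*kInstrumented=*/false>(
//       type_idx, component_count, caller_method, self,
//       Runtime::Current()->GetHeap()->GetCurrentAllocator());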
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                         int32_t component_count,
                                         ArtMethod* method,
                                         Thread* self,
                                         gc::AllocatorType allocator_type) {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // CheckArrayAlloc can cause thread suspension which means we may now be instrumented.
    return mirror::Array::Alloc</*kInstrumented*/true>(self,
                                                       klass,
                                                       component_count,
                                                       klass->GetComponentSizeShift(),
                                                       heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE
inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                 int32_t component_count,
                                                 ArtMethod* method,
                                                 Thread* self,
                                                 gc::AllocatorType allocator_type) {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSizeShift(), allocator_type);
}

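// Slow-path field resolution used by the interpreter and compiled code. Resolves the field
// (through ResolveFieldJLS when access checks are requested), verifies static-ness, access,
// finality and size as needed, and ensures the declaring class is initialized for static
// accesses. Returns null with a pending exception on failure.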
template<FindFieldType type, bool access_check>
inline ArtField* FindFieldFromCode(uint32_t field_idx,
                                   ArtMethod* referrer,
                                   Thread* self,
                                   size_t expected_size) REQUIRES(!Roles::uninterruptible_) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite:  // Keep GCC happy by having a default handler, fall-through.
    default: is_primitive = true; is_set = true; is_static = true; break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();

  ArtField* resolved_field;
  if (access_check) {
    // Slow path: According to JLS 13.4.8, a linkage error may occur if the compile-time
    // qualifying type of a field and the resolved run-time qualifying type of a field differ
    // in their static-ness.
    //
    // In particular, don't assume the dex instruction already correctly knows if the
    // real field is static or not. The resolution must not be aware of this.
    ArtMethod* method = referrer->GetInterfaceMethodIfProxy(sizeof(void*));

    StackHandleScope<2> hs(self);
    Handle<mirror::DexCache> h_dex_cache(hs.NewHandle(method->GetDexCache()));
    Handle<mirror::ClassLoader> h_class_loader(hs.NewHandle(method->GetClassLoader()));

    resolved_field = class_linker->ResolveFieldJLS(*method->GetDexFile(),
                                                   field_idx,
                                                   h_dex_cache,
                                                   h_class_loader);
  } else {
    // Fast path: The verifier would already have called ResolveFieldJLS and we wouldn't
    // be executing here if there was a static/non-static mismatch.
    resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  }

  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields are necessarily accessed on an already-initialized class, so we are done.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) {
        // Otherwise let's ensure the class is initialized before resolving the field.
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE \
ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                  ArtMethod* referrer, \
                                                  Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

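// Slow-path method resolution for an invoke of the given type. Resolves the method, optionally
// performs access and incompatible-class-change checks, and then selects the actual call
// target: the resolved method itself for static/direct calls, a vtable entry for virtual
// calls, the superclass or interface target for super calls, and the IMT or interface target
// for interface calls. Returns null with a pending exception on failure.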
template<InvokeType type, bool access_check>
inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object,
                                     ArtMethod* referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, referrer);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    constexpr ClassLinker::ResolveMode resolve_mode =
        access_check ? ClassLinker::kForceICCECheck
                     : ClassLinker::kNoICCECheckForCache;
    resolved_method = class_linker->ResolveMethod<resolve_mode>(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    if (UNLIKELY(resolved_method->GetDeclaringClass()->IsStringClass() &&
                 resolved_method->IsConstructor())) {
      // Hack for String init:
      //
      // We assume that the input of String.<init> in verified code is always
      // an uninitialized reference. If it is a null constant, it must have been
      // optimized out by the compiler. Do not throw NullPointerException.
    } else {
      // Maintain interpreter-like semantics where NullPointerException is thrown
      // after potential NoSuchMethodError from class linker.
      ThrowNullPointerExceptionForMethodAccess(method_idx, type);
      return nullptr;  // Failure.
    }
  } else if (access_check) {
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    bool can_access_resolved_method =
        referrer->GetDeclaringClass()->CheckResolvedMethodAccess<type>(methods_class,
                                                                       resolved_method,
                                                                       method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        referrer);
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::Class* klass = (*this_object)->GetClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (!klass->HasVTable() ||
           vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(klass->HasVTable()) << PrettyClass(klass);
      return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
    }
    case kSuper: {
      // TODO: This lookup is quite slow.
      // NB: This is actually quite tricky to do any other way. We cannot use GetDeclaringClass
      // since that will actually not be what we want in some cases where there are miranda
      // methods or defaults. What we actually need is a GetContainingClass that says which
      // class's virtuals this method is coming from.
      mirror::Class* referring_class = referrer->GetDeclaringClass();
      uint16_t method_type_idx = referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
      mirror::Class* method_reference_class = class_linker->ResolveType(method_type_idx, referrer);
      if (UNLIKELY(method_reference_class == nullptr)) {
        // Bad type idx.
        CHECK(self->IsExceptionPending());
        return nullptr;
      } else if (!method_reference_class->IsInterface()) {
        // It is not an interface.
        mirror::Class* super_class = referring_class->GetSuperClass();
        uint16_t vtable_index = resolved_method->GetMethodIndex();
        if (access_check) {
          // Check existence of super class.
          if (super_class == nullptr || !super_class->HasVTable() ||
              vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) {
            // Behavior to agree with that of the verifier.
            ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                   resolved_method->GetName(), resolved_method->GetSignature());
            return nullptr;  // Failure.
          }
        }
        DCHECK(super_class != nullptr);
        DCHECK(super_class->HasVTable());
        return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize());
      } else {
        // It is an interface.
        if (access_check) {
          if (!method_reference_class->IsAssignableFrom((*this_object)->GetClass())) {
            ThrowIncompatibleClassChangeErrorClassForInterfaceSuper(resolved_method,
                                                                    method_reference_class,
                                                                    *this_object,
                                                                    referrer);
            return nullptr;  // Failure.
          }
        }
        // TODO: We can do better than this for a (compiled) fastpath.
        ArtMethod* result = method_reference_class->FindVirtualMethodForInterfaceSuper(
            resolved_method, class_linker->GetImagePointerSize());
        // Throw a NoSuchMethodError if the interface super target could not be found.
        if (result == nullptr) {
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
        }
        return result;
      }
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize;
      ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry(
          imt_index, class_linker->GetImagePointerSize());
      if (!imt_method->IsRuntimeMethod()) {
        if (kIsDebugBuild) {
          mirror::Class* klass = (*this_object)->GetClass();
          ArtMethod* method = klass->FindVirtualMethodForInterface(
              resolved_method, class_linker->GetImagePointerSize());
          CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " <<
              PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " <<
              PrettyClass(klass);
        }
        return imt_method;
      } else {
        ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface(
            resolved_method, class_linker->GetImagePointerSize());
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                      mirror::Object** this_object, \
                                                      ArtMethod* referrer, \
                                                      Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
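//
// Illustrative sketch only (the surrounding entrypoint and the chosen field type are
// assumptions): a static-field-get entrypoint would typically try this fast path first and
// fall back to FindFieldFromCode on failure:
//
//   ArtField* field = FindFieldFast(field_idx, referrer, StaticPrimitiveRead, sizeof(int32_t));
//   if (UNLIKELY(field == nullptr)) {
//     field = FindFieldFromCode<StaticPrimitiveRead, true>(field_idx, referrer, self,
//                                                          sizeof(int32_t));
//   }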
inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type,
                               size_t expected_size) {
  ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*));
  if (UNLIKELY(resolved_field == nullptr)) {
    return nullptr;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break;
    default:
      LOG(FATAL) << "UNREACHABLE";
      UNREACHABLE();
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return nullptr;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (is_static) {
    // Check that the class is initialized; otherwise fail so that we can contend to initialize
    // the class with other threads that may be racing to do this.
    if (UNLIKELY(!fields_class->IsInitialized())) {
      return nullptr;
    }
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return nullptr;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return nullptr;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object,
                                 ArtMethod* referrer, bool access_check, InvokeType type) {
  if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    return nullptr;
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  ArtMethod* resolved_method =
      referring_class->GetDexCache()->GetResolvedMethod(method_idx, sizeof(void*));
  if (UNLIKELY(resolved_method == nullptr)) {
    return nullptr;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return nullptr;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return nullptr;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method, sizeof(void*));
  } else if (type == kStatic || type == kDirect) {
    return resolved_method;
  } else if (type == kSuper) {
    // TODO: This lookup is rather slow.
    uint16_t method_type_idx = referring_class->GetDexFile().GetMethodId(method_idx).class_idx_;
    mirror::Class* method_reference_class =
        referring_class->GetDexCache()->GetResolvedType(method_type_idx);
    if (method_reference_class == nullptr) {
      // Need to do full type resolution...
      return nullptr;
    } else if (!method_reference_class->IsInterface()) {
      // It is not an interface.
      mirror::Class* super_class = referrer->GetDeclaringClass()->GetSuperClass();
      if (resolved_method->GetMethodIndex() >= super_class->GetVTableLength()) {
        // The super class does not have the method.
        return nullptr;
      }
      return super_class->GetVTableEntry(resolved_method->GetMethodIndex(), sizeof(void*));
    } else {
      return method_reference_class->FindVirtualMethodForInterfaceSuper(
          resolved_method, sizeof(void*));
    }
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTableEntry(
        resolved_method->GetMethodIndex(), sizeof(void*));
  }
}

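// Resolves and, when requested, access-checks and initializes a type referenced from compiled
// code. If can_run_clinit is false (e.g. for const-class), the class is returned without
// running its <clinit>. Returns null with a pending exception on failure.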
inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, ArtMethod* referrer, Thread* self,
                                             bool can_run_clinit, bool verify_access) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(self, h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

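// Resolves the string at string_idx in the referrer's dex file through the class linker.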
inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

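// Releases the monitor that a synchronized JNI method implicitly holds on return, preserving
// any already-pending exception across the MonitorExit call.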
inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = nullptr;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException();
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException()->Dump();
  }
  // Restore pending exception.
  if (saved_exception != nullptr) {
    self->SetException(saved_exception);
  }
}

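// Converts a floating-point value to an integral type with Java semantics: NaN converts to 0
// and out-of-range values saturate to the target type's minimum or maximum.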
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN.
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_