/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_

#include <limits>  // For std::numeric_limits, used by art_float_to_integral below.

#include "base/macros.h"
#include "class_linker.h"
#include "common_throws.h"
#include "dex_file.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/art_method.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "locks.h"
#include "object_utils.h"
#include "sirt_ref.h"
#include "thread.h"

namespace art {

namespace mirror {
  class Class;
  class ArtField;
  class Object;
}  // namespace mirror

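// Checks that a class can be used to allocate an object from compiled code: resolves the type
// for type_idx via the calling method's dex cache, optionally verifies access and
// instantiability, and ensures the class is initialized. Sets *slow_path when the caller must
// re-check the allocator type and null-check the returned class.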
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <const bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                                            mirror::ArtMethod* method,
                                                            Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == NULL)) {
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == NULL) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      ThrowLocation throw_location = self->GetCurrentLocationForThrow();
      self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
                              PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    SirtRef<mirror::Class> sirt_klass(self, klass);
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return sirt_klass.get();
  }
  return klass;
}

// TODO: Fix no thread safety analysis when annotalysis is smarter.
ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                                               Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(!klass->IsInitialized())) {
    SirtRef<mirror::Class> sirt_class(self, klass);
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(sirt_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return sirt_class.get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When the verifier or compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                                                mirror::ArtMethod* method,
                                                                Thread* self,
                                                                gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return klass->Alloc<kInstrumented>(self, heap->GetCurrentAllocator());
  }
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and a resolved class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                                        mirror::ArtMethod* method,
                                                                        Thread* self,
                                                                        gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return klass->Alloc<kInstrumented>(self, heap->GetCurrentAllocator());
  }
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                                           mirror::ArtMethod* method,
                                                                           Thread* self,
                                                                           gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

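// Checks that an array can be allocated from compiled code: verifies that the component count is
// non-negative, resolves the array class via the calling method's dex cache, and optionally
// performs an access check. Sets *slow_path whenever the caller must null-check the returned
// class.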
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                                           mirror::ArtMethod* method,
                                                           int32_t component_count,
                                                           bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When the verifier or compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                                              mirror::ArtMethod* method,
                                                              int32_t component_count,
                                                              Thread* self,
                                                              gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, allocator_type);
}

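// Given the context of a calling Method and an already resolved array class, check the component
// count and access, then create the array with the given allocator.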
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                                      mirror::ArtMethod* method,
                                                                      int32_t component_count,
                                                                      Thread* self,
                                                                      gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't
  // cause a GC or thread suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, allocator_type);
}

extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* method,
                                                 int32_t component_count, Thread* self,
                                                 bool access_check,
                                                 gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx,
                                                             mirror::ArtMethod* method,
                                                             int32_t component_count, Thread* self,
                                                             bool access_check,
                                                             gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Type of find field operation for fast and slow case.
enum FindFieldType {
  InstanceObjectRead,
  InstanceObjectWrite,
  InstancePrimitiveRead,
  InstancePrimitiveWrite,
  StaticObjectRead,
  StaticObjectWrite,
  StaticPrimitiveRead,
  StaticPrimitiveWrite,
};

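// Slow-path field resolution: resolves the field for field_idx and, when access_check is set,
// rejects incompatible class change, illegal access, writes to final fields of other classes,
// and type/size mismatches. For static accesses the declaring class is initialized if needed.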
template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, const mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite:  // Keep GCC happy by having a default handler, fall-through.
    default: is_primitive = true; is_set = true; is_static = true; break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccessResolvedField<true>(fields_class, resolved_field,
                                                                field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      FieldHelper fh(resolved_field);
      if (UNLIKELY(fh.IsPrimitiveType() != is_primitive ||
                   fh.FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields must be accessed on an already-initialized class.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      // Otherwise, ensure the class is initialized before returning the field.
      SirtRef<mirror::Class> sirt_class(self, fields_class);
      if (LIKELY(class_linker->EnsureInitialized(sirt_class, true, true))) {
        return resolved_field;
      } else {
        DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
        return nullptr;  // Failure.
      }
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          const mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

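// Slow-path method resolution for invoke entrypoints: resolves the method for method_idx,
// performs null-receiver and access checks when requested, and selects the concrete target
// according to the invoke type (static, direct, virtual, super or interface).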
template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object* this_object,
                                                    mirror::ArtMethod* referrer, Thread* self) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->ResolveMethod(method_idx, referrer, type);
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK(referrer == throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CanAccessResolvedMethod<true, type>(methods_class, resolved_method,
                                                             method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::ObjectArray<mirror::ArtMethod>* vtable = this_object->GetClass()->GetVTable();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength()))) {
        // Behavior to agree with that of the verifier.
        MethodHelper mh(resolved_method);
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), mh.GetName(),
                               mh.GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kSuper: {
      mirror::Class* super_class = referrer->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      mirror::ObjectArray<mirror::ArtMethod>* vtable;
      if (access_check) {
        // Check existence of super class.
        vtable = (super_class != nullptr) ? super_class->GetVTable() : nullptr;
        if (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength())) {
          // Behavior to agree with that of the verifier.
          MethodHelper mh(resolved_method);
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), mh.GetName(),
                                 mh.GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
        vtable = super_class->GetVTable();
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % ClassLinker::kImtSize;
      mirror::ObjectArray<mirror::ArtMethod>* imt_table = this_object->GetClass()->GetImTable();
      mirror::ArtMethod* imt_method = imt_table->Get(imt_index);
      if (!imt_method->IsImtConflictMethod()) {
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method, this_object,
                                                                     referrer);
          return nullptr;  // Failure.
        } else {
          return interface_method;
        }
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx, \
                                                              mirror::Object* this_object, \
                                                              mirror::ArtMethod* referrer, \
                                                              Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                              const mirror::ArtMethod* referrer,
                                              FindFieldType type, size_t expected_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == NULL)) {
    return NULL;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  // Check that the class is initialized or initializing.
  if (UNLIKELY(!fields_class->IsInitializing())) {
    return NULL;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead: is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite: is_primitive = false; is_set = true; is_static = false; break;
    case InstancePrimitiveRead: is_primitive = true; is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true; is_set = true; is_static = false; break;
    case StaticObjectRead: is_primitive = false; is_set = false; is_static = true; break;
    case StaticObjectWrite: is_primitive = false; is_set = true; is_static = true; break;
    case StaticPrimitiveRead: is_primitive = true; is_set = false; is_static = true; break;
    case StaticPrimitiveWrite: is_primitive = true; is_set = true; is_static = true; break;
    default:
      LOG(FATAL) << "UNREACHABLE";  // Assignment below to avoid GCC warnings.
      is_primitive = true;
      is_set = true;
      is_static = true;
      break;
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return NULL;
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return NULL;
  }
  FieldHelper fh(resolved_field);
  if (UNLIKELY(fh.IsPrimitiveType() != is_primitive ||
               fh.FieldSize() != expected_size)) {
    return NULL;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
                                                mirror::Object* this_object,
                                                const mirror::ArtMethod* referrer,
                                                bool access_check, InvokeType type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  bool is_direct = type == kStatic || type == kDirect;
  if (UNLIKELY(this_object == NULL && !is_direct)) {
    return NULL;
  }
  mirror::ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
  if (UNLIKELY(resolved_method == NULL)) {
    return NULL;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return NULL;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return NULL;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
  } else if (is_direct) {
    return resolved_method;
  } else if (type == kSuper) {
    return referrer->GetDeclaringClass()->GetSuperClass()->GetVTable()->
        Get(resolved_method->GetMethodIndex());
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTable()->Get(resolved_method->GetMethodIndex());
  }
}

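// Resolves the type for type_idx in the context of the referring method, optionally verifies
// access, and, unless only const-class semantics are needed, ensures the class is initialized.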
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    const mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  SirtRef<mirror::Class> sirt_class(self, klass);
  if (!class_linker->EnsureInitialized(sirt_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return sirt_class.get();
}

extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

static inline mirror::String* ResolveStringFromCode(const mirror::ArtMethod* referrer,
                                                    uint32_t string_idx)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

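// Unlocks the monitor of a synchronized JNI method on the way out, preserving any pending
// exception across the MonitorExit call.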
static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
    UNLOCK_FUNCTION(monitor_lock_) {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = NULL;
  ThrowLocation saved_throw_location;
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException(&saved_throw_location);
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException(NULL)->Dump();
  }
  // Restore pending exception.
  if (saved_exception != NULL) {
    self->SetException(saved_throw_location, saved_exception);
  }
}

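// Aborts via JniAbortF if a JNI method returned an invalid reference or an object that is not an
// instance of the method's declared return type.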
static inline void CheckReferenceResult(mirror::Object* o, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  if (o == NULL) {
    return;
  }
  mirror::ArtMethod* m = self->GetCurrentMethod(NULL);
  if (o == kInvalidIndirectRefObject) {
    JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str());
  }
  // Make sure that the result is an instance of the type this method was expected to return.
  mirror::Class* return_type = MethodHelper(m).GetReturnType();

  if (!o->InstanceOf(return_type)) {
    JniAbortF(NULL, "attempt to return an instance of %s from %s",
              PrettyTypeOf(o).c_str(), PrettyMethod(m).c_str());
  }
}

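// Runs any pending checkpoint functions and honors suspend requests before returning to managed
// code.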
static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  for (;;) {
    if (thread->ReadFlag(kCheckpointRequest)) {
      thread->RunCheckpointFunction();
    } else if (thread->ReadFlag(kSuspendRequest)) {
      thread->FullSuspendCheck();
    } else {
      break;
    }
  }
}

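// Invokes the InvocationHandler for a call on a proxy receiver, passing the arguments as jvalues
// and returning the result as a JValue.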
JValue InvokeProxyInvocationHandler(ScopedObjectAccessUnchecked& soa, const char* shorty,
                                    jobject rcvr_jobj, jobject interface_art_method_jobj,
                                    std::vector<jvalue>& args)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Entry point for deoptimization.
extern "C" void art_quick_deoptimize();
static inline uintptr_t GetQuickDeoptimizationEntryPoint() {
  return reinterpret_cast<uintptr_t>(art_quick_deoptimize);
}

// Return address of instrumentation stub.
extern "C" void art_quick_instrumentation_entry(void*);
static inline void* GetQuickInstrumentationEntryPoint() {
  return reinterpret_cast<void*>(art_quick_instrumentation_entry);
}

// The return_pc of instrumentation exit stub.
extern "C" void art_quick_instrumentation_exit();
static inline uintptr_t GetQuickInstrumentationExitPc() {
  return reinterpret_cast<uintptr_t>(art_quick_instrumentation_exit);
}

extern "C" void art_portable_to_interpreter_bridge(mirror::ArtMethod*);
static inline const void* GetPortableToInterpreterBridge() {
  return reinterpret_cast<void*>(art_portable_to_interpreter_bridge);
}

extern "C" void art_quick_to_interpreter_bridge(mirror::ArtMethod*);
static inline const void* GetQuickToInterpreterBridge() {
  return reinterpret_cast<void*>(art_quick_to_interpreter_bridge);
}

// Return address of interpreter stub.
static inline const void* GetCompiledCodeToInterpreterBridge() {
#if defined(ART_USE_PORTABLE_COMPILER)
  return GetPortableToInterpreterBridge();
#else
  return GetQuickToInterpreterBridge();
#endif
}

static inline const void* GetPortableResolutionTrampoline(ClassLinker* class_linker) {
  return class_linker->GetPortableResolutionTrampoline();
}

static inline const void* GetQuickResolutionTrampoline(ClassLinker* class_linker) {
  return class_linker->GetQuickResolutionTrampoline();
}

// Return address of resolution trampoline stub for defined compiler.
static inline const void* GetResolutionTrampoline(ClassLinker* class_linker) {
#if defined(ART_USE_PORTABLE_COMPILER)
  return GetPortableResolutionTrampoline(class_linker);
#else
  return GetQuickResolutionTrampoline(class_linker);
#endif
}

static inline const void* GetPortableImtConflictTrampoline(ClassLinker* class_linker) {
  return class_linker->GetPortableImtConflictTrampoline();
}

static inline const void* GetQuickImtConflictTrampoline(ClassLinker* class_linker) {
  return class_linker->GetQuickImtConflictTrampoline();
}

// Return address of imt conflict trampoline stub for defined compiler.
static inline const void* GetImtConflictTrampoline(ClassLinker* class_linker) {
#if defined(ART_USE_PORTABLE_COMPILER)
  return GetPortableImtConflictTrampoline(class_linker);
#else
  return GetQuickImtConflictTrampoline(class_linker);
#endif
}

extern "C" void art_portable_proxy_invoke_handler();
static inline const void* GetPortableProxyInvokeHandler() {
  return reinterpret_cast<void*>(art_portable_proxy_invoke_handler);
}

extern "C" void art_quick_proxy_invoke_handler();
static inline const void* GetQuickProxyInvokeHandler() {
  return reinterpret_cast<void*>(art_quick_proxy_invoke_handler);
}

static inline const void* GetProxyInvokeHandler() {
#if defined(ART_USE_PORTABLE_COMPILER)
  return GetPortableProxyInvokeHandler();
#else
  return GetQuickProxyInvokeHandler();
#endif
}

extern "C" void* art_jni_dlsym_lookup_stub(JNIEnv*, jobject);
static inline void* GetJniDlsymLookupStub() {
  return reinterpret_cast<void*>(art_jni_dlsym_lookup_stub);
}

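// Converts a floating-point value to an integral type with Java semantics: NaN maps to 0 and
// out-of-range values saturate to the integral type's minimum or maximum.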
template <typename INT_TYPE, typename FLOAT_TYPE>
static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_