/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_
#define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_

#include "base/macros.h"
#include "class_linker-inl.h"
#include "common_throws.h"
#include "dex_file.h"
#include "indirect_reference_table.h"
#include "invoke_type.h"
#include "jni_internal.h"
#include "mirror/art_method.h"
#include "mirror/array.h"
#include "mirror/class-inl.h"
#include "mirror/object-inl.h"
#include "mirror/throwable.h"
#include "object_utils.h"
#include "handle_scope-inl.h"
#include "thread.h"

namespace art {

namespace mirror {
class Class;
class ArtField;
class Object;
}  // namespace mirror

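// Check that an object of the type referenced by type_idx in method's dex cache may be
// allocated: resolve the type if needed, optionally check instantiability and access, and
// ensure the class is initialized. Sets *slow_path when the caller must take the slow path
// and null-check the result.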
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <const bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                                            mirror::ArtMethod* method,
                                                            Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == nullptr)) {
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
  }
  if (kAccessCheck) {
    if (UNLIKELY(!klass->IsInstantiable())) {
      ThrowLocation throw_location = self->GetCurrentLocationForThrow();
      self->ThrowNewException(throw_location, "Ljava/lang/InstantiationError;",
                              PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_klass.Get();
  }
  return klass;
}

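// Given an already-resolved class, ensure it is initialized before an object of it is
// allocated. Sets *slow_path when initialization had to run, so the caller must null-check
// the result and re-check the allocator type.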
// TODO: Fix no thread safety analysis when annotalysis is smarter.
ALWAYS_INLINE static inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass,
                                                                               Thread* self, bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_class(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(h_class, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    }
    return h_class.Get();
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it
// cannot be resolved, throw an error. If it can, use it to create an instance.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCode(uint32_t type_idx,
                                                                mirror::ArtMethod* method,
                                                                Thread* self,
                                                                gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator());
  }
  DCHECK(klass != nullptr);
  return klass->Alloc<kInstrumented>(self, allocator_type);
}

// Given the context of a calling Method and a resolved class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass,
                                                                        mirror::ArtMethod* method,
                                                                        Thread* self,
                                                                        gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  bool slow_path = false;
  klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    // Pass in false since the object can not be finalizable.
    return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator());
  }
  // Pass in false since the object can not be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}

// Given the context of a calling Method and an initialized class, create an instance.
// TODO: Fix NO_THREAD_SAFETY_ANALYSIS when GCC is smarter.
template <bool kInstrumented>
ALWAYS_INLINE static inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass,
                                                                           mirror::ArtMethod* method,
                                                                           Thread* self,
                                                                           gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  // Pass in false since the object can not be finalizable.
  return klass->Alloc<kInstrumented, false>(self, allocator_type);
}


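// Check that an array of the type referenced by type_idx with component_count elements may be
// allocated: verify that the count is non-negative, resolve the array class if it is not yet in
// the dex cache, and optionally check access. Sets *slow_path when the caller must take the
// slow path and null-check the result.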
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck>
ALWAYS_INLINE static inline mirror::Class* CheckArrayAlloc(uint32_t type_idx,
                                                           mirror::ArtMethod* method,
                                                           int32_t component_count,
                                                           bool* slow_path)
    NO_THREAD_SAFETY_ANALYSIS {
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    *slow_path = true;
    return nullptr;  // Failure
  }
  mirror::Class* klass = method->GetDexCacheResolvedTypes()->GetWithoutChecks(type_idx);
  if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve
    klass = Runtime::Current()->GetClassLinker()->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {  // Error
      DCHECK(Thread::Current()->IsExceptionPending());
      return nullptr;  // Failure
    }
    CHECK(klass->IsArrayClass()) << PrettyClass(klass);
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  return klass;
}

// Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If
// it cannot be resolved, throw an error. If it can, use it to create an array.
// When verification/compiler hasn't been able to verify access, optionally perform an access
// check.
// TODO: Fix no thread safety analysis when GCC can handle template specialization.
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCode(uint32_t type_idx,
                                                              mirror::ArtMethod* method,
                                                              int32_t component_count,
                                                              Thread* self,
                                                              gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  bool slow_path = false;
  mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, method, component_count,
                                                       &slow_path);
  if (UNLIKELY(slow_path)) {
    if (klass == nullptr) {
      return nullptr;
    }
    gc::Heap* heap = Runtime::Current()->GetHeap();
    return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                               klass->GetComponentSize(),
                                               heap->GetCurrentAllocator());
  }
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSize(), allocator_type);
}

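// Given the context of a calling Method and an already-resolved array class, check the component
// count and (optionally) access, then create an array.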
template <bool kAccessCheck, bool kInstrumented>
ALWAYS_INLINE static inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass,
                                                                      mirror::ArtMethod* method,
                                                                      int32_t component_count,
                                                                      Thread* self,
                                                                      gc::AllocatorType allocator_type)
    NO_THREAD_SAFETY_ANALYSIS {
  DCHECK(klass != nullptr);
  if (UNLIKELY(component_count < 0)) {
    ThrowNegativeArraySizeException(component_count);
    return nullptr;  // Failure
  }
  if (kAccessCheck) {
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      return nullptr;  // Failure
    }
  }
  // No need to retry a slow-path allocation as the above code won't cause a GC or thread
  // suspension.
  return mirror::Array::Alloc<kInstrumented>(self, klass, component_count,
                                             klass->GetComponentSize(), allocator_type);
}

extern mirror::Array* CheckAndAllocArrayFromCode(uint32_t type_idx, mirror::ArtMethod* method,
                                                 int32_t component_count, Thread* self,
                                                 bool access_check,
                                                 gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

extern mirror::Array* CheckAndAllocArrayFromCodeInstrumented(uint32_t type_idx,
                                                             mirror::ArtMethod* method,
                                                             int32_t component_count, Thread* self,
                                                             bool access_check,
                                                             gc::AllocatorType allocator_type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Type of find field operation for fast and slow case.
enum FindFieldType {
  InstanceObjectRead,
  InstanceObjectWrite,
  InstancePrimitiveRead,
  InstancePrimitiveWrite,
  StaticObjectRead,
  StaticObjectWrite,
  StaticPrimitiveRead,
  StaticPrimitiveWrite,
};

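// Slow path field resolution: resolve the field from its dex index and, when access_check is
// set, verify access, static-ness, type and size against what the caller expects. For static
// fields the declaring class is initialized if necessary before the field is returned.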
template<FindFieldType type, bool access_check>
static inline mirror::ArtField* FindFieldFromCode(uint32_t field_idx, mirror::ArtMethod* referrer,
                                                  Thread* self, size_t expected_size) {
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through.
    default:                     is_primitive = true;  is_set = true;  is_static = true;  break;
  }
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static);
  if (UNLIKELY(resolved_field == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  if (access_check) {
    if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
      ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer);
      return nullptr;
    }
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field,
                                                            field_idx))) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
    if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) {
      ThrowIllegalAccessErrorFinalField(referrer, resolved_field);
      return nullptr;  // Failure.
    } else {
      if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
                   resolved_field->FieldSize() != expected_size)) {
        ThrowLocation throw_location = self->GetCurrentLocationForThrow();
        DCHECK(throw_location.GetMethod() == referrer);
        self->ThrowNewExceptionF(throw_location, "Ljava/lang/NoSuchFieldError;",
                                 "Attempted read of %zd-bit %s on field '%s'",
                                 expected_size * (32 / sizeof(int32_t)),
                                 is_primitive ? "primitive" : "non-primitive",
                                 PrettyField(resolved_field, true).c_str());
        return nullptr;  // Failure.
      }
    }
  }
  if (!is_static) {
    // Instance fields must be accessed on an initialized class.
    return resolved_field;
  } else {
    // If the class is initialized we're done.
    if (LIKELY(fields_class->IsInitialized())) {
      return resolved_field;
    } else {
      StackHandleScope<1> hs(self);
      Handle<mirror::Class> h_class(hs.NewHandle(fields_class));
      // Otherwise, ensure the class is initialized before returning the field.
      if (LIKELY(class_linker->EnsureInitialized(h_class, true, true))) {
        return resolved_field;
      }
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind
      return nullptr;  // Failure.
    }
  }
}

// Explicit template declarations of FindFieldFromCode for all field access types.
#define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \
template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE \
mirror::ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \
                                                          mirror::ArtMethod* referrer, \
                                                          Thread* self, size_t expected_size) \

#define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \
    EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead);
EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite);

#undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL

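// Slow path method resolution: resolve the method from its dex index and, when access_check is
// set, perform access and incompatible-class-change checks, then select the actual target
// according to the invoke type (direct, vtable, super, or interface/IMT dispatch).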
template<InvokeType type, bool access_check>
static inline mirror::ArtMethod* FindMethodFromCode(uint32_t method_idx,
                                                    mirror::Object** this_object,
                                                    mirror::ArtMethod** referrer, Thread* self) {
  ClassLinker* const class_linker = Runtime::Current()->GetClassLinker();
  mirror::ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, *referrer, type);
  if (resolved_method == nullptr) {
    StackHandleScope<1> hs(self);
    mirror::Object* null_this = nullptr;
    HandleWrapper<mirror::Object> h_this(
        hs.NewHandleWrapper(type == kStatic ? &null_this : this_object));
    resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type);
  }
  if (UNLIKELY(resolved_method == nullptr)) {
    DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
    return nullptr;  // Failure.
  } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) {
    // Maintain interpreter-like semantics where NullPointerException is thrown
    // after potential NoSuchMethodError from class linker.
    ThrowLocation throw_location = self->GetCurrentLocationForThrow();
    DCHECK_EQ(*referrer, throw_location.GetMethod());
    ThrowNullPointerExceptionForMethodAccess(throw_location, method_idx, type);
    return nullptr;  // Failure.
  } else if (access_check) {
    // Incompatible class change should have been handled in resolve method.
    if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) {
      ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method,
                                        *referrer);
      return nullptr;  // Failure.
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = (*referrer)->GetDeclaringClass();
    bool can_access_resolved_method =
        referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method,
                                                         method_idx);
    if (UNLIKELY(!can_access_resolved_method)) {
      DCHECK(self->IsExceptionPending());  // Throw exception and unwind.
      return nullptr;  // Failure.
    }
  }
  switch (type) {
    case kStatic:
    case kDirect:
      return resolved_method;
    case kVirtual: {
      mirror::ObjectArray<mirror::ArtMethod>* vtable = (*this_object)->GetClass()->GetVTable();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      if (access_check &&
          (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength()))) {
        // Behavior to agree with that of the verifier.
        ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                               resolved_method->GetName(), resolved_method->GetSignature());
        return nullptr;  // Failure.
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kSuper: {
      mirror::Class* super_class = (*referrer)->GetDeclaringClass()->GetSuperClass();
      uint16_t vtable_index = resolved_method->GetMethodIndex();
      mirror::ObjectArray<mirror::ArtMethod>* vtable;
      if (access_check) {
        // Check existence of super class.
        vtable = (super_class != nullptr) ? super_class->GetVTable() : nullptr;
        if (vtable == nullptr || vtable_index >= static_cast<uint32_t>(vtable->GetLength())) {
          // Behavior to agree with that of the verifier.
          ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(),
                                 resolved_method->GetName(), resolved_method->GetSignature());
          return nullptr;  // Failure.
        }
      } else {
        // Super class must exist.
        DCHECK(super_class != nullptr);
        vtable = super_class->GetVTable();
      }
      DCHECK(vtable != nullptr);
      return vtable->GetWithoutChecks(vtable_index);
    }
    case kInterface: {
      uint32_t imt_index = resolved_method->GetDexMethodIndex() % ClassLinker::kImtSize;
      mirror::ObjectArray<mirror::ArtMethod>* imt_table = (*this_object)->GetClass()->GetImTable();
      mirror::ArtMethod* imt_method = imt_table->Get(imt_index);
      if (!imt_method->IsImtConflictMethod()) {
        return imt_method;
      } else {
        mirror::ArtMethod* interface_method =
            (*this_object)->GetClass()->FindVirtualMethodForInterface(resolved_method);
        if (UNLIKELY(interface_method == nullptr)) {
          ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method,
                                                                     *this_object, *referrer);
          return nullptr;  // Failure.
        }
        return interface_method;
      }
    }
    default:
      LOG(FATAL) << "Unknown invoke type " << type;
      return nullptr;  // Failure.
  }
}

// Explicit template declarations of FindMethodFromCode for all invoke types.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check)                 \
  template SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE                       \
  mirror::ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx,         \
                                                              mirror::Object** this_object, \
                                                              mirror::ArtMethod** referrer, \
                                                              Thread* self)
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false);   \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL

// Fast path field resolution that can't initialize classes or throw exceptions.
static inline mirror::ArtField* FindFieldFast(uint32_t field_idx,
                                              mirror::ArtMethod* referrer,
                                              FindFieldType type, size_t expected_size)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  mirror::ArtField* resolved_field =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx);
  if (UNLIKELY(resolved_field == NULL)) {
    return NULL;
  }
  mirror::Class* fields_class = resolved_field->GetDeclaringClass();
  // Check that the class is initialized or initializing.
  if (UNLIKELY(!fields_class->IsInitializing())) {
    return NULL;
  }
  // Check for incompatible class change.
  bool is_primitive;
  bool is_set;
  bool is_static;
  switch (type) {
    case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break;
    case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break;
    case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break;
    case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break;
    case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break;
    case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break;
    case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break;
    case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break;
    default:
      LOG(FATAL) << "UNREACHABLE";  // Assignment below to avoid GCC warnings.
      is_primitive = true;
      is_set = true;
      is_static = true;
      break;
  }
  if (UNLIKELY(resolved_field->IsStatic() != is_static)) {
    // Incompatible class change.
    return NULL;
  }
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (UNLIKELY(!referring_class->CanAccess(fields_class) ||
               !referring_class->CanAccessMember(fields_class,
                                                 resolved_field->GetAccessFlags()) ||
               (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) {
    // Illegal access.
    return NULL;
  }
  if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive ||
               resolved_field->FieldSize() != expected_size)) {
    return NULL;
  }
  return resolved_field;
}

// Fast path method resolution that can't throw exceptions.
static inline mirror::ArtMethod* FindMethodFast(uint32_t method_idx,
                                                mirror::Object* this_object,
                                                mirror::ArtMethod* referrer,
                                                bool access_check, InvokeType type)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  bool is_direct = type == kStatic || type == kDirect;
  if (UNLIKELY(this_object == NULL && !is_direct)) {
    return NULL;
  }
  mirror::ArtMethod* resolved_method =
      referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx);
  if (UNLIKELY(resolved_method == NULL)) {
    return NULL;
  }
  if (access_check) {
    // Check for incompatible class change errors and access.
    bool icce = resolved_method->CheckIncompatibleClassChange(type);
    if (UNLIKELY(icce)) {
      return NULL;
    }
    mirror::Class* methods_class = resolved_method->GetDeclaringClass();
    mirror::Class* referring_class = referrer->GetDeclaringClass();
    if (UNLIKELY(!referring_class->CanAccess(methods_class) ||
                 !referring_class->CanAccessMember(methods_class,
                                                   resolved_method->GetAccessFlags()))) {
      // Potential illegal access, may need to refine the method's class.
      return NULL;
    }
  }
  if (type == kInterface) {  // Most common form of slow path dispatch.
    return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method);
  } else if (is_direct) {
    return resolved_method;
  } else if (type == kSuper) {
    return referrer->GetDeclaringClass()->GetSuperClass()->GetVTable()->
        Get(resolved_method->GetMethodIndex());
  } else {
    DCHECK(type == kVirtual);
    return this_object->GetClass()->GetVTable()->Get(resolved_method->GetMethodIndex());
  }
}

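// Resolve the type referenced by type_idx, optionally verifying that the referring class may
// access it and, unless only const-class semantics are required, ensuring the class is
// initialized before it is returned.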
static inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx,
                                                    mirror::ArtMethod* referrer,
                                                    Thread* self, bool can_run_clinit,
                                                    bool verify_access)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  mirror::Class* klass = class_linker->ResolveType(type_idx, referrer);
  if (UNLIKELY(klass == nullptr)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // Perform access check if necessary.
  mirror::Class* referring_class = referrer->GetDeclaringClass();
  if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) {
    ThrowIllegalAccessErrorClass(referring_class, klass);
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  // If we're just implementing const-class, we shouldn't call <clinit>.
  if (!can_run_clinit) {
    return klass;
  }
  // If we are the <clinit> of this class, just return our storage.
  //
  // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished
  // running.
  if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) {
    return klass;
  }
  StackHandleScope<1> hs(self);
  Handle<mirror::Class> h_class(hs.NewHandle(klass));
  if (!class_linker->EnsureInitialized(h_class, true, true)) {
    CHECK(self->IsExceptionPending());
    return nullptr;  // Failure - Indicate to caller to deliver exception
  }
  return h_class.Get();
}

extern void ThrowStackOverflowError(Thread* self) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

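// Resolve the string referenced by string_idx in the referrer's dex file via the class linker.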
static inline mirror::String* ResolveStringFromCode(mirror::ArtMethod* referrer,
                                                    uint32_t string_idx)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  return class_linker->ResolveString(string_idx, referrer);
}

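// Exit the monitor of a synchronized JNI method on return, preserving any pending exception
// across the MonitorExit call.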
static inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self)
    NO_THREAD_SAFETY_ANALYSIS /* SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) */ {
  // Save any pending exception over monitor exit call.
  mirror::Throwable* saved_exception = NULL;
  ThrowLocation saved_throw_location;
  bool is_exception_reported = self->IsExceptionReportedToInstrumentation();
  if (UNLIKELY(self->IsExceptionPending())) {
    saved_exception = self->GetException(&saved_throw_location);
    self->ClearException();
  }
  // Decode locked object and unlock, before popping local references.
  self->DecodeJObject(locked)->MonitorExit(self);
  if (UNLIKELY(self->IsExceptionPending())) {
    LOG(FATAL) << "Synchronized JNI code returning with an exception:\n"
               << saved_exception->Dump()
               << "\nEncountered second exception during implicit MonitorExit:\n"
               << self->GetException(NULL)->Dump();
  }
  // Restore pending exception.
  if (saved_exception != NULL) {
    self->SetException(saved_throw_location, saved_exception);
    self->SetExceptionReportedToInstrumentation(is_exception_reported);
  }
}

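// Check that a reference returned from a JNI method is valid and is an instance of the
// method's declared return type; aborts via JniAbortF if it is not.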
static inline void CheckReferenceResult(mirror::Object* o, Thread* self)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  if (o == NULL) {
    return;
  }
  mirror::ArtMethod* m = self->GetCurrentMethod(NULL);
  if (o == kInvalidIndirectRefObject) {
    JniAbortF(NULL, "invalid reference returned from %s", PrettyMethod(m).c_str());
  }
  // Make sure that the result is an instance of the type this method was expected to return.
  StackHandleScope<1> hs(self);
  Handle<mirror::ArtMethod> h_m(hs.NewHandle(m));
  mirror::Class* return_type = MethodHelper(h_m).GetReturnType();

  if (!o->InstanceOf(return_type)) {
    JniAbortF(NULL, "attempt to return an instance of %s from %s", PrettyTypeOf(o).c_str(),
              PrettyMethod(h_m.Get()).c_str());
  }
}

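// Poll the thread's checkpoint and suspend flags, running checkpoints and suspending as
// requested, until neither flag is set.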
static inline void CheckSuspend(Thread* thread) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
  for (;;) {
    if (thread->ReadFlag(kCheckpointRequest)) {
      thread->RunCheckpointFunction();
    } else if (thread->ReadFlag(kSuspendRequest)) {
      thread->FullSuspendCheck();
    } else {
      break;
    }
  }
}

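// Invoke the invocation handler of a proxy receiver for the given interface method and
// arguments, returning the result as a JValue.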
JValue InvokeProxyInvocationHandler(ScopedObjectAccessAlreadyRunnable& soa, const char* shorty,
                                    jobject rcvr_jobj, jobject interface_art_method_jobj,
                                    std::vector<jvalue>& args)
    SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

// Entry point for deoptimization.
extern "C" void art_quick_deoptimize();
static inline uintptr_t GetQuickDeoptimizationEntryPoint() {
  return reinterpret_cast<uintptr_t>(art_quick_deoptimize);
}

// Return address of instrumentation stub.
extern "C" void art_quick_instrumentation_entry(void*);
static inline void* GetQuickInstrumentationEntryPoint() {
  return reinterpret_cast<void*>(art_quick_instrumentation_entry);
}

// The return_pc of instrumentation exit stub.
extern "C" void art_quick_instrumentation_exit();
static inline uintptr_t GetQuickInstrumentationExitPc() {
  return reinterpret_cast<uintptr_t>(art_quick_instrumentation_exit);
}

extern "C" void art_portable_to_interpreter_bridge(mirror::ArtMethod*);
static inline const void* GetPortableToInterpreterBridge() {
  return reinterpret_cast<void*>(art_portable_to_interpreter_bridge);
}

static inline const void* GetPortableToQuickBridge() {
  // TODO: portable to quick bridge. Bug: 8196384
  return GetPortableToInterpreterBridge();
}

extern "C" void art_quick_to_interpreter_bridge(mirror::ArtMethod*);
static inline const void* GetQuickToInterpreterBridge() {
  return reinterpret_cast<void*>(art_quick_to_interpreter_bridge);
}

static inline const void* GetQuickToPortableBridge() {
  // TODO: quick to portable bridge. Bug: 8196384
  return GetQuickToInterpreterBridge();
}

static inline const void* GetPortableResolutionTrampoline(ClassLinker* class_linker) {
  return class_linker->GetPortableResolutionTrampoline();
}

static inline const void* GetQuickResolutionTrampoline(ClassLinker* class_linker) {
  return class_linker->GetQuickResolutionTrampoline();
}

static inline const void* GetPortableImtConflictTrampoline(ClassLinker* class_linker) {
  return class_linker->GetPortableImtConflictTrampoline();
}

static inline const void* GetQuickImtConflictTrampoline(ClassLinker* class_linker) {
  return class_linker->GetQuickImtConflictTrampoline();
}

static inline const void* GetQuickToInterpreterBridgeTrampoline(ClassLinker* class_linker) {
  return class_linker->GetQuickToInterpreterBridgeTrampoline();
}

extern "C" void art_portable_proxy_invoke_handler();
static inline const void* GetPortableProxyInvokeHandler() {
  return reinterpret_cast<void*>(art_portable_proxy_invoke_handler);
}

extern "C" void art_quick_proxy_invoke_handler();
static inline const void* GetQuickProxyInvokeHandler() {
  return reinterpret_cast<void*>(art_quick_proxy_invoke_handler);
}

extern "C" void* art_jni_dlsym_lookup_stub(JNIEnv*, jobject);
static inline void* GetJniDlsymLookupStub() {
  return reinterpret_cast<void*>(art_jni_dlsym_lookup_stub);
}

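// Convert a floating point value to an integral type using Java semantics: NaN converts to 0
// and out-of-range values saturate to the integral type's minimum or maximum.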
template <typename INT_TYPE, typename FLOAT_TYPE>
static inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE kMaxInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE kMinInt = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE kMaxIntAsFloat = static_cast<FLOAT_TYPE>(kMaxInt);
  const FLOAT_TYPE kMinIntAsFloat = static_cast<FLOAT_TYPE>(kMinInt);
  if (LIKELY(f > kMinIntAsFloat)) {
    if (LIKELY(f < kMaxIntAsFloat)) {
      return static_cast<INT_TYPE>(f);
    } else {
      return kMaxInt;
    }
  } else {
    return (f != f) ? 0 : kMinInt;  // f != f implies NaN
  }
}

}  // namespace art

#endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_H_