| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 1 | /* | 
|  | 2 | * Copyright (C) 2012 The Android Open Source Project | 
|  | 3 | * | 
|  | 4 | * Licensed under the Apache License, Version 2.0 (the "License"); | 
|  | 5 | * you may not use this file except in compliance with the License. | 
|  | 6 | * You may obtain a copy of the License at | 
|  | 7 | * | 
|  | 8 | *      http://www.apache.org/licenses/LICENSE-2.0 | 
|  | 9 | * | 
|  | 10 | * Unless required by applicable law or agreed to in writing, software | 
|  | 11 | * distributed under the License is distributed on an "AS IS" BASIS, | 
|  | 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | 
|  | 13 | * See the License for the specific language governing permissions and | 
|  | 14 | * limitations under the License. | 
|  | 15 | */ | 
|  | 16 |  | 
|  | 17 | #ifndef ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_ | 
|  | 18 | #define ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_ | 
|  | 19 |  | 
|  | 20 | #include "entrypoint_utils.h" | 
|  | 21 |  | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 22 | #include "art_method.h" | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 23 | #include "class_linker-inl.h" | 
|  | 24 | #include "common_throws.h" | 
|  | 25 | #include "dex_file.h" | 
| Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 26 | #include "entrypoints/quick/callee_save_frame.h" | 
|  | 27 | #include "handle_scope-inl.h" | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 28 | #include "indirect_reference_table.h" | 
|  | 29 | #include "invoke_type.h" | 
|  | 30 | #include "jni_internal.h" | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 31 | #include "mirror/array.h" | 
|  | 32 | #include "mirror/class-inl.h" | 
|  | 33 | #include "mirror/object-inl.h" | 
|  | 34 | #include "mirror/throwable.h" | 
| Vladimir Marko | 5ea536a | 2015-04-20 20:11:30 +0100 | [diff] [blame] | 35 | #include "nth_caller_visitor.h" | 
|  | 36 | #include "runtime.h" | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 37 | #include "thread.h" | 
|  | 38 |  | 
|  | 39 | namespace art { | 
|  | 40 |  | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 41 | inline ArtMethod* GetResolvedMethod(ArtMethod* outer_method, | 
| Nicolas Geoffray | 32c9ea5 | 2015-06-12 14:52:33 +0100 | [diff] [blame] | 42 | const InlineInfo& inline_info, | 
|  | 43 | uint8_t inlining_depth) | 
| Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 44 | SHARED_REQUIRES(Locks::mutator_lock_) { | 
| Nicolas Geoffray | 32c9ea5 | 2015-06-12 14:52:33 +0100 | [diff] [blame] | 45 | uint32_t method_index = inline_info.GetMethodIndexAtDepth(inlining_depth); | 
|  | 46 | InvokeType invoke_type = static_cast<InvokeType>( | 
|  | 47 | inline_info.GetInvokeTypeAtDepth(inlining_depth)); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 48 | ArtMethod* caller = outer_method->GetDexCacheResolvedMethod(method_index, sizeof(void*)); | 
| Nicolas Geoffray | d23eeef | 2015-05-18 22:31:29 +0100 | [diff] [blame] | 49 | if (!caller->IsRuntimeMethod()) { | 
|  | 50 | return caller; | 
|  | 51 | } | 
|  | 52 |  | 
|  | 53 | // The method in the dex cache can be the runtime method responsible for invoking | 
|  | 54 | // the stub that will then update the dex cache. Therefore, we need to do the | 
|  | 55 | // resolution ourselves. | 
| Nicolas Geoffray | 3976e5e | 2015-06-15 08:58:03 +0100 | [diff] [blame] | 56 |  | 
| Nicolas Geoffray | 32c9ea5 | 2015-06-12 14:52:33 +0100 | [diff] [blame] | 57 | // We first find the class loader of our caller. If it is the outer method, we can directly | 
|  | 58 | // use its class loader. Otherwise, we also need to resolve our caller. | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 59 | StackHandleScope<2> hs(Thread::Current()); | 
| Nicolas Geoffray | d23eeef | 2015-05-18 22:31:29 +0100 | [diff] [blame] | 60 | ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); | 
| Nicolas Geoffray | 32c9ea5 | 2015-06-12 14:52:33 +0100 | [diff] [blame] | 61 | MutableHandle<mirror::ClassLoader> class_loader(hs.NewHandle<mirror::Class>(nullptr)); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 62 | Handle<mirror::DexCache> dex_cache(hs.NewHandle(outer_method->GetDexCache())); | 
| Nicolas Geoffray | 32c9ea5 | 2015-06-12 14:52:33 +0100 | [diff] [blame] | 63 | if (inlining_depth == 0) { | 
|  | 64 | class_loader.Assign(outer_method->GetClassLoader()); | 
|  | 65 | } else { | 
|  | 66 | caller = GetResolvedMethod(outer_method, inline_info, inlining_depth - 1); | 
|  | 67 | class_loader.Assign(caller->GetClassLoader()); | 
|  | 68 | } | 
|  | 69 |  | 
| Nicolas Geoffray | d23eeef | 2015-05-18 22:31:29 +0100 | [diff] [blame] | 70 | return class_linker->ResolveMethod( | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 71 | *outer_method->GetDexFile(), method_index, dex_cache, class_loader, nullptr, invoke_type); | 
| Nicolas Geoffray | d23eeef | 2015-05-18 22:31:29 +0100 | [diff] [blame] | 72 | } | 
|  | 73 |  | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 74 | inline ArtMethod* GetCalleeSaveMethodCaller(Thread* self, Runtime::CalleeSaveType type) | 
| Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 75 | SHARED_REQUIRES(Locks::mutator_lock_) { | 
| Nicolas Geoffray | 7ea6a17 | 2015-05-19 18:58:54 +0100 | [diff] [blame] | 76 | return GetCalleeSaveMethodCaller( | 
|  | 77 | self->GetManagedStack()->GetTopQuickFrame(), type, true /* do_caller_check */); | 
|  | 78 | } | 
|  | 79 |  | 
// Checks that |type_idx| resolves to an instantiable class that |method| may
// access, and that it is initialized, for an object allocation from compiled
// code. Returns the class on success and nullptr on failure (an exception is
// then pending). Sets *slow_path to true whenever this call may have suspended
// the thread (resolution or initialization), so the caller must re-read the
// current allocator type and null-check the result.
template <const bool kAccessCheck>
ALWAYS_INLINE
inline mirror::Class* CheckObjectAlloc(uint32_t type_idx,
                                       ArtMethod* method,
                                       Thread* self, bool* slow_path) {
  ClassLinker* class_linker = Runtime::Current()->GetClassLinker();
  size_t pointer_size = class_linker->GetImagePointerSize();
  mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size);
  if (UNLIKELY(klass == nullptr)) {
    // Not in the dex cache: resolve through the class linker. Resolution can
    // suspend or throw, hence the slow path.
    klass = class_linker->ResolveType(type_idx, method);
    *slow_path = true;
    if (klass == nullptr) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
  }
  if (kAccessCheck) {
    // Abstract classes and interfaces cannot be instantiated.
    if (UNLIKELY(!klass->IsInstantiable())) {
      self->ThrowNewException("Ljava/lang/InstantiationError;", PrettyDescriptor(klass).c_str());
      *slow_path = true;
      return nullptr;  // Failure
    }
    mirror::Class* referrer = method->GetDeclaringClass();
    if (UNLIKELY(!referrer->CanAccess(klass))) {
      ThrowIllegalAccessErrorClass(referrer, klass);
      *slow_path = true;
      return nullptr;  // Failure
    }
  }
  if (UNLIKELY(!klass->IsInitialized())) {
    StackHandleScope<1> hs(self);
    Handle<mirror::Class> h_klass(hs.NewHandle(klass));
    // EnsureInitialized (the class initializer) might cause a GC, which
    // may cause us to suspend, meaning that another thread may try to
    // change the allocator while we are stuck in the entrypoints of
    // an old allocator. Also, the class initialization may fail. To
    // handle these cases we mark the slow path boolean as true so
    // that the caller knows to check the allocator type to see if it
    // has changed and to null-check the return value in case the
    // initialization fails.
    *slow_path = true;
    if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_klass, true, true)) {
      DCHECK(self->IsExceptionPending());
      return nullptr;  // Failure
    } else {
      DCHECK(!self->IsExceptionPending());
    }
    // Return through the handle: the class object may have moved during GC.
    return h_klass.Get();
  }
  return klass;
}
|  | 133 |  | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 134 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 135 | inline mirror::Class* CheckClassInitializedForObjectAlloc(mirror::Class* klass, | 
|  | 136 | Thread* self, | 
|  | 137 | bool* slow_path) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 138 | if (UNLIKELY(!klass->IsInitialized())) { | 
|  | 139 | StackHandleScope<1> hs(self); | 
|  | 140 | Handle<mirror::Class> h_class(hs.NewHandle(klass)); | 
|  | 141 | // EnsureInitialized (the class initializer) might cause a GC. | 
|  | 142 | // may cause us to suspend meaning that another thread may try to | 
|  | 143 | // change the allocator while we are stuck in the entrypoints of | 
|  | 144 | // an old allocator. Also, the class initialization may fail. To | 
|  | 145 | // handle these cases we mark the slow path boolean as true so | 
|  | 146 | // that the caller knows to check the allocator type to see if it | 
|  | 147 | // has changed and to null-check the return value in case the | 
|  | 148 | // initialization fails. | 
|  | 149 | *slow_path = true; | 
| Ian Rogers | 7b078e8 | 2014-09-10 14:44:24 -0700 | [diff] [blame] | 150 | if (!Runtime::Current()->GetClassLinker()->EnsureInitialized(self, h_class, true, true)) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 151 | DCHECK(self->IsExceptionPending()); | 
|  | 152 | return nullptr;  // Failure | 
|  | 153 | } | 
|  | 154 | return h_class.Get(); | 
|  | 155 | } | 
|  | 156 | return klass; | 
|  | 157 | } | 
|  | 158 |  | 
|  | 159 | // Given the context of a calling Method, use its DexCache to resolve a type to a Class. If it | 
|  | 160 | // cannot be resolved, throw an error. If it can, use it to create an instance. | 
|  | 161 | // When verification/compiler hasn't been able to verify access, optionally perform an access | 
|  | 162 | // check. | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 163 | template <bool kAccessCheck, bool kInstrumented> | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 164 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 165 | inline mirror::Object* AllocObjectFromCode(uint32_t type_idx, | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 166 | ArtMethod* method, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 167 | Thread* self, | 
|  | 168 | gc::AllocatorType allocator_type) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 169 | bool slow_path = false; | 
|  | 170 | mirror::Class* klass = CheckObjectAlloc<kAccessCheck>(type_idx, method, self, &slow_path); | 
|  | 171 | if (UNLIKELY(slow_path)) { | 
|  | 172 | if (klass == nullptr) { | 
|  | 173 | return nullptr; | 
|  | 174 | } | 
|  | 175 | return klass->Alloc<kInstrumented>(self, Runtime::Current()->GetHeap()->GetCurrentAllocator()); | 
|  | 176 | } | 
|  | 177 | DCHECK(klass != nullptr); | 
|  | 178 | return klass->Alloc<kInstrumented>(self, allocator_type); | 
|  | 179 | } | 
|  | 180 |  | 
|  | 181 | // Given the context of a calling Method and a resolved class, create an instance. | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 182 | template <bool kInstrumented> | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 183 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 184 | inline mirror::Object* AllocObjectFromCodeResolved(mirror::Class* klass, | 
|  | 185 | Thread* self, | 
|  | 186 | gc::AllocatorType allocator_type) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 187 | DCHECK(klass != nullptr); | 
|  | 188 | bool slow_path = false; | 
|  | 189 | klass = CheckClassInitializedForObjectAlloc(klass, self, &slow_path); | 
|  | 190 | if (UNLIKELY(slow_path)) { | 
|  | 191 | if (klass == nullptr) { | 
|  | 192 | return nullptr; | 
|  | 193 | } | 
|  | 194 | gc::Heap* heap = Runtime::Current()->GetHeap(); | 
|  | 195 | // Pass in false since the object can not be finalizable. | 
|  | 196 | return klass->Alloc<kInstrumented, false>(self, heap->GetCurrentAllocator()); | 
|  | 197 | } | 
|  | 198 | // Pass in false since the object can not be finalizable. | 
|  | 199 | return klass->Alloc<kInstrumented, false>(self, allocator_type); | 
|  | 200 | } | 
|  | 201 |  | 
|  | 202 | // Given the context of a calling Method and an initialized class, create an instance. | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 203 | template <bool kInstrumented> | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 204 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 205 | inline mirror::Object* AllocObjectFromCodeInitialized(mirror::Class* klass, | 
|  | 206 | Thread* self, | 
|  | 207 | gc::AllocatorType allocator_type) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 208 | DCHECK(klass != nullptr); | 
|  | 209 | // Pass in false since the object can not be finalizable. | 
|  | 210 | return klass->Alloc<kInstrumented, false>(self, allocator_type); | 
|  | 211 | } | 
|  | 212 |  | 
|  | 213 |  | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 214 | template <bool kAccessCheck> | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 215 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 216 | inline mirror::Class* CheckArrayAlloc(uint32_t type_idx, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 217 | int32_t component_count, | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 218 | ArtMethod* method, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 219 | bool* slow_path) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 220 | if (UNLIKELY(component_count < 0)) { | 
|  | 221 | ThrowNegativeArraySizeException(component_count); | 
|  | 222 | *slow_path = true; | 
|  | 223 | return nullptr;  // Failure | 
|  | 224 | } | 
| Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 225 | ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); | 
|  | 226 | size_t pointer_size = class_linker->GetImagePointerSize(); | 
|  | 227 | mirror::Class* klass = method->GetDexCacheResolvedType<false>(type_idx, pointer_size); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 228 | if (UNLIKELY(klass == nullptr)) {  // Not in dex cache so try to resolve | 
| Vladimir Marko | 05792b9 | 2015-08-03 11:56:49 +0100 | [diff] [blame] | 229 | klass = class_linker->ResolveType(type_idx, method); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 230 | *slow_path = true; | 
|  | 231 | if (klass == nullptr) {  // Error | 
|  | 232 | DCHECK(Thread::Current()->IsExceptionPending()); | 
|  | 233 | return nullptr;  // Failure | 
|  | 234 | } | 
|  | 235 | CHECK(klass->IsArrayClass()) << PrettyClass(klass); | 
|  | 236 | } | 
|  | 237 | if (kAccessCheck) { | 
|  | 238 | mirror::Class* referrer = method->GetDeclaringClass(); | 
|  | 239 | if (UNLIKELY(!referrer->CanAccess(klass))) { | 
|  | 240 | ThrowIllegalAccessErrorClass(referrer, klass); | 
|  | 241 | *slow_path = true; | 
|  | 242 | return nullptr;  // Failure | 
|  | 243 | } | 
|  | 244 | } | 
|  | 245 | return klass; | 
|  | 246 | } | 
|  | 247 |  | 
|  | 248 | // Given the context of a calling Method, use its DexCache to resolve a type to an array Class. If | 
|  | 249 | // it cannot be resolved, throw an error. If it can, use it to create an array. | 
|  | 250 | // When verification/compiler hasn't been able to verify access, optionally perform an access | 
|  | 251 | // check. | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 252 | template <bool kAccessCheck, bool kInstrumented> | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 253 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 254 | inline mirror::Array* AllocArrayFromCode(uint32_t type_idx, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 255 | int32_t component_count, | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 256 | ArtMethod* method, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 257 | Thread* self, | 
|  | 258 | gc::AllocatorType allocator_type) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 259 | bool slow_path = false; | 
| Andreas Gampe | 1cc7dba | 2014-12-17 18:43:01 -0800 | [diff] [blame] | 260 | mirror::Class* klass = CheckArrayAlloc<kAccessCheck>(type_idx, component_count, method, | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 261 | &slow_path); | 
|  | 262 | if (UNLIKELY(slow_path)) { | 
|  | 263 | if (klass == nullptr) { | 
|  | 264 | return nullptr; | 
|  | 265 | } | 
|  | 266 | gc::Heap* heap = Runtime::Current()->GetHeap(); | 
|  | 267 | return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, | 
| Hiroshi Yamauchi | f0edfc3 | 2014-09-25 11:46:46 -0700 | [diff] [blame] | 268 | klass->GetComponentSizeShift(), | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 269 | heap->GetCurrentAllocator()); | 
|  | 270 | } | 
|  | 271 | return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, | 
| Hiroshi Yamauchi | f0edfc3 | 2014-09-25 11:46:46 -0700 | [diff] [blame] | 272 | klass->GetComponentSizeShift(), allocator_type); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 273 | } | 
|  | 274 |  | 
|  | 275 | template <bool kAccessCheck, bool kInstrumented> | 
| Hiroshi Yamauchi | eb1e929 | 2014-08-06 12:41:15 -0700 | [diff] [blame] | 276 | ALWAYS_INLINE | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 277 | inline mirror::Array* AllocArrayFromCodeResolved(mirror::Class* klass, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 278 | int32_t component_count, | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 279 | ArtMethod* method, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 280 | Thread* self, | 
|  | 281 | gc::AllocatorType allocator_type) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 282 | DCHECK(klass != nullptr); | 
|  | 283 | if (UNLIKELY(component_count < 0)) { | 
|  | 284 | ThrowNegativeArraySizeException(component_count); | 
|  | 285 | return nullptr;  // Failure | 
|  | 286 | } | 
|  | 287 | if (kAccessCheck) { | 
|  | 288 | mirror::Class* referrer = method->GetDeclaringClass(); | 
|  | 289 | if (UNLIKELY(!referrer->CanAccess(klass))) { | 
|  | 290 | ThrowIllegalAccessErrorClass(referrer, klass); | 
|  | 291 | return nullptr;  // Failure | 
|  | 292 | } | 
|  | 293 | } | 
|  | 294 | // No need to retry a slow-path allocation as the above code won't cause a GC or thread | 
|  | 295 | // suspension. | 
|  | 296 | return mirror::Array::Alloc<kInstrumented>(self, klass, component_count, | 
| Hiroshi Yamauchi | f0edfc3 | 2014-09-25 11:46:46 -0700 | [diff] [blame] | 297 | klass->GetComponentSizeShift(), allocator_type); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 298 | } | 
|  | 299 |  | 
|  | 300 | template<FindFieldType type, bool access_check> | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 301 | inline ArtField* FindFieldFromCode(uint32_t field_idx, ArtMethod* referrer, | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 302 | Thread* self, size_t expected_size) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 303 | bool is_primitive; | 
|  | 304 | bool is_set; | 
|  | 305 | bool is_static; | 
|  | 306 | switch (type) { | 
|  | 307 | case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break; | 
|  | 308 | case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break; | 
|  | 309 | case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break; | 
|  | 310 | case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break; | 
|  | 311 | case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break; | 
|  | 312 | case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break; | 
|  | 313 | case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break; | 
|  | 314 | case StaticPrimitiveWrite:   // Keep GCC happy by having a default handler, fall-through. | 
|  | 315 | default:                     is_primitive = true;  is_set = true;  is_static = true;  break; | 
|  | 316 | } | 
|  | 317 | ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); | 
| Mathieu Chartier | c785344 | 2015-03-27 14:35:38 -0700 | [diff] [blame] | 318 | ArtField* resolved_field = class_linker->ResolveField(field_idx, referrer, is_static); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 319 | if (UNLIKELY(resolved_field == nullptr)) { | 
|  | 320 | DCHECK(self->IsExceptionPending());  // Throw exception and unwind. | 
|  | 321 | return nullptr;  // Failure. | 
|  | 322 | } | 
|  | 323 | mirror::Class* fields_class = resolved_field->GetDeclaringClass(); | 
|  | 324 | if (access_check) { | 
|  | 325 | if (UNLIKELY(resolved_field->IsStatic() != is_static)) { | 
|  | 326 | ThrowIncompatibleClassChangeErrorField(resolved_field, is_static, referrer); | 
|  | 327 | return nullptr; | 
|  | 328 | } | 
|  | 329 | mirror::Class* referring_class = referrer->GetDeclaringClass(); | 
|  | 330 | if (UNLIKELY(!referring_class->CheckResolvedFieldAccess(fields_class, resolved_field, | 
|  | 331 | field_idx))) { | 
|  | 332 | DCHECK(self->IsExceptionPending());  // Throw exception and unwind. | 
|  | 333 | return nullptr;  // Failure. | 
|  | 334 | } | 
|  | 335 | if (UNLIKELY(is_set && resolved_field->IsFinal() && (fields_class != referring_class))) { | 
|  | 336 | ThrowIllegalAccessErrorFinalField(referrer, resolved_field); | 
|  | 337 | return nullptr;  // Failure. | 
|  | 338 | } else { | 
|  | 339 | if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive || | 
|  | 340 | resolved_field->FieldSize() != expected_size)) { | 
| Nicolas Geoffray | 0aa50ce | 2015-03-10 11:03:29 +0000 | [diff] [blame] | 341 | self->ThrowNewExceptionF("Ljava/lang/NoSuchFieldError;", | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 342 | "Attempted read of %zd-bit %s on field '%s'", | 
|  | 343 | expected_size * (32 / sizeof(int32_t)), | 
|  | 344 | is_primitive ? "primitive" : "non-primitive", | 
|  | 345 | PrettyField(resolved_field, true).c_str()); | 
|  | 346 | return nullptr;  // Failure. | 
|  | 347 | } | 
|  | 348 | } | 
|  | 349 | } | 
|  | 350 | if (!is_static) { | 
|  | 351 | // instance fields must be being accessed on an initialized class | 
|  | 352 | return resolved_field; | 
|  | 353 | } else { | 
|  | 354 | // If the class is initialized we're done. | 
|  | 355 | if (LIKELY(fields_class->IsInitialized())) { | 
|  | 356 | return resolved_field; | 
|  | 357 | } else { | 
|  | 358 | StackHandleScope<1> hs(self); | 
|  | 359 | Handle<mirror::Class> h_class(hs.NewHandle(fields_class)); | 
| Ian Rogers | 7b078e8 | 2014-09-10 14:44:24 -0700 | [diff] [blame] | 360 | if (LIKELY(class_linker->EnsureInitialized(self, h_class, true, true))) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 361 | // Otherwise let's ensure the class is initialized before resolving the field. | 
|  | 362 | return resolved_field; | 
|  | 363 | } | 
|  | 364 | DCHECK(self->IsExceptionPending());  // Throw exception and unwind | 
|  | 365 | return nullptr;  // Failure. | 
|  | 366 | } | 
|  | 367 | } | 
|  | 368 | } | 
|  | 369 |  | 
|  | 370 | // Explicit template declarations of FindFieldFromCode for all field access types. | 
|  | 371 | #define EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, _access_check) \ | 
| Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 372 | template SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE \ | 
| Mathieu Chartier | c785344 | 2015-03-27 14:35:38 -0700 | [diff] [blame] | 373 | ArtField* FindFieldFromCode<_type, _access_check>(uint32_t field_idx, \ | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 374 | ArtMethod* referrer, \ | 
|  | 375 | Thread* self, size_t expected_size) \ | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 376 |  | 
|  | 377 | #define EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \ | 
|  | 378 | EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, false); \ | 
|  | 379 | EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL(_type, true) | 
|  | 380 |  | 
|  | 381 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectRead); | 
|  | 382 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstanceObjectWrite); | 
|  | 383 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveRead); | 
|  | 384 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(InstancePrimitiveWrite); | 
|  | 385 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectRead); | 
|  | 386 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticObjectWrite); | 
|  | 387 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveRead); | 
|  | 388 | EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL(StaticPrimitiveWrite); | 
|  | 389 |  | 
|  | 390 | #undef EXPLICIT_FIND_FIELD_FROM_CODE_TYPED_TEMPLATE_DECL | 
|  | 391 | #undef EXPLICIT_FIND_FIELD_FROM_CODE_TEMPLATE_DECL | 
|  | 392 |  | 
|  | 393 | template<InvokeType type, bool access_check> | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 394 | inline ArtMethod* FindMethodFromCode(uint32_t method_idx, mirror::Object** this_object, | 
| Andreas Gampe | 3a35714 | 2015-08-07 17:20:11 -0700 | [diff] [blame] | 395 | ArtMethod* referrer, Thread* self) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 396 | ClassLinker* const class_linker = Runtime::Current()->GetClassLinker(); | 
| Andreas Gampe | 3a35714 | 2015-08-07 17:20:11 -0700 | [diff] [blame] | 397 | ArtMethod* resolved_method = class_linker->GetResolvedMethod(method_idx, referrer); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 398 | if (resolved_method == nullptr) { | 
|  | 399 | StackHandleScope<1> hs(self); | 
|  | 400 | mirror::Object* null_this = nullptr; | 
|  | 401 | HandleWrapper<mirror::Object> h_this( | 
|  | 402 | hs.NewHandleWrapper(type == kStatic ? &null_this : this_object)); | 
| Andreas Gampe | 3a35714 | 2015-08-07 17:20:11 -0700 | [diff] [blame] | 403 | resolved_method = class_linker->ResolveMethod(self, method_idx, referrer, type); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 404 | } | 
|  | 405 | if (UNLIKELY(resolved_method == nullptr)) { | 
|  | 406 | DCHECK(self->IsExceptionPending());  // Throw exception and unwind. | 
|  | 407 | return nullptr;  // Failure. | 
|  | 408 | } else if (UNLIKELY(*this_object == nullptr && type != kStatic)) { | 
|  | 409 | // Maintain interpreter-like semantics where NullPointerException is thrown | 
|  | 410 | // after potential NoSuchMethodError from class linker. | 
| Nicolas Geoffray | 0aa50ce | 2015-03-10 11:03:29 +0000 | [diff] [blame] | 411 | ThrowNullPointerExceptionForMethodAccess(method_idx, type); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 412 | return nullptr;  // Failure. | 
|  | 413 | } else if (access_check) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 414 | mirror::Class* methods_class = resolved_method->GetDeclaringClass(); | 
| Andreas Gampe | 3a35714 | 2015-08-07 17:20:11 -0700 | [diff] [blame] | 415 | mirror::Class* referring_class = referrer->GetDeclaringClass(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 416 | bool can_access_resolved_method = | 
|  | 417 | referring_class->CheckResolvedMethodAccess<type>(methods_class, resolved_method, | 
|  | 418 | method_idx); | 
|  | 419 | if (UNLIKELY(!can_access_resolved_method)) { | 
|  | 420 | DCHECK(self->IsExceptionPending());  // Throw exception and unwind. | 
|  | 421 | return nullptr;  // Failure. | 
|  | 422 | } | 
| Nicolas Geoffray | 470d54f | 2015-10-02 17:14:53 +0100 | [diff] [blame^] | 423 | // Incompatible class change should have been handled in resolve method. | 
|  | 424 | if (UNLIKELY(resolved_method->CheckIncompatibleClassChange(type))) { | 
|  | 425 | ThrowIncompatibleClassChangeError(type, resolved_method->GetInvokeType(), resolved_method, | 
|  | 426 | referrer); | 
|  | 427 | return nullptr;  // Failure. | 
|  | 428 | } | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 429 | } | 
|  | 430 | switch (type) { | 
|  | 431 | case kStatic: | 
|  | 432 | case kDirect: | 
|  | 433 | return resolved_method; | 
|  | 434 | case kVirtual: { | 
| Mingyao Yang | 2cdbad7 | 2014-07-16 10:44:41 -0700 | [diff] [blame] | 435 | mirror::Class* klass = (*this_object)->GetClass(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 436 | uint16_t vtable_index = resolved_method->GetMethodIndex(); | 
|  | 437 | if (access_check && | 
| Mingyao Yang | 2cdbad7 | 2014-07-16 10:44:41 -0700 | [diff] [blame] | 438 | (!klass->HasVTable() || | 
|  | 439 | vtable_index >= static_cast<uint32_t>(klass->GetVTableLength()))) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 440 | // Behavior to agree with that of the verifier. | 
|  | 441 | ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), | 
|  | 442 | resolved_method->GetName(), resolved_method->GetSignature()); | 
|  | 443 | return nullptr;  // Failure. | 
|  | 444 | } | 
| Mingyao Yang | 2cdbad7 | 2014-07-16 10:44:41 -0700 | [diff] [blame] | 445 | DCHECK(klass->HasVTable()) << PrettyClass(klass); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 446 | return klass->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize()); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 447 | } | 
|  | 448 | case kSuper: { | 
| Andreas Gampe | 3a35714 | 2015-08-07 17:20:11 -0700 | [diff] [blame] | 449 | mirror::Class* super_class = referrer->GetDeclaringClass()->GetSuperClass(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 450 | uint16_t vtable_index = resolved_method->GetMethodIndex(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 451 | if (access_check) { | 
|  | 452 | // Check existence of super class. | 
| Mingyao Yang | 2cdbad7 | 2014-07-16 10:44:41 -0700 | [diff] [blame] | 453 | if (super_class == nullptr || !super_class->HasVTable() || | 
|  | 454 | vtable_index >= static_cast<uint32_t>(super_class->GetVTableLength())) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 455 | // Behavior to agree with that of the verifier. | 
|  | 456 | ThrowNoSuchMethodError(type, resolved_method->GetDeclaringClass(), | 
|  | 457 | resolved_method->GetName(), resolved_method->GetSignature()); | 
|  | 458 | return nullptr;  // Failure. | 
|  | 459 | } | 
|  | 460 | } else { | 
|  | 461 | // Super class must exist. | 
|  | 462 | DCHECK(super_class != nullptr); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 463 | } | 
| Mingyao Yang | 2cdbad7 | 2014-07-16 10:44:41 -0700 | [diff] [blame] | 464 | DCHECK(super_class->HasVTable()); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 465 | return super_class->GetVTableEntry(vtable_index, class_linker->GetImagePointerSize()); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 466 | } | 
|  | 467 | case kInterface: { | 
|  | 468 | uint32_t imt_index = resolved_method->GetDexMethodIndex() % mirror::Class::kImtSize; | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 469 | ArtMethod* imt_method = (*this_object)->GetClass()->GetEmbeddedImTableEntry( | 
|  | 470 | imt_index, class_linker->GetImagePointerSize()); | 
| Mathieu Chartier | 2d2621a | 2014-10-23 16:48:06 -0700 | [diff] [blame] | 471 | if (!imt_method->IsImtConflictMethod() && !imt_method->IsImtUnimplementedMethod()) { | 
|  | 472 | if (kIsDebugBuild) { | 
|  | 473 | mirror::Class* klass = (*this_object)->GetClass(); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 474 | ArtMethod* method = klass->FindVirtualMethodForInterface( | 
|  | 475 | resolved_method, class_linker->GetImagePointerSize()); | 
| Mathieu Chartier | 2d2621a | 2014-10-23 16:48:06 -0700 | [diff] [blame] | 476 | CHECK_EQ(imt_method, method) << PrettyMethod(resolved_method) << " / " << | 
|  | 477 | PrettyMethod(imt_method) << " / " << PrettyMethod(method) << " / " << | 
|  | 478 | PrettyClass(klass); | 
|  | 479 | } | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 480 | return imt_method; | 
|  | 481 | } else { | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 482 | ArtMethod* interface_method = (*this_object)->GetClass()->FindVirtualMethodForInterface( | 
|  | 483 | resolved_method, class_linker->GetImagePointerSize()); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 484 | if (UNLIKELY(interface_method == nullptr)) { | 
|  | 485 | ThrowIncompatibleClassChangeErrorClassForInterfaceDispatch(resolved_method, | 
| Andreas Gampe | 3a35714 | 2015-08-07 17:20:11 -0700 | [diff] [blame] | 486 | *this_object, referrer); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 487 | return nullptr;  // Failure. | 
|  | 488 | } | 
|  | 489 | return interface_method; | 
|  | 490 | } | 
|  | 491 | } | 
|  | 492 | default: | 
|  | 493 | LOG(FATAL) << "Unknown invoke type " << type; | 
|  | 494 | return nullptr;  // Failure. | 
|  | 495 | } | 
|  | 496 | } | 
|  | 497 |  | 
// Explicit template declarations of FindMethodFromCode for all invoke types.
// Each declaration names a concrete <type, access_check> specialization so the
// quick entrypoints can call it; both the checked (access_check == true) and
// unchecked variants are produced for every invoke type.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, _access_check)                 \
  template SHARED_REQUIRES(Locks::mutator_lock_) ALWAYS_INLINE                       \
  ArtMethod* FindMethodFromCode<_type, _access_check>(uint32_t method_idx,         \
                                                      mirror::Object** this_object, \
                                                      ArtMethod* referrer, \
                                                      Thread* self)
// Expands to the declaration pair (access_check false, then true) for one type.
#define EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(_type) \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, false);   \
    EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL(_type, true)

EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kStatic);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kDirect);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kVirtual);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kSuper);
EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL(kInterface);

// The helper macros are local to this header; do not leak them.
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TYPED_TEMPLATE_DECL
#undef EXPLICIT_FIND_METHOD_FROM_CODE_TEMPLATE_DECL
|  | 517 |  | 
|  | 518 | // Fast path field resolution that can't initialize classes or throw exceptions. | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 519 | inline ArtField* FindFieldFast(uint32_t field_idx, ArtMethod* referrer, FindFieldType type, | 
|  | 520 | size_t expected_size) { | 
| Mathieu Chartier | c785344 | 2015-03-27 14:35:38 -0700 | [diff] [blame] | 521 | ArtField* resolved_field = | 
|  | 522 | referrer->GetDeclaringClass()->GetDexCache()->GetResolvedField(field_idx, sizeof(void*)); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 523 | if (UNLIKELY(resolved_field == nullptr)) { | 
|  | 524 | return nullptr; | 
|  | 525 | } | 
|  | 526 | // Check for incompatible class change. | 
|  | 527 | bool is_primitive; | 
|  | 528 | bool is_set; | 
|  | 529 | bool is_static; | 
|  | 530 | switch (type) { | 
|  | 531 | case InstanceObjectRead:     is_primitive = false; is_set = false; is_static = false; break; | 
|  | 532 | case InstanceObjectWrite:    is_primitive = false; is_set = true;  is_static = false; break; | 
|  | 533 | case InstancePrimitiveRead:  is_primitive = true;  is_set = false; is_static = false; break; | 
|  | 534 | case InstancePrimitiveWrite: is_primitive = true;  is_set = true;  is_static = false; break; | 
|  | 535 | case StaticObjectRead:       is_primitive = false; is_set = false; is_static = true;  break; | 
|  | 536 | case StaticObjectWrite:      is_primitive = false; is_set = true;  is_static = true;  break; | 
|  | 537 | case StaticPrimitiveRead:    is_primitive = true;  is_set = false; is_static = true;  break; | 
|  | 538 | case StaticPrimitiveWrite:   is_primitive = true;  is_set = true;  is_static = true;  break; | 
|  | 539 | default: | 
| Ian Rogers | 2c4257b | 2014-10-24 14:20:06 -0700 | [diff] [blame] | 540 | LOG(FATAL) << "UNREACHABLE"; | 
|  | 541 | UNREACHABLE(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 542 | } | 
|  | 543 | if (UNLIKELY(resolved_field->IsStatic() != is_static)) { | 
|  | 544 | // Incompatible class change. | 
|  | 545 | return nullptr; | 
|  | 546 | } | 
|  | 547 | mirror::Class* fields_class = resolved_field->GetDeclaringClass(); | 
|  | 548 | if (is_static) { | 
|  | 549 | // Check class is initialized else fail so that we can contend to initialize the class with | 
|  | 550 | // other threads that may be racing to do this. | 
|  | 551 | if (UNLIKELY(!fields_class->IsInitialized())) { | 
|  | 552 | return nullptr; | 
|  | 553 | } | 
|  | 554 | } | 
|  | 555 | mirror::Class* referring_class = referrer->GetDeclaringClass(); | 
|  | 556 | if (UNLIKELY(!referring_class->CanAccess(fields_class) || | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 557 | !referring_class->CanAccessMember(fields_class, resolved_field->GetAccessFlags()) || | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 558 | (is_set && resolved_field->IsFinal() && (fields_class != referring_class)))) { | 
|  | 559 | // Illegal access. | 
|  | 560 | return nullptr; | 
|  | 561 | } | 
|  | 562 | if (UNLIKELY(resolved_field->IsPrimitiveType() != is_primitive || | 
|  | 563 | resolved_field->FieldSize() != expected_size)) { | 
|  | 564 | return nullptr; | 
|  | 565 | } | 
|  | 566 | return resolved_field; | 
|  | 567 | } | 
|  | 568 |  | 
|  | 569 | // Fast path method resolution that can't throw exceptions. | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 570 | inline ArtMethod* FindMethodFast(uint32_t method_idx, mirror::Object* this_object, | 
|  | 571 | ArtMethod* referrer, bool access_check, InvokeType type) { | 
| Mathieu Chartier | 2cebb24 | 2015-04-21 16:50:40 -0700 | [diff] [blame] | 572 | if (UNLIKELY(this_object == nullptr && type != kStatic)) { | 
|  | 573 | return nullptr; | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 574 | } | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 575 | ArtMethod* resolved_method = | 
|  | 576 | referrer->GetDeclaringClass()->GetDexCache()->GetResolvedMethod(method_idx, sizeof(void*)); | 
| Mathieu Chartier | 2cebb24 | 2015-04-21 16:50:40 -0700 | [diff] [blame] | 577 | if (UNLIKELY(resolved_method == nullptr)) { | 
|  | 578 | return nullptr; | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 579 | } | 
|  | 580 | if (access_check) { | 
|  | 581 | // Check for incompatible class change errors and access. | 
|  | 582 | bool icce = resolved_method->CheckIncompatibleClassChange(type); | 
|  | 583 | if (UNLIKELY(icce)) { | 
| Mathieu Chartier | 2cebb24 | 2015-04-21 16:50:40 -0700 | [diff] [blame] | 584 | return nullptr; | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 585 | } | 
|  | 586 | mirror::Class* methods_class = resolved_method->GetDeclaringClass(); | 
|  | 587 | mirror::Class* referring_class = referrer->GetDeclaringClass(); | 
|  | 588 | if (UNLIKELY(!referring_class->CanAccess(methods_class) || | 
|  | 589 | !referring_class->CanAccessMember(methods_class, | 
|  | 590 | resolved_method->GetAccessFlags()))) { | 
|  | 591 | // Potential illegal access, may need to refine the method's class. | 
| Mathieu Chartier | 2cebb24 | 2015-04-21 16:50:40 -0700 | [diff] [blame] | 592 | return nullptr; | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 593 | } | 
|  | 594 | } | 
|  | 595 | if (type == kInterface) {  // Most common form of slow path dispatch. | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 596 | return this_object->GetClass()->FindVirtualMethodForInterface(resolved_method, sizeof(void*)); | 
| Jeff Hao | 207a37d | 2014-10-29 17:24:25 -0700 | [diff] [blame] | 597 | } else if (type == kStatic || type == kDirect) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 598 | return resolved_method; | 
|  | 599 | } else if (type == kSuper) { | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 600 | return referrer->GetDeclaringClass()->GetSuperClass()->GetVTableEntry( | 
|  | 601 | resolved_method->GetMethodIndex(), sizeof(void*)); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 602 | } else { | 
|  | 603 | DCHECK(type == kVirtual); | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 604 | return this_object->GetClass()->GetVTableEntry( | 
|  | 605 | resolved_method->GetMethodIndex(), sizeof(void*)); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 606 | } | 
|  | 607 | } | 
|  | 608 |  | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 609 | inline mirror::Class* ResolveVerifyAndClinit(uint32_t type_idx, ArtMethod* referrer, Thread* self, | 
|  | 610 | bool can_run_clinit, bool verify_access) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 611 | ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); | 
|  | 612 | mirror::Class* klass = class_linker->ResolveType(type_idx, referrer); | 
|  | 613 | if (UNLIKELY(klass == nullptr)) { | 
|  | 614 | CHECK(self->IsExceptionPending()); | 
|  | 615 | return nullptr;  // Failure - Indicate to caller to deliver exception | 
|  | 616 | } | 
|  | 617 | // Perform access check if necessary. | 
|  | 618 | mirror::Class* referring_class = referrer->GetDeclaringClass(); | 
|  | 619 | if (verify_access && UNLIKELY(!referring_class->CanAccess(klass))) { | 
|  | 620 | ThrowIllegalAccessErrorClass(referring_class, klass); | 
|  | 621 | return nullptr;  // Failure - Indicate to caller to deliver exception | 
|  | 622 | } | 
|  | 623 | // If we're just implementing const-class, we shouldn't call <clinit>. | 
|  | 624 | if (!can_run_clinit) { | 
|  | 625 | return klass; | 
|  | 626 | } | 
|  | 627 | // If we are the <clinit> of this class, just return our storage. | 
|  | 628 | // | 
|  | 629 | // Do not set the DexCache InitializedStaticStorage, since that implies <clinit> has finished | 
|  | 630 | // running. | 
|  | 631 | if (klass == referring_class && referrer->IsConstructor() && referrer->IsStatic()) { | 
|  | 632 | return klass; | 
|  | 633 | } | 
|  | 634 | StackHandleScope<1> hs(self); | 
|  | 635 | Handle<mirror::Class> h_class(hs.NewHandle(klass)); | 
| Ian Rogers | 7b078e8 | 2014-09-10 14:44:24 -0700 | [diff] [blame] | 636 | if (!class_linker->EnsureInitialized(self, h_class, true, true)) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 637 | CHECK(self->IsExceptionPending()); | 
|  | 638 | return nullptr;  // Failure - Indicate to caller to deliver exception | 
|  | 639 | } | 
|  | 640 | return h_class.Get(); | 
|  | 641 | } | 
|  | 642 |  | 
| Mathieu Chartier | e401d14 | 2015-04-22 13:56:20 -0700 | [diff] [blame] | 643 | inline mirror::String* ResolveStringFromCode(ArtMethod* referrer, uint32_t string_idx) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 644 | ClassLinker* class_linker = Runtime::Current()->GetClassLinker(); | 
|  | 645 | return class_linker->ResolveString(string_idx, referrer); | 
|  | 646 | } | 
|  | 647 |  | 
| Andreas Gampe | 9f612ff | 2014-11-24 13:42:22 -0800 | [diff] [blame] | 648 | inline void UnlockJniSynchronizedMethod(jobject locked, Thread* self) { | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 649 | // Save any pending exception over monitor exit call. | 
| Mathieu Chartier | 2cebb24 | 2015-04-21 16:50:40 -0700 | [diff] [blame] | 650 | mirror::Throwable* saved_exception = nullptr; | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 651 | if (UNLIKELY(self->IsExceptionPending())) { | 
| Nicolas Geoffray | 14691c5 | 2015-03-05 10:40:17 +0000 | [diff] [blame] | 652 | saved_exception = self->GetException(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 653 | self->ClearException(); | 
|  | 654 | } | 
|  | 655 | // Decode locked object and unlock, before popping local references. | 
|  | 656 | self->DecodeJObject(locked)->MonitorExit(self); | 
|  | 657 | if (UNLIKELY(self->IsExceptionPending())) { | 
|  | 658 | LOG(FATAL) << "Synchronized JNI code returning with an exception:\n" | 
|  | 659 | << saved_exception->Dump() | 
|  | 660 | << "\nEncountered second exception during implicit MonitorExit:\n" | 
| Nicolas Geoffray | 14691c5 | 2015-03-05 10:40:17 +0000 | [diff] [blame] | 661 | << self->GetException()->Dump(); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 662 | } | 
|  | 663 | // Restore pending exception. | 
| Mathieu Chartier | 2cebb24 | 2015-04-21 16:50:40 -0700 | [diff] [blame] | 664 | if (saved_exception != nullptr) { | 
| Nicolas Geoffray | 14691c5 | 2015-03-05 10:40:17 +0000 | [diff] [blame] | 665 | self->SetException(saved_exception); | 
| Mingyao Yang | 98d1cc8 | 2014-05-15 17:02:16 -0700 | [diff] [blame] | 666 | } | 
|  | 667 | } | 
|  | 668 |  | 
// Converts a floating-point value to an integral type with Java semantics:
// truncation toward zero, NaN maps to 0, and out-of-range values (including
// infinities) saturate to the integral type's min/max.
template <typename INT_TYPE, typename FLOAT_TYPE>
inline INT_TYPE art_float_to_integral(FLOAT_TYPE f) {
  const INT_TYPE max_int = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::max());
  const INT_TYPE min_int = static_cast<INT_TYPE>(std::numeric_limits<INT_TYPE>::min());
  const FLOAT_TYPE max_int_as_float = static_cast<FLOAT_TYPE>(max_int);
  const FLOAT_TYPE min_int_as_float = static_cast<FLOAT_TYPE>(min_int);
  if (f != f) {
    return 0;  // f != f implies NaN.
  }
  if (!(f > min_int_as_float)) {
    return min_int;  // At or below the representable minimum (incl. -inf).
  }
  if (!(f < max_int_as_float)) {
    return max_int;  // At or above the representable maximum (incl. +inf).
  }
  return static_cast<INT_TYPE>(f);  // In range: plain truncating conversion.
}
|  | 685 |  | 
|  | 686 | }  // namespace art | 
|  | 687 |  | 
|  | 688 | #endif  // ART_RUNTIME_ENTRYPOINTS_ENTRYPOINT_UTILS_INL_H_ |