/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "art_method-inl.h"
#include "entrypoints/entrypoint_utils-inl.h"
#include "mirror/object-inl.h"
#include "thread-inl.h"
#include "verify_object-inl.h"

namespace art {

extern void ReadBarrierJni(mirror::CompressedReference<mirror::Object>* handle_on_stack,
                           Thread* self ATTRIBUTE_UNUSED) {
  DCHECK(kUseReadBarrier);
  if (kUseBakerReadBarrier) {
    DCHECK(handle_on_stack->AsMirrorPtr() != nullptr)
        << "The class of a static jni call must not be null";
    // Check the mark bit and return early if it's already marked.
    if (LIKELY(handle_on_stack->AsMirrorPtr()->GetMarkBit() != 0)) {
      return;
    }
  }
  // Call the read barrier and update the handle.
  mirror::Object* to_ref = ReadBarrier::BarrierForRoot(handle_on_stack);
  handle_on_stack->Assign(to_ref);
}

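// Note on ReadBarrierJni (above): handle_on_stack is presumably the stack slot in which the
// calling JNI stub spilled the declaring class of a static native method (hence the non-null
// DCHECK). The Baker fast path skips BarrierForRoot() only when the mark bit is already set;
// otherwise the barrier may return a different (to-space) reference, which is why the handle is
// reassigned afterwards.
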
// Called on entry to fast JNI: only start a new local reference segment, no thread state
// transition.
extern uint32_t JniMethodFastStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = env->local_ref_cookie;
  env->local_ref_cookie = env->locals.GetSegmentState();

  if (kIsDebugBuild) {
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsAnnotatedWithFastNative()) << native_method->PrettyMethod();
  }

  return saved_local_ref_cookie;
}

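// A rough sketch of how JniMethodFastStart is assumed to be paired by the stub of a @FastNative
// method (illustrative only; the real sequence is emitted by the JNI compiler):
//
//   uint32_t cookie = JniMethodFastStart(self);   // save the local reference segment state
//   ... call the native code; the thread stays Runnable throughout ...
//   JniMethodFastEnd(cookie, self);               // suspend check if needed, pop local references
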
// Called on entry to JNI: transition out of Runnable and release our share of the mutator_lock_.
extern uint32_t JniMethodStart(Thread* self) {
  JNIEnvExt* env = self->GetJniEnv();
  DCHECK(env != nullptr);
  uint32_t saved_local_ref_cookie = env->local_ref_cookie;
  env->local_ref_cookie = env->locals.GetSegmentState();
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  if (!native_method->IsFastNative()) {
    // When not using fast JNI, transition out of Runnable and drop our share of the mutator lock.
    self->TransitionFromRunnableToSuspended(kNative);
  }
  return saved_local_ref_cookie;
}

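// A rough sketch of how a compiled stub for a normal (non @FastNative) native method is assumed
// to use these entrypoints (illustrative only; the real sequence is emitted by the JNI compiler):
//
//   uint32_t cookie = JniMethodStart(self);   // save the local ref segment state, go to kNative
//   ... call the native function; the thread no longer holds a share of the mutator lock ...
//   JniMethodEnd(cookie, self);               // back to Runnable, then pop local references
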
extern uint32_t JniMethodStartSynchronized(jobject to_lock, Thread* self) {
  self->DecodeJObject(to_lock)->MonitorEnter(self);
  return JniMethodStart(self);
}

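// For synchronized native methods the stub is assumed to pass the monitor object (the receiver,
// or the declaring class for a static method) as to_lock. MonitorEnter() runs before
// JniMethodStart(), i.e. while the thread is still Runnable; the matching exit path is
// JniMethodEndSynchronized() below.
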
// TODO: NO_THREAD_SAFETY_ANALYSIS due to different control paths depending on fast JNI.
static void GoToRunnable(Thread* self) NO_THREAD_SAFETY_ANALYSIS {
  ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
  bool is_fast = native_method->IsFastNative();
  if (!is_fast) {
    self->TransitionFromSuspendedToRunnable();
  } else if (UNLIKELY(self->TestAllFlags())) {
    // In fast JNI mode we never transitioned out of Runnable. Perform a suspend check if a flag
    // is raised.
    DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
    self->CheckSuspend();
  }
}

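// Note on GoToRunnable (above): a fast JNI method kept its share of the mutator lock across the
// native call, so there is no state transition to undo; TestAllFlags() serves as a cheap check
// for any raised suspend/checkpoint flag before taking the slower CheckSuspend() path.
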
static void PopLocalReferences(uint32_t saved_local_ref_cookie, Thread* self)
    REQUIRES_SHARED(Locks::mutator_lock_) {
  JNIEnvExt* env = self->GetJniEnv();
  if (UNLIKELY(env->check_jni)) {
    env->CheckNoHeldMonitors();
  }
  env->locals.SetSegmentState(env->local_ref_cookie);
  env->local_ref_cookie = saved_local_ref_cookie;
  self->PopHandleScope();
}

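// Note on PopLocalReferences (above): the cookie saved by JniMethodStart/JniMethodFastStart is the
// previous segment state of the local reference table. Restoring it frees, in one step, every
// local reference created during the native call; under CheckJNI a check that no monitors are
// still held runs first, and the handle scope pushed for the call is popped as well.
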
// TODO: These should probably be templatized or macro-ized. Otherwise there's just too much
// repetitive boilerplate. (See the sketch after JniMethodEndSynchronized below.)

extern void JniMethodEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  GoToRunnable(self);
  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodFastEnd(uint32_t saved_local_ref_cookie, Thread* self) {
  // Inlined fast version of GoToRunnable(self).

  if (kIsDebugBuild) {
    ArtMethod* native_method = *self->GetManagedStack()->GetTopQuickFrame();
    CHECK(native_method->IsAnnotatedWithFastNative()) << native_method->PrettyMethod();
  }

  if (UNLIKELY(self->TestAllFlags())) {
    // In fast JNI mode we never transitioned out of Runnable. Perform a suspend check if a flag
    // is raised.
    DCHECK(Locks::mutator_lock_->IsSharedHeld(self));
    self->CheckSuspend();
  }

  PopLocalReferences(saved_local_ref_cookie, self);
}

extern void JniMethodEndSynchronized(uint32_t saved_local_ref_cookie,
                                     jobject locked,
                                     Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
  PopLocalReferences(saved_local_ref_cookie, self);
}

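// One possible shape for the templatization suggested in the TODO above (an illustrative sketch
// only, not part of the current code): fold the plain and synchronized End variants into a single
// implementation parameterized on the flavor, keeping the existing symbols as thin wrappers, e.g.
//
//   template <bool kSynchronized>
//   static void JniMethodEndImpl(uint32_t saved_local_ref_cookie, jobject locked, Thread* self) {
//     GoToRunnable(self);
//     if (kSynchronized) {
//       UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
//     }
//     PopLocalReferences(saved_local_ref_cookie, self);
//   }
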
// TODO: JniMethodFastEndWithReference
// (Probably don't need to have a synchronized variant since
// it already has to do atomic operations)

// Common result handling for EndWithReference.
static mirror::Object* JniMethodEndWithReferenceHandleResult(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             Thread* self)
    NO_THREAD_SAFETY_ANALYSIS {
  // Must decode before pop. The 'result' may not be valid in case of an exception, though.
  ObjPtr<mirror::Object> o;
  if (!self->IsExceptionPending()) {
    o = self->DecodeJObject(result);
  }
  PopLocalReferences(saved_local_ref_cookie, self);
  // Process result.
  if (UNLIKELY(self->GetJniEnv()->check_jni)) {
    // CheckReferenceResult can resolve types.
    StackHandleScope<1> hs(self);
    HandleWrapperObjPtr<mirror::Object> h_obj(hs.NewHandleWrapper(&o));
    CheckReferenceResult(h_obj, self);
  }
  VerifyObject(o);
  return o.Ptr();
}

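// Note on JniMethodEndWithReferenceHandleResult (above): the jobject returned by the native code
// must be decoded into a raw mirror::Object* before PopLocalReferences() runs, since popping the
// segment invalidates the local references created during the call. If an exception is pending,
// the result is ignored and null is returned. The decoded pointer becomes the managed return
// value of the stub (an assumption about the caller; the stubs themselves are generated elsewhere).
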
extern mirror::Object* JniMethodEndWithReference(jobject result,
                                                 uint32_t saved_local_ref_cookie,
                                                 Thread* self) {
  GoToRunnable(self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

extern mirror::Object* JniMethodEndWithReferenceSynchronized(jobject result,
                                                             uint32_t saved_local_ref_cookie,
                                                             jobject locked,
                                                             Thread* self) {
  GoToRunnable(self);
  UnlockJniSynchronizedMethod(locked, self);
  return JniMethodEndWithReferenceHandleResult(result, saved_local_ref_cookie, self);
}

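// GenericJniMethodEnd is the common exit path for methods that run through the generic JNI
// trampoline rather than a method-specific compiled stub (an assumption about the caller; the
// trampoline itself lives elsewhere). It merges the End variants above: transition back to
// Runnable for normal natives, unlock the monitor of a synchronized method, pop local references
// (skipped for @CriticalNative, which gets no JNIEnv and so cannot create any), and pack the
// native result into a uint64_t for the caller.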
extern uint64_t GenericJniMethodEnd(Thread* self,
                                    uint32_t saved_local_ref_cookie,
                                    jvalue result,
                                    uint64_t result_f,
                                    ArtMethod* called,
                                    HandleScope* handle_scope)
    // TODO: NO_THREAD_SAFETY_ANALYSIS as GoToRunnable() is NO_THREAD_SAFETY_ANALYSIS.
    NO_THREAD_SAFETY_ANALYSIS {
  bool critical_native = called->IsAnnotatedWithCriticalNative();
  bool fast_native = called->IsAnnotatedWithFastNative();
  bool normal_native = !critical_native && !fast_native;

  // @FastNative and @CriticalNative do not do a state transition.
  if (LIKELY(normal_native)) {
    GoToRunnable(self);
  }
  // We need the mutator lock (i.e., GoToRunnable() must have run for normal natives) before
  // accessing the shorty or the locked object.
  jobject locked = called->IsSynchronized() ? handle_scope->GetHandle(0).ToJObject() : nullptr;
  char return_shorty_char = called->GetShorty()[0];
  if (return_shorty_char == 'L') {
    if (locked != nullptr) {
      DCHECK(normal_native) << " synchronized is not supported for @FastNative";
      UnlockJniSynchronizedMethod(locked, self);
    }
    return reinterpret_cast<uint64_t>(JniMethodEndWithReferenceHandleResult(
        result.l, saved_local_ref_cookie, self));
  } else {
    if (locked != nullptr) {
      DCHECK(normal_native) << " synchronized is not supported for @FastNative";
      UnlockJniSynchronizedMethod(locked, self);  // Must decode before pop.
    }
    if (LIKELY(!critical_native)) {
      PopLocalReferences(saved_local_ref_cookie, self);
    }
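    // The primitive result is packed into the low bits of the returned uint64_t; the caller is
    // assumed to move it into the appropriate managed-code return register. On x86, a 'F' result
    // arrives widened to a double in result_f (presumably due to how the trampoline stores the
    // x87 floating-point return value), so it is narrowed back to float and returned as its raw
    // 32-bit pattern.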
    switch (return_shorty_char) {
      case 'F': {
        if (kRuntimeISA == kX86) {
          // Convert the result back to float.
          double d = bit_cast<double, uint64_t>(result_f);
          return bit_cast<uint32_t, float>(static_cast<float>(d));
        } else {
          return result_f;
        }
      }
      case 'D':
        return result_f;
      case 'Z':
        return result.z;
      case 'B':
        return result.b;
      case 'C':
        return result.c;
      case 'S':
        return result.s;
      case 'I':
        return result.i;
      case 'J':
        return result.j;
      case 'V':
        return 0;
      default:
        LOG(FATAL) << "Unexpected return shorty character " << return_shorty_char;
        return 0;
    }
  }
}

}  // namespace art