/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 16 | |
| 17 | #ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_ |
| 18 | #define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_ |
| 19 | |
| 20 | #include "scoped_thread_state_change.h" |
| 21 | |
Andreas Gampe | 5794381 | 2017-12-06 21:39:13 -0800 | [diff] [blame] | 22 | #include <android-base/logging.h> |
| 23 | |
Andreas Gampe | c15a2f4 | 2017-04-21 12:09:39 -0700 | [diff] [blame] | 24 | #include "base/casts.h" |
Andreas Gampe | 7fbc4a5 | 2018-11-28 08:26:47 -0800 | [diff] [blame] | 25 | #include "base/mutex.h" |
Vladimir Marko | a3ad0cd | 2018-05-04 10:06:38 +0100 | [diff] [blame] | 26 | #include "jni/jni_env_ext-inl.h" |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 27 | #include "obj_ptr-inl.h" |
Andreas Gampe | b486a98 | 2017-06-01 13:45:54 -0700 | [diff] [blame] | 28 | #include "runtime.h" |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 29 | #include "thread-inl.h" |
| 30 | |
| 31 | namespace art { |
| 32 | |
| 33 | inline ScopedThreadStateChange::ScopedThreadStateChange(Thread* self, ThreadState new_thread_state) |
| 34 | : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) { |
| 35 | if (UNLIKELY(self_ == nullptr)) { |
| 36 | // Value chosen arbitrarily and won't be used in the destructor since thread_ == null. |
| 37 | old_thread_state_ = kTerminated; |
| 38 | Runtime* runtime = Runtime::Current(); |
| 39 | CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_)); |
| 40 | } else { |
| 41 | DCHECK_EQ(self, Thread::Current()); |
| 42 | // Read state without locks, ok as state is effectively thread local and we're not interested |
| 43 | // in the suspend count (this will be handled in the runnable transitions). |
| 44 | old_thread_state_ = self->GetState(); |
| 45 | if (old_thread_state_ != new_thread_state) { |
| 46 | if (new_thread_state == kRunnable) { |
| 47 | self_->TransitionFromSuspendedToRunnable(); |
| 48 | } else if (old_thread_state_ == kRunnable) { |
| 49 | self_->TransitionFromRunnableToSuspended(new_thread_state); |
| 50 | } else { |
| 51 | // A suspended transition to another effectively suspended transition, ok to use Unsafe. |
| 52 | self_->SetState(new_thread_state); |
| 53 | } |
| 54 | } |
| 55 | } |
| 56 | } |
| 57 | |
| 58 | inline ScopedThreadStateChange::~ScopedThreadStateChange() { |
| 59 | if (UNLIKELY(self_ == nullptr)) { |
| 60 | if (!expected_has_no_thread_) { |
| 61 | Runtime* runtime = Runtime::Current(); |
| 62 | bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr); |
| 63 | CHECK(shutting_down); |
| 64 | } |
| 65 | } else { |
| 66 | if (old_thread_state_ != thread_state_) { |
| 67 | if (old_thread_state_ == kRunnable) { |
| 68 | self_->TransitionFromSuspendedToRunnable(); |
| 69 | } else if (thread_state_ == kRunnable) { |
| 70 | self_->TransitionFromRunnableToSuspended(old_thread_state_); |
| 71 | } else { |
| 72 | // A suspended transition to another effectively suspended transition, ok to use Unsafe. |
| 73 | self_->SetState(old_thread_state_); |
| 74 | } |
| 75 | } |
| 76 | } |
| 77 | } |
| 78 | |
| 79 | template<typename T> |
Mathieu Chartier | 8778c52 | 2016-10-04 19:06:30 -0700 | [diff] [blame] | 80 | inline T ScopedObjectAccessAlreadyRunnable::AddLocalReference(ObjPtr<mirror::Object> obj) const { |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 81 | Locks::mutator_lock_->AssertSharedHeld(Self()); |
Andreas Gampe | c15a2f4 | 2017-04-21 12:09:39 -0700 | [diff] [blame] | 82 | if (kIsDebugBuild) { |
| 83 | CHECK(IsRunnable()); // Don't work with raw objects in non-runnable states. |
| 84 | DCheckObjIsNotClearedJniWeakGlobal(obj); |
| 85 | } |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 86 | return obj == nullptr ? nullptr : Env()->AddLocalReference<T>(obj); |
| 87 | } |
| 88 | |
Andreas Gampe | c73cb64 | 2017-02-22 10:11:30 -0800 | [diff] [blame] | 89 | template<typename T> |
| 90 | inline ObjPtr<T> ScopedObjectAccessAlreadyRunnable::Decode(jobject obj) const { |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 91 | Locks::mutator_lock_->AssertSharedHeld(Self()); |
| 92 | DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states. |
Andreas Gampe | c73cb64 | 2017-02-22 10:11:30 -0800 | [diff] [blame] | 93 | return ObjPtr<T>::DownCast(Self()->DecodeJObject(obj)); |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 94 | } |
| 95 | |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 96 | inline bool ScopedObjectAccessAlreadyRunnable::IsRunnable() const { |
| 97 | return self_->GetState() == kRunnable; |
| 98 | } |
| 99 | |
| 100 | inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(JNIEnv* env) |
Ian Rogers | 55256cb | 2017-12-21 17:07:11 -0800 | [diff] [blame] | 101 | : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->GetVm()) {} |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 102 | |
| 103 | inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(Thread* self) |
| 104 | : self_(self), |
| 105 | env_(down_cast<JNIEnvExt*>(self->GetJniEnv())), |
Ian Rogers | 55256cb | 2017-12-21 17:07:11 -0800 | [diff] [blame] | 106 | vm_(env_ != nullptr ? env_->GetVm() : nullptr) {} |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 107 | |
| 108 | inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(JNIEnv* env) |
| 109 | : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) { |
| 110 | Self()->VerifyStack(); |
| 111 | Locks::mutator_lock_->AssertSharedHeld(Self()); |
| 112 | } |
| 113 | |
| 114 | inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(Thread* self) |
| 115 | : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) { |
| 116 | Self()->VerifyStack(); |
| 117 | Locks::mutator_lock_->AssertSharedHeld(Self()); |
| 118 | } |
| 119 | |
Andreas Gampe | 90b936d | 2017-01-31 08:58:55 -0800 | [diff] [blame] | 120 | inline ScopedObjectAccess::ScopedObjectAccess(JNIEnv* env) : ScopedObjectAccessUnchecked(env) {} |
| 121 | inline ScopedObjectAccess::ScopedObjectAccess(Thread* self) : ScopedObjectAccessUnchecked(self) {} |
| 122 | inline ScopedObjectAccess::~ScopedObjectAccess() {} |
| 123 | |
Mathieu Chartier | 0795f23 | 2016-09-27 18:43:30 -0700 | [diff] [blame] | 124 | inline ScopedThreadSuspension::ScopedThreadSuspension(Thread* self, ThreadState suspended_state) |
| 125 | : self_(self), suspended_state_(suspended_state) { |
| 126 | DCHECK(self_ != nullptr); |
| 127 | self_->TransitionFromRunnableToSuspended(suspended_state); |
| 128 | } |
| 129 | |
| 130 | inline ScopedThreadSuspension::~ScopedThreadSuspension() { |
| 131 | DCHECK_EQ(self_->GetState(), suspended_state_); |
| 132 | self_->TransitionFromSuspendedToRunnable(); |
| 133 | } |
| 134 | |
| 135 | } // namespace art |
| 136 | |
| 137 | #endif // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_ |