/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_
#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_

#include "scoped_thread_state_change.h"

#include <android-base/logging.h>

#include "base/casts.h"
#include "base/mutex.h"
#include "jni/jni_env_ext-inl.h"
#include "obj_ptr-inl.h"
#include "runtime.h"
#include "thread-inl.h"

namespace art {

inline ScopedThreadStateChange::ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
    : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
  if (UNLIKELY(self_ == nullptr)) {
    // Value chosen arbitrarily; it won't be used in the destructor since self_ == null.
    old_thread_state_ = kTerminated;
    Runtime* runtime = Runtime::Current();
    CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
  } else {
    DCHECK_EQ(self, Thread::Current());
    // Read the state without locks; this is fine because the state is effectively thread-local
    // and we are not interested in the suspend count (that is handled in the runnable
    // transitions).
    old_thread_state_ = self->GetState();
    if (old_thread_state_ != new_thread_state) {
      if (new_thread_state == kRunnable) {
        self_->TransitionFromSuspendedToRunnable();
      } else if (old_thread_state_ == kRunnable) {
        self_->TransitionFromRunnableToSuspended(new_thread_state);
      } else {
        // A transition from one suspended state to another effectively suspended state; it is
        // fine to just set the state.
        self_->SetState(new_thread_state);
      }
    }
  }
}

inline ScopedThreadStateChange::~ScopedThreadStateChange() {
  if (UNLIKELY(self_ == nullptr)) {
    ScopedThreadChangeDestructorCheck();
  } else {
    if (old_thread_state_ != thread_state_) {
      if (old_thread_state_ == kRunnable) {
        self_->TransitionFromSuspendedToRunnable();
      } else if (thread_state_ == kRunnable) {
        self_->TransitionFromRunnableToSuspended(old_thread_state_);
      } else {
        // A transition from one suspended state to another effectively suspended state; it is
        // fine to just set the state.
        self_->SetState(old_thread_state_);
      }
    }
  }
}

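// Illustrative usage sketch (hypothetical caller, not part of this header): move the current
// thread into a non-runnable state for the duration of a blocking operation and rely on the
// destructor to restore the previous state.
//
//   void BlockOnSomething(Thread* self) {
//     ScopedThreadStateChange tsc(self, kWaiting);
//     // ... blocking work while the thread is treated as suspended ...
//   }  // ~ScopedThreadStateChange() restores the old thread state.
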
template<typename T>
inline T ScopedObjectAccessAlreadyRunnable::AddLocalReference(ObjPtr<mirror::Object> obj) const {
  Locks::mutator_lock_->AssertSharedHeld(Self());
  if (kIsDebugBuild) {
    CHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    DCheckObjIsNotClearedJniWeakGlobal(obj);
  }
  return obj == nullptr ? nullptr : Env()->AddLocalReference<T>(obj);
}

template<typename T>
inline ObjPtr<T> ScopedObjectAccessAlreadyRunnable::Decode(jobject obj) const {
  Locks::mutator_lock_->AssertSharedHeld(Self());
  DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
  return ObjPtr<T>::DownCast(Self()->DecodeJObject(obj));
}

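// Illustrative sketch (not part of this header): AddLocalReference and Decode convert between
// managed ObjPtr<> references and JNI handles while the thread is runnable.
//
//   ScopedObjectAccess soa(env);
//   ObjPtr<mirror::Class> klass = soa.Decode<mirror::Class>(java_lang_class);  // jclass -> ObjPtr
//   jobject local_ref = soa.AddLocalReference<jobject>(klass);                 // ObjPtr -> jobject
//
// Here `java_lang_class` stands in for any jclass argument received from JNI.
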
inline bool ScopedObjectAccessAlreadyRunnable::IsRunnable() const {
  return self_->GetState() == kRunnable;
}

inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
    : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->GetVm()) {}

inline ScopedObjectAccessAlreadyRunnable::ScopedObjectAccessAlreadyRunnable(Thread* self)
    : self_(self),
      env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
      vm_(env_ != nullptr ? env_->GetVm() : nullptr) {}

inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(JNIEnv* env)
    : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
  Self()->VerifyStack();
  Locks::mutator_lock_->AssertSharedHeld(Self());
}

inline ScopedObjectAccessUnchecked::ScopedObjectAccessUnchecked(Thread* self)
    : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
  Self()->VerifyStack();
  Locks::mutator_lock_->AssertSharedHeld(Self());
}

inline ScopedObjectAccess::ScopedObjectAccess(JNIEnv* env) : ScopedObjectAccessUnchecked(env) {}
inline ScopedObjectAccess::ScopedObjectAccess(Thread* self) : ScopedObjectAccessUnchecked(self) {}
inline ScopedObjectAccess::~ScopedObjectAccess() {}

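// Illustrative usage sketch (hypothetical JNI entry point, not part of this header): construct a
// ScopedObjectAccess at the top of a native method to transition the thread to kRunnable and hold
// the mutator lock (shared) for the duration of the call.
//
//   static jboolean MyClass_isLoaded(JNIEnv* env, jclass java_class) {
//     ScopedObjectAccess soa(env);
//     ObjPtr<mirror::Class> klass = soa.Decode<mirror::Class>(java_class);
//     return klass != nullptr ? JNI_TRUE : JNI_FALSE;
//   }
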
inline ScopedThreadSuspension::ScopedThreadSuspension(Thread* self, ThreadState suspended_state)
    : self_(self), suspended_state_(suspended_state) {
  DCHECK(self_ != nullptr);
  self_->TransitionFromRunnableToSuspended(suspended_state);
}

inline ScopedThreadSuspension::~ScopedThreadSuspension() {
  DCHECK_EQ(self_->GetState(), suspended_state_);
  self_->TransitionFromSuspendedToRunnable();
}

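// Illustrative usage sketch (not part of this header): ScopedThreadSuspension is the inverse of
// ScopedObjectAccess. Inside a runnable section it temporarily drops to a suspended state, e.g.
// around an operation that may block, and re-enters kRunnable on destruction.
//
//   {
//     ScopedThreadSuspension sts(self, kWaiting);
//     // ... blocking wait; the thread is no longer runnable under the mutator lock ...
//   }  // ~ScopedThreadSuspension() transitions back to kRunnable.
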
}  // namespace art

#endif  // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_INL_H_