blob: b90aa0ec0e33d953ef9fd29096c74985688cb38c [file] [log] [blame]
Ian Rogers00f7d0e2012-07-19 15:28:27 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
Brian Carlstromfc0e3212013-07-17 14:40:12 -070017#ifndef ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
18#define ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_
Ian Rogers00f7d0e2012-07-19 15:28:27 -070019
Elliott Hughes1aa246d2012-12-13 09:29:36 -080020#include "base/casts.h"
Ian Rogers68d8b422014-07-17 11:09:10 -070021#include "java_vm_ext.h"
22#include "jni_env_ext-inl.h"
Mathieu Chartierc7853442015-03-27 14:35:38 -070023#include "art_field.h"
Hiroshi Yamauchi94f7b492014-07-22 18:08:23 -070024#include "read_barrier.h"
Ian Rogers693ff612013-02-01 10:56:12 -080025#include "thread-inl.h"
Mathieu Chartier4e305412014-02-19 10:54:44 -080026#include "verify_object.h"
Ian Rogers00f7d0e2012-07-19 15:28:27 -070027
28namespace art {
29
// Scoped change into and out of a particular state. Handles Runnable transitions that require
// more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
// ScopedObjectAccess are used to handle the change into Runnable to get direct access to objects,
// the unchecked variant doesn't aid annotalysis.
class ScopedThreadStateChange {
 public:
  // Moves |self| from its current state into |new_thread_state|, remembering the old state so the
  // destructor can restore it. A null |self| is tolerated only while the runtime is unstarted or
  // shutting down (CHECKed below).
  ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
    if (UNLIKELY(self_ == nullptr)) {
      // Value chosen arbitrarily and won't be used in the destructor since self_ == null.
      old_thread_state_ = kTerminated;
      Runtime* runtime = Runtime::Current();
      CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
    } else {
      DCHECK_EQ(self, Thread::Current());
      // Read state without locks, ok as state is effectively thread local and we're not interested
      // in the suspend count (this will be handled in the runnable transitions).
      old_thread_state_ = self->GetState();
      if (old_thread_state_ != new_thread_state) {
        if (new_thread_state == kRunnable) {
          // Entering Runnable requires the more complicated suspension checking.
          self_->TransitionFromSuspendedToRunnable();
        } else if (old_thread_state_ == kRunnable) {
          self_->TransitionFromRunnableToSuspended(new_thread_state);
        } else {
          // A suspended transition to another effectively suspended transition, ok to use Unsafe.
          self_->SetState(new_thread_state);
        }
      }
    }
  }

  // Restores the state captured at construction, mirroring the constructor's three transition
  // cases in reverse.
  ~ScopedThreadStateChange() REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
    if (UNLIKELY(self_ == nullptr)) {
      if (!expected_has_no_thread_) {
        // Only the protected default constructor expects no thread; otherwise a null self_ is
        // acceptable only during shutdown.
        Runtime* runtime = Runtime::Current();
        bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr);
        CHECK(shutting_down);
      }
    } else {
      if (old_thread_state_ != thread_state_) {
        if (old_thread_state_ == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else if (thread_state_ == kRunnable) {
          self_->TransitionFromRunnableToSuspended(old_thread_state_);
        } else {
          // A suspended transition to another effectively suspended transition, ok to use Unsafe.
          self_->SetState(old_thread_state_);
        }
      }
    }
  }

  Thread* Self() const {
    return self_;
  }

 protected:
  // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
  ScopedThreadStateChange()
      : self_(nullptr), thread_state_(kTerminated), old_thread_state_(kTerminated),
        expected_has_no_thread_(true) {}

  // Thread whose state is scoped; may be null (see constructor CHECKs).
  Thread* const self_;
  // State held for the lifetime of this object.
  const ThreadState thread_state_;

 private:
  // State to restore in the destructor.
  ThreadState old_thread_state_;
  // True only for the default (unattached-thread) constructor.
  const bool expected_has_no_thread_;

  friend class ScopedObjectAccessUnchecked;
  DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
};
103
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700104// Assumes we are already runnable.
105class ScopedObjectAccessAlreadyRunnable {
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700106 public:
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700107 Thread* Self() const {
108 return self_;
Ian Rogersc0fa3ad2013-02-05 00:11:55 -0800109 }
110
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700111 JNIEnvExt* Env() const {
112 return env_;
113 }
114
115 JavaVMExt* Vm() const {
116 return vm_;
117 }
118
Ian Rogers68d8b422014-07-17 11:09:10 -0700119 bool ForceCopy() const {
120 return vm_->ForceCopy();
121 }
122
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700123 /*
124 * Add a local reference for an object to the indirect reference table associated with the
125 * current stack frame. When the native function returns, the reference will be discarded.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700126 *
Mathieu Chartier2cebb242015-04-21 16:50:40 -0700127 * We need to allow the same reference to be added multiple times, and cope with nullptr.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700128 *
Elliott Hughes9dcd45c2013-07-29 14:40:52 -0700129 * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700130 * it's best if we don't grab a mutex.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700131 */
132 template<typename T>
Mathieu Chartier90443472015-07-16 20:32:27 -0700133 T AddLocalReference(mirror::Object* obj) const SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers1eb512d2013-10-18 15:42:20 -0700134 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700135 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartier50030ef2015-05-08 14:19:26 -0700136 DCHECK_NE(obj, Runtime::Current()->GetClearedJniWeakGlobal());
Mathieu Chartiere1a01532015-05-04 11:46:25 -0700137 return obj == nullptr ? nullptr : Env()->AddLocalReference<T>(obj);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700138 }
139
140 template<typename T>
141 T Decode(jobject obj) const
Mathieu Chartier90443472015-07-16 20:32:27 -0700142 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700143 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700144 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700145 return down_cast<T>(Self()->DecodeJObject(obj));
146 }
147
Mathieu Chartierc7853442015-03-27 14:35:38 -0700148 ArtField* DecodeField(jfieldID fid) const
Mathieu Chartier90443472015-07-16 20:32:27 -0700149 SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700150 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700151 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartierc7853442015-03-27 14:35:38 -0700152 return reinterpret_cast<ArtField*>(fid);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700153 }
154
Mathieu Chartier90443472015-07-16 20:32:27 -0700155 jfieldID EncodeField(ArtField* field) const SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700156 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700157 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700158 return reinterpret_cast<jfieldID>(field);
159 }
160
Mathieu Chartier90443472015-07-16 20:32:27 -0700161 ArtMethod* DecodeMethod(jmethodID mid) const SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700162 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700163 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Mathieu Chartiere401d142015-04-22 13:56:20 -0700164 return reinterpret_cast<ArtMethod*>(mid);
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700165 }
166
Mathieu Chartier90443472015-07-16 20:32:27 -0700167 jmethodID EncodeMethod(ArtMethod* method) const SHARED_REQUIRES(Locks::mutator_lock_) {
Ian Rogers81d425b2012-09-27 16:03:43 -0700168 Locks::mutator_lock_->AssertSharedHeld(Self());
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700169 DCHECK(IsRunnable()); // Don't work with raw objects in non-runnable states.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700170 return reinterpret_cast<jmethodID>(method);
171 }
172
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700173 bool IsRunnable() const {
174 return self_->GetState() == kRunnable;
175 }
176
177 protected:
178 explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
Mathieu Chartier90443472015-07-16 20:32:27 -0700179 REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700180 : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
181 }
182
183 explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
Mathieu Chartier90443472015-07-16 20:32:27 -0700184 REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700185 : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
186 vm_(env_ != nullptr ? env_->vm : nullptr) {
187 }
188
189 // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
190 // change into Runnable or acquire a share on the mutator_lock_.
191 explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
192 : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}
193
194 // Here purely to force inlining.
195 ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
196 }
197
198 // Self thread, can be null.
199 Thread* const self_;
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700200 // The full JNIEnv.
201 JNIEnvExt* const env_;
202 // The full JavaVM.
203 JavaVMExt* const vm_;
Mathieu Chartier2b7c4d12014-05-19 10:52:16 -0700204};
205
206// Entry/exit processing for transitions from Native to Runnable (ie within JNI functions).
207//
208// This class performs the necessary thread state switching to and from Runnable and lets us
209// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
210// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
211// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
212// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
213// is also manipulating the Object.
214//
215// The destructor transitions back to the previous thread state, typically Native. In this state
216// GC and thread suspension may occur.
217//
// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a share of
// the mutator_lock_ will be acquired on construction.
class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
 public:
  // Resolves the thread from |env|, then tsc_ (initialized after the base, per declaration order)
  // transitions it to Runnable; the body then asserts a share of the mutator lock is held.
  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
    Self()->VerifyStack();
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  // As above, for callers that already have the Thread* in hand.
  explicit ScopedObjectAccessUnchecked(Thread* self)
      REQUIRES(!Locks::thread_suspend_count_lock_) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
    Self()->VerifyStack();
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_ (tsc_ is default-constructed).
  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
      : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}

 private:
  // The scoped thread state change makes sure that we are runnable and restores the thread state
  // in the destructor.
  const ScopedThreadStateChange tsc_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};
248
249// Annotalysis helping variant of the above.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  // Identical behavior to ScopedObjectAccessUnchecked(env), but annotated with
  // SHARED_LOCK_FUNCTION so annotalysis sees the share of mutator_lock_ being acquired.
  explicit ScopedObjectAccess(JNIEnv* env)
      REQUIRES(!Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(env) {
  }

  // Identical behavior to ScopedObjectAccessUnchecked(self), with the same annotation as above.
  explicit ScopedObjectAccess(Thread* self)
      REQUIRES(!Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
      : ScopedObjectAccessUnchecked(self) {
  }

  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
    // Base class will release share of lock. Invoked after this destructor.
  }

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  // routines operating with just a VM are sound, they are not, but when you have just a VM
  // you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};
279
280} // namespace art
281
Brian Carlstromfc0e3212013-07-17 14:40:12 -0700282#endif // ART_RUNTIME_SCOPED_THREAD_STATE_CHANGE_H_