blob: b36922e10d03078a15c0e2576262be109c0122c8 [file] [log] [blame]
/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16
17#ifndef ART_SRC_SCOPED_THREAD_STATE_CHANGE_H_
18#define ART_SRC_SCOPED_THREAD_STATE_CHANGE_H_
19
20#include "casts.h"
Ian Rogers81d425b2012-09-27 16:03:43 -070021#include "jni_internal.h"
Ian Rogers00f7d0e2012-07-19 15:28:27 -070022#include "thread.h"
23
24namespace art {
25
// Scoped change into and out of a particular state. Handles Runnable transitions that require
// more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
// ScopedObjectAccess are used to handle the change into Runnable to get direct access to objects,
// the unchecked variant doesn't aid annotalysis.
class ScopedThreadStateChange {
 public:
  // Transitions |self| from its current state into |new_thread_state| for the lifetime of this
  // object; the destructor restores the state captured at construction. |self| may be NULL only
  // while the runtime has not started or is shutting down.
  ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
    if (self_ == NULL) {
      // Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
      old_thread_state_ = kTerminated;
      CHECK(!Runtime::Current()->IsStarted() || Runtime::Current()->IsShuttingDown());
    } else {
      bool runnable_transition;
      DCHECK_EQ(self, Thread::Current());
      // Read state without locks, ok as state is effectively thread local and we're not interested
      // in the suspend count (this will be handled in the runnable transitions).
      old_thread_state_ = self->GetState();
      runnable_transition = old_thread_state_ == kRunnable || new_thread_state == kRunnable;
      if (!runnable_transition) {
        // A suspended transition to another effectively suspended transition, ok to use Unsafe.
        self_->SetState(new_thread_state);
      }
      if (runnable_transition && old_thread_state_ != new_thread_state) {
        if (new_thread_state == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else {
          // Only one side of a runnable transition can be non-Runnable, so the old state
          // must have been Runnable here.
          DCHECK_EQ(old_thread_state_, kRunnable);
          self_->TransitionFromRunnableToSuspended(new_thread_state);
        }
      }
    }
  }

  // Restores the thread state captured at construction, applying the same runnable/suspended
  // transition rules as the constructor in reverse.
  ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) {
    if (self_ == NULL) {
      if (!expected_has_no_thread_) {
        // A NULL thread was only tolerated at construction because the runtime hadn't started
        // or was going away; re-check that this is still a shutdown scenario.
        CHECK(Runtime::Current()->IsShuttingDown());
      }
    } else {
      if (old_thread_state_ != thread_state_) {
        if (old_thread_state_ == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else if (thread_state_ == kRunnable) {
          self_->TransitionFromRunnableToSuspended(old_thread_state_);
        } else {
          // A suspended transition to another effectively suspended transition, ok to use Unsafe.
          self_->SetState(old_thread_state_);
        }
      }
    }
  }

  // The thread whose state this object changed; NULL for the unattached-thread case.
  Thread* Self() const {
    return self_;
  }

 protected:
  // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
  ScopedThreadStateChange()
      : self_(NULL), thread_state_(kTerminated), old_thread_state_(kTerminated),
        expected_has_no_thread_(true) {}

  Thread* const self_;              // Thread being transitioned (may be NULL, see above).
  const ThreadState thread_state_;  // State held while this object is in scope.

 private:
  ThreadState old_thread_state_;       // State to restore in the destructor.
  const bool expected_has_no_thread_;  // True only for the protected no-thread constructor.

  DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
};
99
// Entry/exit processing for transitions from Native to Runnable (ie within JNI functions).
//
// This class performs the necessary thread state switching to and from Runnable and lets us
// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
// is also manipulating the Object.
//
// The destructor transitions back to the previous thread state, typically Native. In this state
// GC and thread suspension may occur.
//
// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a share of
// the mutator_lock_ will be acquired on construction.
class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
 public:
  // Transitions to Runnable using the thread associated with |env| (via ThreadForEnv, which can
  // repair wrong-thread JNIEnv use when the app-JNI-bugs workaround is enabled).
  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
        env_(reinterpret_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
    self_->VerifyStack();
  }

  // Transitions |self| to Runnable; aborts (unless the app-JNI-bugs workaround is active) if
  // |self| is not actually the calling thread.
  explicit ScopedObjectAccessUnchecked(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : ScopedThreadStateChange(self, kRunnable),
        env_(reinterpret_cast<JNIEnvExt*>(self->GetJniEnv())),
        vm_(env_ != NULL ? env_->vm : NULL) {
    if (Vm() != NULL && !Vm()->work_around_app_jni_bugs && self != Thread::Current()) {
      UnexpectedThreads(self, Thread::Current());
    }
    self_->VerifyStack();
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessUnchecked(JavaVM* vm)
      : ScopedThreadStateChange(), env_(NULL), vm_(reinterpret_cast<JavaVMExt*>(vm)) {}

  // The full JNIEnv (NULL for the JavaVM-only constructor).
  JNIEnvExt* Env() const {
    return env_;
  }

  // The full JavaVM (may be NULL if the thread had no JNIEnv).
  JavaVMExt* Vm() const {
    return vm_;
  }

  /*
   * Add a local reference for an object to the indirect reference table associated with the
   * current stack frame. When the native function returns, the reference will be discarded.
   * Part of the ScopedJniThreadState as native code shouldn't be working on raw Object* without
   * having transitioned its state.
   *
   * We need to allow the same reference to be added multiple times.
   *
   * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
   * it's best if we don't grab a mutex.
   *
   * Returns the local reference (currently just the same pointer that was
   * passed in), or NULL on failure.
   */
  template<typename T>
  T AddLocalReference(Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    if (obj == NULL) {
      return NULL;
    }

    // Guard against handing out a pointer that still carries the debug fill pattern.
    DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);

    IndirectReferenceTable& locals = Env()->locals;

    uint32_t cookie = Env()->local_ref_cookie;
    IndirectRef ref = locals.Add(cookie, obj);

#if 0  // TODO: fix this to understand PushLocalFrame, so we can turn it on.
    if (Env()->check_jni) {
      size_t entry_count = locals.Capacity();
      if (entry_count > 16) {
        LOG(WARNING) << "Warning: more than 16 JNI local references: "
                     << entry_count << " (most recent was a " << PrettyTypeOf(obj) << ")\n"
                     << Dumpable<IndirectReferenceTable>(locals);
        // TODO: LOG(FATAL) in a later release?
      }
    }
#endif

    if (Vm()->work_around_app_jni_bugs) {
      // Hand out direct pointers to support broken old apps.
      return reinterpret_cast<T>(obj);
    }

    return reinterpret_cast<T>(ref);
  }

  // Decodes |obj| (a local, global or weak-global reference) into a raw Object pointer.
  // Only valid while Runnable with a share of the mutator lock held.
  template<typename T>
  T Decode(jobject obj) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    return down_cast<T>(Self()->DecodeJObject(obj));
  }

  // Converts a jfieldID back into the Field* it encodes (currently a plain pointer cast).
  Field* DecodeField(jfieldID fid) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    // TODO: we should make these unique weak globals if Field instances can ever move.
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<Field*>(fid);
  }

  // Encodes |field| as an opaque jfieldID (currently a plain pointer cast).
  jfieldID EncodeField(Field* field) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<jfieldID>(field);
  }

  // Converts a jmethodID back into the AbstractMethod* it encodes (plain pointer cast).
  AbstractMethod* DecodeMethod(jmethodID mid) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    // TODO: we should make these unique weak globals if Method instances can ever move.
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<AbstractMethod*>(mid);
  }

  // Encodes |method| as an opaque jmethodID (plain pointer cast).
  jmethodID EncodeMethod(AbstractMethod* method) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<jmethodID>(method);
  }

 private:
  // Works out which Thread the given JNIEnv belongs to. With the app-JNI-bugs workaround the
  // current thread is used regardless; otherwise a mismatch between the JNIEnv's thread and the
  // calling thread triggers UnexpectedThreads.
  static Thread* ThreadForEnv(JNIEnv* env) {
    JNIEnvExt* full_env(reinterpret_cast<JNIEnvExt*>(env));
    bool work_around_app_jni_bugs = full_env->vm->work_around_app_jni_bugs;
    Thread* env_self = full_env->self;
    Thread* self = work_around_app_jni_bugs ? Thread::Current() : env_self;
    if (!work_around_app_jni_bugs && self != env_self) {
      UnexpectedThreads(env_self, self);
    }
    return self;
  }

  // Reports a JNIEnv being used on the wrong thread via JniAbortF.
  static void UnexpectedThreads(Thread* found_self, Thread* expected_self) {
    // TODO: pass through function name so we can use it here instead of NULL...
    JniAbortF(NULL, "JNIEnv for %s used on %s",
              found_self != NULL ? ToStr<Thread>(*found_self).c_str() : "NULL",
              expected_self != NULL ? ToStr<Thread>(*expected_self).c_str() : "NULL");

  }

  // The full JNIEnv.
  JNIEnvExt* const env_;
  // The full JavaVM.
  JavaVMExt* const vm_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};
281
282// Annotalysis helping variant of the above.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  // Same behavior as the base class, but annotated (SHARED_LOCK_FUNCTION) so annotalysis knows a
  // share of the mutator lock is held while this object is in scope.
  explicit ScopedObjectAccess(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(env) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  explicit ScopedObjectAccess(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(self) {
    Locks::mutator_lock_->AssertSharedHeld(Self());
  }

  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) {
    // Base class will release share of lock. Invoked after this destructor.
  }

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  // routines operating with just a VM are sound, they are not, but when you have just a VM
  // you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};
314
315} // namespace art
316
317#endif // ART_SRC_SCOPED_THREAD_STATE_CHANGE_H_