blob: ed3c3845902b9a87b38357443a8991805e3ae519 [file] [log] [blame]
Ian Rogers00f7d0e2012-07-19 15:28:27 -07001/*
2 * Copyright (C) 2012 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#ifndef ART_SRC_SCOPED_THREAD_STATE_CHANGE_H_
18#define ART_SRC_SCOPED_THREAD_STATE_CHANGE_H_
19
20#include "casts.h"
21#include "thread.h"
22
23namespace art {
24
25// Scoped change into and out of a particular state. Handles Runnable transitions that require
26// more complicated suspension checking. The subclasses ScopedObjectAccessUnchecked and
27// ScopedObjectAccess are used to handle the change into Runnable to get direct access to objects,
28// the unchecked variant doesn't aid annotalysis.
class ScopedThreadStateChange {
 public:
  // Moves |self| into |new_thread_state| for the lifetime of this object; the destructor
  // restores the state that was current at construction time.
  ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
    if (self_ == NULL) {
      // Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
      old_thread_state_ = kTerminated;
      // A NULL thread is only tolerated before the runtime has started or while it is
      // shutting down.
      CHECK(!Runtime::Current()->IsStarted() || Runtime::Current()->IsShuttingDown());
    } else {
      bool runnable_transition;
      {
        // Non-runnable-to-non-runnable changes are a simple state store, done while holding
        // the suspend count lock.
        MutexLock mu(*Locks::thread_suspend_count_lock_);
        old_thread_state_ = self->GetState();
        runnable_transition = old_thread_state_ == kRunnable || new_thread_state == kRunnable;
        if (!runnable_transition) {
          self_->SetState(new_thread_state);
        }
      }
      // Transitions into or out of kRunnable require the full suspend-check machinery and
      // must happen outside the suspend count lock.
      if (runnable_transition && old_thread_state_ != new_thread_state) {
        if (new_thread_state == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else {
          DCHECK_EQ(old_thread_state_, kRunnable);
          self_->TransitionFromRunnableToSuspended(new_thread_state);
        }
      }
    }
  }

  // Restores the thread state captured by the constructor, mirroring its three cases:
  // no thread, runnable transition, and plain state store.
  ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) {
    if (self_ == NULL) {
      if (!expected_has_no_thread_) {
        CHECK(Runtime::Current()->IsShuttingDown());
      }
    } else {
      if (old_thread_state_ != thread_state_) {
        if (old_thread_state_ == kRunnable) {
          self_->TransitionFromSuspendedToRunnable();
        } else if (thread_state_ == kRunnable) {
          self_->TransitionFromRunnableToSuspended(old_thread_state_);
        } else {
          // Neither state is kRunnable: a plain store under the suspend count lock suffices.
          MutexLock mu(*Locks::thread_suspend_count_lock_);
          self_->SetState(old_thread_state_);
        }
      }
    }
  }

  // The thread this change applies to; may be NULL (see the default constructor).
  Thread* Self() const {
    return self_;
  }

 protected:
  // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
  ScopedThreadStateChange()
      : self_(NULL), thread_state_(kTerminated), old_thread_state_(kTerminated),
        expected_has_no_thread_(true) {}

  Thread* const self_;               // Thread being transitioned, or NULL when unattached.
  const ThreadState thread_state_;   // State held for the duration of this object's lifetime.

 private:
  ThreadState old_thread_state_;     // State to restore in the destructor.
  const bool expected_has_no_thread_;  // True iff constructed via the no-thread constructor.

  DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
};
97
98// Entry/exit processing for transitions from Native to Runnable (ie within JNI functions).
99//
100// This class performs the necessary thread state switching to and from Runnable and lets us
101// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
102// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
103// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
104// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
105// is also manipulating the Object.
106//
107// The destructor transitions back to the previous thread state, typically Native. In this state
108// GC and thread suspension may occur.
109//
// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a share of
// the mutator_lock_ will be acquired on construction.
class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
 public:
  // Enters kRunnable using the thread associated with |env| (with workarounds for apps that
  // use a JNIEnv on the wrong thread — see ThreadForEnv).
  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
        env_(reinterpret_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
    self_->VerifyStack();
  }

  // Enters kRunnable on |self|, checking (unless the JNI-bug workaround is enabled) that
  // |self| really is the current thread.
  explicit ScopedObjectAccessUnchecked(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      : ScopedThreadStateChange(self, kRunnable),
        env_(reinterpret_cast<JNIEnvExt*>(self->GetJniEnv())),
        vm_(env_ != NULL ? env_->vm : NULL) {
    if (Vm() != NULL && !Vm()->work_around_app_jni_bugs && self != Thread::Current()) {
      UnexpectedThreads(self, Thread::Current());
    }
    self_->VerifyStack();
  }

  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
  // change into Runnable or acquire a share on the mutator_lock_.
  explicit ScopedObjectAccessUnchecked(JavaVM* vm)
      : ScopedThreadStateChange(), env_(NULL), vm_(reinterpret_cast<JavaVMExt*>(vm)) {}

  // The full JNIEnv; NULL when constructed from a bare JavaVM*.
  JNIEnvExt* Env() const {
    return env_;
  }

  // The full JavaVM; may be NULL when constructed from a Thread* without a JNIEnv.
  JavaVMExt* Vm() const {
    return vm_;
  }

  /*
   * Add a local reference for an object to the indirect reference table associated with the
   * current stack frame. When the native function returns, the reference will be discarded.
   * Part of the ScopedJniThreadState as native code shouldn't be working on raw Object* without
   * having transitioned its state.
   *
   * We need to allow the same reference to be added multiple times.
   *
   * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
   * it's best if we don't grab a mutex.
   *
   * Returns the local reference (currently just the same pointer that was
   * passed in), or NULL on failure.
   */
  template<typename T>
  T AddLocalReference(Object* obj) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    if (obj == NULL) {
      return NULL;
    }

    // Catch pointers that still carry the 0xebad.... debug-fill pattern.
    DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);

    IndirectReferenceTable& locals = Env()->locals;

    uint32_t cookie = Env()->local_ref_cookie;
    IndirectRef ref = locals.Add(cookie, obj);

#if 0  // TODO: fix this to understand PushLocalFrame, so we can turn it on.
    if (Env()->check_jni) {
      size_t entry_count = locals.Capacity();
      if (entry_count > 16) {
        LOG(WARNING) << "Warning: more than 16 JNI local references: "
                     << entry_count << " (most recent was a " << PrettyTypeOf(obj) << ")\n"
                     << Dumpable<IndirectReferenceTable>(locals);
        // TODO: LOG(FATAL) in a later release?
      }
    }
#endif

    if (Vm()->work_around_app_jni_bugs) {
      // Hand out direct pointers to support broken old apps.
      return reinterpret_cast<T>(obj);
    }

    return reinterpret_cast<T>(ref);
  }

  // Decodes |obj| (a local, global, or weak-global reference) into a raw Object*, down-cast
  // to T. Requires a share of the mutator lock and the runnable state.
  template<typename T>
  T Decode(jobject obj) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld();
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
    return down_cast<T>(Self()->DecodeJObject(obj));
  }

  // jfieldID is currently just a reinterpreted Field*; see the MOVING_GARBAGE_COLLECTOR note.
  Field* DecodeField(jfieldID fid) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld();
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    // TODO: we should make these unique weak globals if Field instances can ever move.
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<Field*>(fid);
  }

  // Inverse of DecodeField.
  jfieldID EncodeField(Field* field) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld();
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<jfieldID>(field);
  }

  // jmethodID is currently just a reinterpreted Method*; see the MOVING_GARBAGE_COLLECTOR note.
  Method* DecodeMethod(jmethodID mid) const
      LOCKS_EXCLUDED(JavaVMExt::globals_lock,
                     JavaVMExt::weak_globals_lock)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld();
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    // TODO: we should make these unique weak globals if Method instances can ever move.
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<Method*>(mid);
  }

  // Inverse of DecodeMethod.
  jmethodID EncodeMethod(Method* method) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    Locks::mutator_lock_->AssertSharedHeld();
    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
#ifdef MOVING_GARBAGE_COLLECTOR
    UNIMPLEMENTED(WARNING);
#endif
    return reinterpret_cast<jmethodID>(method);
  }

 private:
  // Resolves the Thread* to use for |env|: normally the env's own thread, but the current
  // thread when the app-JNI-bug workaround is on. Aborts (via UnexpectedThreads) when the
  // env is used on the wrong thread without the workaround.
  static Thread* ThreadForEnv(JNIEnv* env) {
    JNIEnvExt* full_env(reinterpret_cast<JNIEnvExt*>(env));
    bool work_around_app_jni_bugs = full_env->vm->work_around_app_jni_bugs;
    Thread* env_self = full_env->self;
    Thread* self = work_around_app_jni_bugs ? Thread::Current() : env_self;
    if (!work_around_app_jni_bugs && self != env_self) {
      UnexpectedThreads(env_self, self);
    }
    return self;
  }

  // Reports a JNIEnv used on a thread other than the one it belongs to.
  static void UnexpectedThreads(Thread* found_self, Thread* expected_self) {
    // TODO: pass through function name so we can use it here instead of NULL...
    JniAbortF(NULL, "JNIEnv for %s used on %s",
              found_self != NULL ? ToStr<Thread>(*found_self).c_str() : "NULL",
              expected_self != NULL ? ToStr<Thread>(*expected_self).c_str() : "NULL");
  }

  // The full JNIEnv.
  JNIEnvExt* const env_;
  // The full JavaVM.
  JavaVMExt* const vm_;

  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
};
279
280// Annotalysis helping variant of the above.
class ScopedObjectAccess : public ScopedObjectAccessUnchecked {
 public:
  // Same behavior as the base class, but the SHARED_LOCK_FUNCTION annotation tells
  // annotalysis that construction acquires a share of the mutator lock.
  explicit ScopedObjectAccess(JNIEnv* env)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(env) {
    Locks::mutator_lock_->AssertSharedHeld();
  }

  explicit ScopedObjectAccess(Thread* self)
      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(self) {
    Locks::mutator_lock_->AssertSharedHeld();
  }

  // Empty body: the annotation records the lock release for annotalysis.
  ~ScopedObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) {
    // Base class will release share of lock. Invoked after this destructor.
  }

 private:
  // TODO: remove this constructor. It is used by check JNI's ScopedCheck to make it believe that
  // routines operating with just a VM are sound, they are not, but when you have just a VM
  // you cannot call the unsound routines.
  explicit ScopedObjectAccess(JavaVM* vm)
      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
      : ScopedObjectAccessUnchecked(vm) {}

  friend class ScopedCheck;
  DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccess);
};
312
313} // namespace art
314
315#endif // ART_SRC_SCOPED_THREAD_STATE_CHANGE_H_