Don't get and restore thread state for ScopedFastNativeObjectAccess.

Previously we would transition the thread to Runnable to guarantee safe
fast native object access. However, fast native calls only happen when
the thread is already Runnable, so saving and restoring the thread
state is redundant overhead.
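
The companion ScopedFastNativeObjectAccess helper (not part of this
diff) can then subclass the new ScopedObjectAccessAlreadyRunnable base
and merely assert, rather than establish, the Runnable state. A minimal
sketch, assuming the constructor keeps the existing lock and state
debug checks:

  class ScopedFastNativeObjectAccess : public ScopedObjectAccessAlreadyRunnable {
   public:
    explicit ScopedFastNativeObjectAccess(JNIEnv* env)
        SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
        : ScopedObjectAccessAlreadyRunnable(env) {
      Locks::mutator_lock_->AssertSharedHeld(Self());
      DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
    }

    // Nothing to restore: the thread state was never changed.
    ~ScopedFastNativeObjectAccess() UNLOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE {
    }

   private:
    DISALLOW_COPY_AND_ASSIGN(ScopedFastNativeObjectAccess);
  };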

Change-Id: Ia4c6e4c83d146fe2a988b37b3133ca46b0f0fa42
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index dbd961f..d56495e 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -93,50 +93,15 @@
   ThreadState old_thread_state_;
   const bool expected_has_no_thread_;
 
+  friend class ScopedObjectAccessUnchecked;
   DISALLOW_COPY_AND_ASSIGN(ScopedThreadStateChange);
 };
 
-// Entry/exit processing for transitions from Native to Runnable (ie within JNI functions).
-//
-// This class performs the necessary thread state switching to and from Runnable and lets us
-// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
-// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
-// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
-// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
-// is also manipulating the Object.
-//
-// The destructor transitions back to the previous thread state, typically Native. In this state
-// GC and thread suspension may occur.
-//
-// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared of
-// the mutator_lock_ will be acquired on construction.
-class ScopedObjectAccessUnchecked : public ScopedThreadStateChange {
+// Assumes we are already runnable.
+class ScopedObjectAccessAlreadyRunnable {
  public:
-  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
-      : ScopedThreadStateChange(ThreadForEnv(env), kRunnable),
-        env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
-    self_->VerifyStack();
-    Locks::mutator_lock_->AssertSharedHeld(self_);
-  }
-
-  explicit ScopedObjectAccessUnchecked(Thread* self)
-      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
-      : ScopedThreadStateChange(self, kRunnable),
-        env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
-        vm_(env_ != NULL ? env_->vm : NULL) {
-    self_->VerifyStack();
-    Locks::mutator_lock_->AssertSharedHeld(self_);
-  }
-
-  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
-  // change into Runnable or acquire a share on the mutator_lock_.
-  explicit ScopedObjectAccessUnchecked(JavaVM* vm)
-      : ScopedThreadStateChange(), env_(NULL), vm_(down_cast<JavaVMExt*>(vm)) {}
-
-  // Here purely to force inlining.
-  ~ScopedObjectAccessUnchecked() ALWAYS_INLINE {
-    Locks::mutator_lock_->AssertSharedHeld(self_);
+  Thread* Self() const {
+    return self_;
   }
 
   JNIEnvExt* Env() const {
@@ -159,13 +124,11 @@
   template<typename T>
   T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     if (obj == NULL) {
       return NULL;
     }
-
     DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
-
     return Env()->AddLocalReference<T>(obj);
   }
 
@@ -173,14 +136,14 @@
   T Decode(jobject obj) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     return down_cast<T>(Self()->DecodeJObject(obj));
   }
 
   mirror::ArtField* DecodeField(jfieldID fid) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingFields);
     return reinterpret_cast<mirror::ArtField*>(fid);
   }
@@ -188,7 +151,7 @@
   jfieldID EncodeField(mirror::ArtField* field) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingFields);
     return reinterpret_cast<jfieldID>(field);
   }
@@ -196,7 +159,7 @@
   mirror::ArtMethod* DecodeMethod(jmethodID mid) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingMethods);
     return reinterpret_cast<mirror::ArtMethod*>(mid);
   }
@@ -204,16 +167,83 @@
   jmethodID EncodeMethod(mirror::ArtMethod* method) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
-    DCHECK_EQ(thread_state_, kRunnable);  // Don't work with raw objects in non-runnable states.
+    DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
     CHECK(!kMovingMethods);
     return reinterpret_cast<jmethodID>(method);
   }
 
- private:
+  bool IsRunnable() const {
+    return self_->GetState() == kRunnable;
+  }
+
+ protected:
+  explicit ScopedObjectAccessAlreadyRunnable(JNIEnv* env)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : self_(ThreadForEnv(env)), env_(down_cast<JNIEnvExt*>(env)), vm_(env_->vm) {
+  }
+
+  explicit ScopedObjectAccessAlreadyRunnable(Thread* self)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : self_(self), env_(down_cast<JNIEnvExt*>(self->GetJniEnv())),
+        vm_(env_ != nullptr ? env_->vm : nullptr) {
+  }
+
+  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
+  // change into Runnable or acquire a share on the mutator_lock_.
+  explicit ScopedObjectAccessAlreadyRunnable(JavaVM* vm)
+      : self_(nullptr), env_(nullptr), vm_(down_cast<JavaVMExt*>(vm)) {}
+
+  // Here purely to force inlining.
+  ~ScopedObjectAccessAlreadyRunnable() ALWAYS_INLINE {
+  }
+
+  // Self thread, can be null.
+  Thread* const self_;
   // The full JNIEnv.
   JNIEnvExt* const env_;
   // The full JavaVM.
   JavaVMExt* const vm_;
+};
+
+// Entry/exit processing for transitions from Native to Runnable (i.e. within JNI functions).
+//
+// This class performs the necessary thread state switching to and from Runnable and lets us
+// amortize the cost of working out the current thread. Additionally it lets us check (and repair)
+// apps that are using a JNIEnv on the wrong thread. The class also decodes and encodes Objects
+// into jobjects via methods of this class. Performing this here enforces the Runnable thread state
+// for use of Object, thereby inhibiting the Object being modified by GC whilst native or VM code
+// is also manipulating the Object.
+//
+// The destructor transitions back to the previous thread state, typically Native. In this state
+// GC and thread suspension may occur.
+//
+// For annotalysis the subclass ScopedObjectAccess (below) makes it explicit that a shared lock on
+// the mutator_lock_ will be acquired on construction.
+class ScopedObjectAccessUnchecked : public ScopedObjectAccessAlreadyRunnable {
+ public:
+  explicit ScopedObjectAccessUnchecked(JNIEnv* env)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : ScopedObjectAccessAlreadyRunnable(env), tsc_(Self(), kRunnable) {
+    Self()->VerifyStack();
+    Locks::mutator_lock_->AssertSharedHeld(Self());
+  }
+
+  explicit ScopedObjectAccessUnchecked(Thread* self)
+      LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
+      : ScopedObjectAccessAlreadyRunnable(self), tsc_(self, kRunnable) {
+    Self()->VerifyStack();
+    Locks::mutator_lock_->AssertSharedHeld(Self());
+  }
+
+  // Used when we want a scoped JNI thread state but have no thread/JNIEnv. Consequently doesn't
+  // change into Runnable or acquire a share on the mutator_lock_.
+  explicit ScopedObjectAccessUnchecked(JavaVM* vm) ALWAYS_INLINE
+      : ScopedObjectAccessAlreadyRunnable(vm), tsc_() {}
+
+ private:
+  // The scoped thread state change makes sure that we are runnable and restores the thread state
+  // in the destructor.
+  const ScopedThreadStateChange tsc_;
 
   DISALLOW_COPY_AND_ASSIGN(ScopedObjectAccessUnchecked);
 };
@@ -229,7 +259,7 @@
 
   explicit ScopedObjectAccess(Thread* self)
       LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_)
-      SHARED_LOCK_FUNCTION(Locks::mutator_lock_)
+      SHARED_LOCK_FUNCTION(Locks::mutator_lock_) ALWAYS_INLINE
       : ScopedObjectAccessUnchecked(self) {
   }
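
For contrast, a hypothetical pair of JNI entry points (names invented
for illustration) shows where the saving lies: the fast-native path
only asserts the Runnable state, while the ordinary path still pays for
the Native to Runnable transition described in the class comment above,
plus its restore on scope exit.

  // Fast native: the caller is already Runnable, so construction is a
  // no-op beyond the debug assertions.
  static jobject MyClass_identity(JNIEnv* env, jobject java_this) {
    ScopedFastNativeObjectAccess soa(env);
    mirror::Object* obj = soa.Decode<mirror::Object*>(java_this);
    return soa.AddLocalReference<jobject>(obj);
  }

  // Regular JNI: construction transitions Native -> Runnable (which may
  // block for a pending suspend request) and the destructor transitions
  // back to the previous state.
  static jobject MyClass_identitySlow(JNIEnv* env, jobject java_this) {
    ScopedObjectAccess soa(env);
    mirror::Object* obj = soa.Decode<mirror::Object*>(java_this);
    return soa.AddLocalReference<jobject>(obj);
  }

Because both helpers inherit Decode()/AddLocalReference() from
ScopedObjectAccessAlreadyRunnable, the object-access code is identical
in the two paths; only the thread state management differs.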