Merge "Remove started runtime check in RevokeAllThreadLocalAllocationStacks"
diff --git a/oatdump/oatdump.cc b/oatdump/oatdump.cc
index 0d9b991..330a307 100644
--- a/oatdump/oatdump.cc
+++ b/oatdump/oatdump.cc
@@ -49,6 +49,7 @@
 #include "runtime.h"
 #include "safe_map.h"
 #include "scoped_thread_state_change.h"
+#include "thread_list.h"
 #include "verifier/dex_gc_map.h"
 #include "verifier/method_verifier.h"
 #include "vmap_table.h"
@@ -817,12 +818,21 @@
     const std::vector<gc::space::ContinuousSpace*>& spaces = heap->GetContinuousSpaces();
     Thread* self = Thread::Current();
     {
-      WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
-      heap->FlushAllocStack();
+      {
+        WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
+        heap->FlushAllocStack();
+      }
       // Since FlushAllocStack() above resets the (active) allocation
       // stack, we need to revoke the thread-local allocation stacks
       // that point into it.
-      heap->RevokeAllThreadLocalAllocationStacks(self);
+      {
+        self->TransitionFromRunnableToSuspended(kNative);
+        ThreadList* thread_list = Runtime::Current()->GetThreadList();
+        thread_list->SuspendAll();
+        heap->RevokeAllThreadLocalAllocationStacks(self);
+        thread_list->ResumeAll();
+        self->TransitionFromSuspendedToRunnable();
+      }
     }
     {
       std::ostream* saved_os = os_;
@@ -1548,7 +1558,6 @@
   // give it away now and then switch to a more manageable ScopedObjectAccess.
   Thread::Current()->TransitionFromRunnableToSuspended(kNative);
   ScopedObjectAccess soa(Thread::Current());
-
   gc::Heap* heap = Runtime::Current()->GetHeap();
   gc::space::ImageSpace* image_space = heap->GetImageSpace();
   CHECK(image_space != NULL);
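A note on the oatdump hunk above, separate from the patch itself: with the started-runtime check gone, Heap::RevokeAllThreadLocalAllocationStacks() always demands exclusive ownership of the mutator lock, so oatdump must suspend all threads around the call, as the hunk does by hand. The same pairing could be wrapped in an RAII helper; the sketch below is illustrative only (ScopedSuspendAllForDump is an invented name, not a class in this tree) and uses only the calls already present in the hunk, assuming the headers oatdump.cc already includes (runtime.h, thread_list.h).

// Illustrative only, not part of the patch. Pairs the thread-state transitions
// with SuspendAll()/ResumeAll() so they cannot get out of balance on an early
// return.
class ScopedSuspendAllForDump {
 public:
  explicit ScopedSuspendAllForDump(Thread* self) : self_(self) {
    self_->TransitionFromRunnableToSuspended(kNative);
    Runtime::Current()->GetThreadList()->SuspendAll();
  }
  ~ScopedSuspendAllForDump() {
    Runtime::Current()->GetThreadList()->ResumeAll();
    self_->TransitionFromSuspendedToRunnable();
  }
 private:
  Thread* const self_;
};

With such a helper, the revoke site shrinks to one guard declaration followed by heap->RevokeAllThreadLocalAllocationStacks(self), with the destructor resuming the world even on early exits.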
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 9ab2d2e..cc34689 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -210,7 +210,7 @@
     // Since SweepArray() above resets the (active) allocation
     // stack, we need to revoke the thread-local allocation stacks
     // that point into it.
-    GetHeap()->RevokeAllThreadLocalAllocationStacks(self);
+    RevokeAllThreadLocalAllocationStacks(self);
   }
 
   timings_.StartSplit("PreSweepingGcVerification");
@@ -252,6 +252,13 @@
   }
 }
 
+void MarkSweep::RevokeAllThreadLocalAllocationStacks(Thread* self) {
+  if (kUseThreadLocalAllocationStack) {
+    Locks::mutator_lock_->AssertExclusiveHeld(self);
+    heap_->RevokeAllThreadLocalAllocationStacks(self);
+  }
+}
+
 void MarkSweep::MarkingPhase() {
   TimingLogger::ScopedSplit split("MarkingPhase", &timings_);
   Thread* self = Thread::Current();
@@ -271,9 +278,7 @@
   if (Locks::mutator_lock_->IsExclusiveHeld(self)) {
     // If we exclusively hold the mutator lock, all threads must be suspended.
     MarkRoots();
-    if (kUseThreadLocalAllocationStack) {
-      heap_->RevokeAllThreadLocalAllocationStacks(self);
-    }
+    RevokeAllThreadLocalAllocationStacks(self);
   } else {
     MarkThreadRoots(self);
     // At this point the live stack should no longer have any mutators which push into it.
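For context, and not part of the change: the RevokeAllThreadLocalAllocationStacks wrapper above (declared NO_THREAD_SAFETY_ANALYSIS in mark_sweep.h below) leans on clang's thread-safety analysis opt-out, because the static analysis cannot see that the call sites dynamically verify exclusive ownership of the mutator lock. The following is a minimal standalone sketch of that pattern, assuming ART's macros wrap the clang attributes shown here; SimpleMutex, gCounter and IncrementLocked are invented names for the example.

// Illustrative only, not ART code.
#include <mutex>

class __attribute__((lockable)) SimpleMutex {
 public:
  void Lock() __attribute__((exclusive_lock_function)) { mu_.lock(); held_ = true; }
  void Unlock() __attribute__((unlock_function)) { held_ = false; mu_.unlock(); }
  bool IsHeld() const { return held_; }  // Dynamic check; invisible to the static analysis.
 private:
  std::mutex mu_;
  bool held_ = false;
};

SimpleMutex gLock;
int gCounter __attribute__((guarded_by(gLock))) = 0;

// Every caller verifies gLock.IsHeld() first, but -Wthread-safety cannot prove it,
// so the function opts out of the analysis and documents the dynamic guarantee,
// which is the same trade the MarkSweep wrapper makes.
void IncrementLocked() __attribute__((no_thread_safety_analysis)) {
  ++gCounter;
}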
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 8f8a0f0..29fafd6 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -334,6 +334,10 @@
   void ClearWhiteReferences(mirror::Object** list)
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
 
+  // Used to get around thread safety annotations. Callers only invoke this with the mutator
+  // lock exclusively held; MarkingPhase checks that via IsExclusiveHeld.
+  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;
+
   // Whether or not we count how many of each type of object were scanned.
   static const bool kCountScannedTypes = false;
 
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index b970df3..58db7a8 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2010,12 +2010,6 @@
 }
 
 void Heap::RevokeAllThreadLocalAllocationStacks(Thread* self) {
-  if (!Runtime::Current()->IsStarted()) {
-    // There's no thread list if the runtime hasn't started (eg
-    // dex2oat or a test). Just revoke for self.
-    self->RevokeThreadLocalAllocationStack();
-    return;
-  }
   // This must be called only during the pause.
   CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
   MutexLock mu(self, *Locks::runtime_shutdown_lock_);
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 83202a5..5d44ee1 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -207,11 +207,9 @@
   // Check sanity of all live references.
   void VerifyHeap() LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
   bool VerifyHeapReferences()
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
   bool VerifyMissingCardMarks()
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
-      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
 
   // A weaker test than IsLiveObject or VerifyObject that doesn't require the heap lock,
   // and doesn't abort on error, allowing the caller to report more
@@ -463,7 +461,8 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
   // Revoke all the thread-local allocation stacks.
-  void RevokeAllThreadLocalAllocationStacks(Thread* self);
+  void RevokeAllThreadLocalAllocationStacks(Thread* self)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // Mark all the objects in the allocation stack in the specified bitmap.
   void MarkAllocStack(accounting::SpaceBitmap* bitmap1, accounting::SpaceBitmap* bitmap2,
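Last, a sketch of what the heap.h annotation tightening above means for callers; it is illustrative only and not ART code. SHARED_LOCKS_REQUIRED lets a function run while other threads also hold the lock for reading, while EXCLUSIVE_LOCKS_REQUIRED demands sole ownership, which for Locks::mutator_lock_ means every other thread is suspended. The attributes below are clang's thread-safety attributes, assumed to be what the ART macros expand to; RwLock and the Verify* names are invented, with std::shared_mutex standing in for ART's ReaderWriterMutex.

// Illustrative only: shared versus exclusive lock requirements.
#include <shared_mutex>

class __attribute__((lockable)) RwLock {
 public:
  void ExclusiveLock() __attribute__((exclusive_lock_function)) { mu_.lock(); }
  void ExclusiveUnlock() __attribute__((unlock_function)) { mu_.unlock(); }
  void SharedLock() __attribute__((shared_lock_function)) { mu_.lock_shared(); }
  void SharedUnlock() __attribute__((unlock_function)) { mu_.unlock_shared(); }
 private:
  std::shared_mutex mu_;
};

RwLock rw_lock;

// May run while other threads also hold rw_lock for read.
bool VerifyConcurrently() __attribute__((shared_locks_required(rw_lock))) {
  return true;
}

// Caller must own rw_lock exclusively: no concurrent readers at all, which for
// the mutator lock means all other threads are suspended.
bool VerifyDuringPause() __attribute__((exclusive_locks_required(rw_lock))) {
  return true;
}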