Remove sorted variable in allocation stacks.

Speeds up allocations and non-parallel mark stack processing. This is
safe to do since the pre/post GC heap verification is the only place
where we sort the allocation and live stacks, and that verification
runs with mutators suspended.
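
The pattern in a minimal standalone sketch (hypothetical Stack type,
not the ART class): because kIsDebugBuild is a compile-time constant,
release builds fold the flag maintenance away on the hot allocation
path, while debug builds keep a DCHECK-style guard that ContainsSorted
is only called on a sorted stack.

    #include <algorithm>
    #include <cassert>
    #include <vector>

    // Stand-in for ART's kIsDebugBuild compile-time constant.
    #ifdef NDEBUG
    static constexpr bool kIsDebugBuild = false;
    #else
    static constexpr bool kIsDebugBuild = true;
    #endif

    class Stack {
     public:
      void PushBack(int value) {
        if (kIsDebugBuild) {
          debug_is_sorted_ = false;  // Folded away in release builds.
        }
        data_.push_back(value);
      }

      void Sort() {
        std::sort(data_.begin(), data_.end());
        if (kIsDebugBuild) {
          debug_is_sorted_ = true;
        }
      }

      // O(log n); callers must have sorted the stack first. The assert,
      // like DCHECK, only fires in debug builds.
      bool ContainsSorted(int value) const {
        assert(debug_is_sorted_);
        return std::binary_search(data_.begin(), data_.end(), value);
      }

      // O(n), but makes no ordering assumption; safe on unsorted stacks.
      bool Contains(int value) const {
        return std::find(data_.begin(), data_.end(), value) != data_.end();
      }

     private:
      std::vector<int> data_;
      bool debug_is_sorted_ = true;
    };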

Change-Id: I4ae1858db7109def3e2e49ebca58b924818d71f2
diff --git a/runtime/gc/accounting/atomic_stack.h b/runtime/gc/accounting/atomic_stack.h
index 92d9ea2..a732566 100644
--- a/runtime/gc/accounting/atomic_stack.h
+++ b/runtime/gc/accounting/atomic_stack.h
@@ -47,7 +47,7 @@
     DCHECK(begin_ != NULL);
     front_index_ = 0;
     back_index_ = 0;
-    is_sorted_ = true;
+    debug_is_sorted_ = true;
     int result = madvise(begin_, sizeof(T) * capacity_, MADV_DONTNEED);
     if (result == -1) {
       PLOG(WARNING) << "madvise failed";
@@ -58,8 +58,10 @@
 
   // Returns false if we overflowed the stack.
   bool AtomicPushBack(const T& value) {
+    if (kIsDebugBuild) {
+      debug_is_sorted_ = false;
+    }
     int32_t index;
-    is_sorted_ = false;
     do {
       index = back_index_;
       if (UNLIKELY(static_cast<size_t>(index) >= capacity_)) {
@@ -72,7 +74,9 @@
   }
 
   void PushBack(const T& value) {
-    is_sorted_ = false;
+    if (kIsDebugBuild) {
+      debug_is_sorted_ = false;
+    }
     int32_t index = back_index_;
     DCHECK_LT(static_cast<size_t>(index), capacity_);
     back_index_ = index + 1;
@@ -122,22 +126,23 @@
   }
 
   void Sort() {
-    if (!is_sorted_) {
-      int32_t start_back_index = back_index_.load();
-      int32_t start_front_index = front_index_.load();
-      is_sorted_ = true;
-      std::sort(Begin(), End());
-      CHECK_EQ(start_back_index, back_index_.load());
-      CHECK_EQ(start_front_index, front_index_.load());
+    int32_t start_back_index = back_index_.load();
+    int32_t start_front_index = front_index_.load();
+    std::sort(Begin(), End());
+    CHECK_EQ(start_back_index, back_index_.load());
+    CHECK_EQ(start_front_index, front_index_.load());
+    if (kIsDebugBuild) {
+      debug_is_sorted_ = true;
     }
   }
 
+  bool ContainsSorted(const T& value) const {
+    DCHECK(debug_is_sorted_);
+    return std::binary_search(Begin(), End(), value);
+  }
+
   bool Contains(const T& value) const {
-    if (is_sorted_) {
-      return std::binary_search(Begin(), End(), value);
-    } else {
-      return std::find(Begin(), End(), value) != End();
-    }
+    return std::find(Begin(), End(), value) != End();
   }
 
  private:
@@ -147,7 +152,7 @@
         front_index_(0),
         begin_(NULL),
         capacity_(capacity),
-        is_sorted_(true) {
+        debug_is_sorted_(true) {
   }
 
   // Size in number of elements.
@@ -156,6 +161,7 @@
     CHECK(mem_map_.get() != NULL) << "couldn't allocate mark stack";
     byte* addr = mem_map_->Begin();
     CHECK(addr != NULL);
+    debug_is_sorted_ = true;
     begin_ = reinterpret_cast<T*>(addr);
     Reset();
   }
@@ -178,7 +184,8 @@
   // Maximum number of elements.
   size_t capacity_;
 
-  bool is_sorted_;
+  // Whether the stack is sorted; only updated in debug builds to avoid performance overhead.
+  bool debug_is_sorted_;
 
   DISALLOW_COPY_AND_ASSIGN(AtomicStack);
 };
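
The intended calling contract, sketched against the hypothetical Stack
above (VerifyHeapSketch is illustrative, not the real Heap code): the
pre/post GC heap verification is the only caller that sorts, it does so
with mutators suspended, and only then is the binary-search path valid.

    // Assumption: all mutator threads are suspended here, so no
    // concurrent AtomicPushBack can invalidate the sort below.
    void VerifyHeapSketch(Stack* alloc_stack, Stack* live_stack, int obj) {
      alloc_stack->Sort();
      live_stack->Sort();
      // Sorted stacks permit the O(log n) membership test.
      if (alloc_stack->ContainsSorted(obj)) {
        // ... report "found in allocation stack" ...
      }
      if (live_stack->ContainsSorted(obj)) {
        // ... report "found in live stack" ...
      }
    }

Every other caller keeps the linear Contains, which makes no ordering
assumption and therefore needs no sorting to have happened.
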
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 292fd29..00f7e5b 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -524,25 +524,24 @@
 
 bool Heap::IsLiveObjectLocked(const mirror::Object* obj) {
   // Locks::heap_bitmap_lock_->AssertReaderHeld(Thread::Current());
-  if (obj == NULL) {
+  if (obj == NULL || UNLIKELY(!IsAligned<kObjectAlignment>(obj))) {
     return false;
   }
-  if (UNLIKELY(!IsAligned<kObjectAlignment>(obj))) {
-    return false;
-  }
-  space::ContinuousSpace* cont_space = FindContinuousSpaceFromObject(obj, true);
-  if (cont_space != NULL) {
-    if (cont_space->GetLiveBitmap()->Test(obj)) {
+  space::ContinuousSpace* c_space = FindContinuousSpaceFromObject(obj, true);
+  space::DiscontinuousSpace* d_space = NULL;
+  if (c_space != NULL) {
+    if (c_space->GetLiveBitmap()->Test(obj)) {
       return true;
     }
   } else {
-    space::DiscontinuousSpace* disc_space = FindDiscontinuousSpaceFromObject(obj, true);
-    if (disc_space != NULL) {
-      if (disc_space->GetLiveObjects()->Test(obj)) {
+    d_space = FindDiscontinuousSpaceFromObject(obj, true);
+    if (d_space != NULL) {
+      if (d_space->GetLiveObjects()->Test(obj)) {
         return true;
       }
     }
   }
+  // This covers the allocation/live stack swapping that is done without mutators suspended.
   for (size_t i = 0; i < 5; ++i) {
     if (allocation_stack_->Contains(const_cast<mirror::Object*>(obj)) ||
         live_stack_->Contains(const_cast<mirror::Object*>(obj))) {
@@ -550,6 +549,18 @@
     }
     NanoSleep(MsToNs(10));
   }
+  // We need to check the bitmaps again since there is a race where we mark something as live and
+  // then clear the stack containing it.
+  if (c_space != NULL) {
+    if (c_space->GetLiveBitmap()->Test(obj)) {
+      return true;
+    }
+  } else {
+    d_space = FindDiscontinuousSpaceFromObject(obj, true);
+    if (d_space != NULL && d_space->GetLiveObjects()->Test(obj)) {
+      return true;
+    }
+  }
   return false;
 }
 
@@ -1229,10 +1240,10 @@
         if (bitmap != NULL && bitmap->Test(obj)) {
           LOG(ERROR) << "Object " << obj << " found in live bitmap";
         }
-        if (alloc_stack->Contains(const_cast<mirror::Object*>(obj))) {
+        if (alloc_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
           LOG(ERROR) << "Object " << obj << " found in allocation stack";
         }
-        if (live_stack->Contains(const_cast<mirror::Object*>(obj))) {
+        if (live_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
           LOG(ERROR) << "Object " << obj << " found in live stack";
         }
         // Attempt to see if the card table missed the reference.
@@ -1252,10 +1263,10 @@
       } else {
         LOG(ERROR) << "Root references dead object " << ref << "\nRef type " << PrettyTypeOf(ref);
       }
-      if (alloc_stack->Contains(const_cast<mirror::Object*>(ref))) {
+      if (alloc_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
         LOG(ERROR) << "Reference " << ref << " found in allocation stack!";
       }
-      if (live_stack->Contains(const_cast<mirror::Object*>(ref))) {
+      if (live_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
         LOG(ERROR) << "Reference " << ref << " found in live stack!";
       }
       heap_->image_mod_union_table_->Dump(LOG(ERROR) << "Image mod-union table: ");
@@ -1345,8 +1356,8 @@
         // Card should be either kCardDirty if it got re-dirtied after we aged it, or
        // kCardDirty - 1 if it didn't get touched since we aged it.
         accounting::ObjectStack* live_stack = heap_->live_stack_.get();
-        if (live_stack->Contains(const_cast<mirror::Object*>(ref))) {
-          if (live_stack->Contains(const_cast<mirror::Object*>(obj))) {
+        if (live_stack->ContainsSorted(const_cast<mirror::Object*>(ref))) {
+          if (live_stack->ContainsSorted(const_cast<mirror::Object*>(obj))) {
             LOG(ERROR) << "Object " << obj << " found in live stack";
           }
           if (heap_->GetLiveBitmap()->Test(obj)) {
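
The IsLiveObjectLocked change above encodes a retry-then-recheck shape
worth noting: a concurrent GC can mark an object live in a bitmap and
then clear the stack that held it between our two reads, so after the
bounded stack retries fail, the bitmaps must be consulted once more. A
generic sketch (hypothetical in_bitmap/in_stacks predicates, not the
real Heap code):

    #include <chrono>
    #include <functional>
    #include <thread>

    bool IsLiveWithRaces(const std::function<bool()>& in_bitmap,
                         const std::function<bool()>& in_stacks) {
      if (in_bitmap()) {
        return true;
      }
      // The allocation/live stacks can be swapped or cleared concurrently,
      // so poll a few times rather than trusting a single read.
      for (int i = 0; i < 5; ++i) {
        if (in_stacks()) {
          return true;
        }
        std::this_thread::sleep_for(std::chrono::milliseconds(10));
      }
      // The object may have migrated from a stack into a live bitmap while
      // we were polling, so look at the bitmap once more before declaring
      // the object dead.
      return in_bitmap();
    }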