Refactor some GC code.

Reduced amount of code in mark sweep / semi space by moving
common logic to garbage_collector.cc. Cleaned up mod union tables
and deleted an unused implementation.

Change-Id: I4bcc6ba41afd96d230cfbaf4d6636f37c52e37ea
diff --git a/runtime/gc/accounting/heap_bitmap-inl.h b/runtime/gc/accounting/heap_bitmap-inl.h
index 18b93d4..04e85d2 100644
--- a/runtime/gc/accounting/heap_bitmap-inl.h
+++ b/runtime/gc/accounting/heap_bitmap-inl.h
@@ -19,6 +19,8 @@
 
 #include "heap_bitmap.h"
 
+#include "space_bitmap-inl.h"
+
 namespace art {
 namespace gc {
 namespace accounting {
@@ -34,6 +36,55 @@
   }
 }
 
+inline bool HeapBitmap::Test(const mirror::Object* obj) {
+  SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
+  if (LIKELY(bitmap != nullptr)) {
+    return bitmap->Test(obj);
+  } else {
+    return GetDiscontinuousSpaceObjectSet(obj) != nullptr;
+  }
+}
+
+inline void HeapBitmap::Clear(const mirror::Object* obj) {
+  SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
+  if (LIKELY(bitmap != nullptr)) {
+    bitmap->Clear(obj);
+  } else {
+    ObjectSet* set = GetDiscontinuousSpaceObjectSet(obj);
+    DCHECK(set != nullptr);
+    set->Clear(obj);
+  }
+}
+
+inline void HeapBitmap::Set(const mirror::Object* obj) {
+  SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
+  if (LIKELY(bitmap != nullptr)) {
+    bitmap->Set(obj);
+  } else {
+    ObjectSet* set = GetDiscontinuousSpaceObjectSet(obj);
+    DCHECK(set != nullptr);
+    set->Set(obj);
+  }
+}
+
+inline SpaceBitmap* HeapBitmap::GetContinuousSpaceBitmap(const mirror::Object* obj) const {
+  for (const auto& bitmap : continuous_space_bitmaps_) {
+    if (bitmap->HasAddress(obj)) {
+      return bitmap;
+    }
+  }
+  return nullptr;
+}
+
+inline ObjectSet* HeapBitmap::GetDiscontinuousSpaceObjectSet(const mirror::Object* obj) const {
+  for (const auto& space_set : discontinuous_space_sets_) {
+    if (space_set->Test(obj)) {
+      return space_set;
+    }
+  }
+  return nullptr;
+}
+
 }  // namespace accounting
 }  // namespace gc
 }  // namespace art
diff --git a/runtime/gc/accounting/heap_bitmap.h b/runtime/gc/accounting/heap_bitmap.h
index b23b12e..f729c0e 100644
--- a/runtime/gc/accounting/heap_bitmap.h
+++ b/runtime/gc/accounting/heap_bitmap.h
@@ -31,54 +31,11 @@
 
 class HeapBitmap {
  public:
-  bool Test(const mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
-    SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
-    if (LIKELY(bitmap != nullptr)) {
-      return bitmap->Test(obj);
-    } else {
-      return GetDiscontinuousSpaceObjectSet(obj) != NULL;
-    }
-  }
-
-  void Clear(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
-    SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
-    if (LIKELY(bitmap != NULL)) {
-      bitmap->Clear(obj);
-    } else {
-      ObjectSet* set = GetDiscontinuousSpaceObjectSet(obj);
-      DCHECK(set != NULL);
-      set->Clear(obj);
-    }
-  }
-
-  void Set(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
-    SpaceBitmap* bitmap = GetContinuousSpaceBitmap(obj);
-    if (LIKELY(bitmap != NULL)) {
-      bitmap->Set(obj);
-    } else {
-      ObjectSet* set = GetDiscontinuousSpaceObjectSet(obj);
-      DCHECK(set != NULL);
-      set->Set(obj);
-    }
-  }
-
-  SpaceBitmap* GetContinuousSpaceBitmap(const mirror::Object* obj) {
-    for (const auto& bitmap : continuous_space_bitmaps_) {
-      if (bitmap->HasAddress(obj)) {
-        return bitmap;
-      }
-    }
-    return nullptr;
-  }
-
-  ObjectSet* GetDiscontinuousSpaceObjectSet(const mirror::Object* obj) {
-    for (const auto& space_set : discontinuous_space_sets_) {
-      if (space_set->Test(obj)) {
-        return space_set;
-      }
-    }
-    return nullptr;
-  }
+  bool Test(const mirror::Object* obj) SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+  void Clear(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+  void Set(const mirror::Object* obj) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+  SpaceBitmap* GetContinuousSpaceBitmap(const mirror::Object* obj) const;
+  ObjectSet* GetDiscontinuousSpaceObjectSet(const mirror::Object* obj) const;
 
   void Walk(ObjectCallback* callback, void* arg)
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
diff --git a/runtime/gc/accounting/mod_union_table-inl.h b/runtime/gc/accounting/mod_union_table-inl.h
index 76719b6..c756127 100644
--- a/runtime/gc/accounting/mod_union_table-inl.h
+++ b/runtime/gc/accounting/mod_union_table-inl.h
@@ -32,39 +32,8 @@
                                            space::ContinuousSpace* space)
       : ModUnionTableReferenceCache(name, heap, space) {}
 
-  bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) ALWAYS_INLINE {
-    for (space::ContinuousSpace* space : GetHeap()->GetContinuousSpaces()) {
-      if (space->HasAddress(ref)) {
-        return !space->IsImageSpace();
-      }
-    }
-    // Assume it points to a large object.
-    // TODO: Check.
-    return true;
-  }
-};
-
-// A mod-union table to record Zygote references to the alloc space.
-class ModUnionTableToAllocspace : public ModUnionTableReferenceCache {
- public:
-  explicit ModUnionTableToAllocspace(const std::string& name, Heap* heap,
-                                     space::ContinuousSpace* space)
-      : ModUnionTableReferenceCache(name, heap, space) {}
-
-  bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) ALWAYS_INLINE {
-    const std::vector<space::ContinuousSpace*>& spaces = GetHeap()->GetContinuousSpaces();
-    typedef std::vector<space::ContinuousSpace*>::const_iterator It;
-    for (It it = spaces.begin(); it != spaces.end(); ++it) {
-      space::ContinuousSpace* space = *it;
-      if (space->Contains(ref)) {
-        // The allocation space is always considered for collection whereas the Zygote space is
-        // only considered for full GC.
-        return space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect;
-      }
-    }
-    // Assume it points to a large object.
-    // TODO: Check.
-    return true;
+  bool ShouldAddReference(const mirror::Object* ref) const OVERRIDE ALWAYS_INLINE {
+    return !space_->HasAddress(ref);
   }
 };
 
diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc
index 314f3c5..34ca654 100644
--- a/runtime/gc/accounting/mod_union_table.cc
+++ b/runtime/gc/accounting/mod_union_table.cc
@@ -76,7 +76,7 @@
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, MemberOffset offset, bool /* static */) const
+  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     // Only add the reference if it is non null and fits our criteria.
     mirror::HeapReference<Object>* obj_ptr = obj->GetFieldObjectReferenceAddr(offset);
@@ -123,12 +123,12 @@
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, MemberOffset offset, bool /* static */) const
+  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     mirror::HeapReference<Object>* ref_ptr = obj->GetFieldObjectReferenceAddr(offset);
     mirror::Object* ref = ref_ptr->AsMirrorPtr();
     // Only add the reference if it is non null and fits our criteria.
-    if (ref  != nullptr && mod_union_table_->AddReference(obj, ref)) {
+    if (ref != nullptr && mod_union_table_->ShouldAddReference(ref)) {
       // Push the adddress of the reference.
       references_->push_back(ref_ptr);
     }
@@ -168,10 +168,10 @@
   }
 
   // Extra parameters are required since we use this same visitor signature for checking objects.
-  void operator()(Object* obj, MemberOffset offset, bool /* is_static */) const
+  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
       SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
     mirror::Object* ref = obj->GetFieldObject<mirror::Object>(offset, false);
-    if (ref != nullptr && mod_union_table_->AddReference(obj, ref) &&
+    if (ref != nullptr && mod_union_table_->ShouldAddReference(ref) &&
         references_.find(ref) == references_.end()) {
       Heap* heap = mod_union_table_->GetHeap();
       space::ContinuousSpace* from_space = heap->FindContinuousSpaceFromObject(obj, false);
@@ -260,8 +260,7 @@
 
 void ModUnionTableReferenceCache::UpdateAndMarkReferences(MarkHeapReferenceCallback* callback,
                                                           void* arg) {
-  Heap* heap = GetHeap();
-  CardTable* card_table = heap->GetCardTable();
+  CardTable* card_table = heap_->GetCardTable();
 
   std::vector<mirror::HeapReference<Object>*> cards_references;
   ModUnionReferenceVisitor add_visitor(this, &cards_references);
@@ -271,7 +270,7 @@
     cards_references.clear();
     uintptr_t start = reinterpret_cast<uintptr_t>(card_table->AddrFromCard(card));
     uintptr_t end = start + CardTable::kCardSize;
-    auto* space = heap->FindContinuousSpaceFromObject(reinterpret_cast<Object*>(start), false);
+    auto* space = heap_->FindContinuousSpaceFromObject(reinterpret_cast<Object*>(start), false);
     DCHECK(space != nullptr);
     SpaceBitmap* live_bitmap = space->GetLiveBitmap();
     live_bitmap->VisitMarkedRange(start, end, add_visitor);
diff --git a/runtime/gc/accounting/mod_union_table.h b/runtime/gc/accounting/mod_union_table.h
index c4b020b..c3a90e2 100644
--- a/runtime/gc/accounting/mod_union_table.h
+++ b/runtime/gc/accounting/mod_union_table.h
@@ -117,7 +117,7 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
   // Function that tells whether or not to add a reference to the table.
-  virtual bool AddReference(const mirror::Object* obj, const mirror::Object* ref) = 0;
+  virtual bool ShouldAddReference(const mirror::Object* ref) const = 0;
 
   void Dump(std::ostream& os) SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
diff --git a/runtime/gc/collector/garbage_collector.cc b/runtime/gc/collector/garbage_collector.cc
index 0c7565c..c199a69 100644
--- a/runtime/gc/collector/garbage_collector.cc
+++ b/runtime/gc/collector/garbage_collector.cc
@@ -86,8 +86,8 @@
       // Pause is the entire length of the GC.
       uint64_t pause_start = NanoTime();
       ATRACE_BEGIN("Application threads suspended");
-      // Mutator lock may be already exclusively held when we do garbage collections for changing the
-      // current collector / allocator during process state updates.
+      // Mutator lock may be already exclusively held when we do garbage collections for changing
+      // the current collector / allocator during process state updates.
       if (Locks::mutator_lock_->IsExclusiveHeld(self)) {
         // PreGcRosAllocVerification() is called in Heap::TransitionCollector().
         RevokeAllThreadLocalBuffers();
@@ -146,7 +146,11 @@
       break;
     }
   }
-
+  // Add the current timings to the cumulative timings.
+  cumulative_timings_.AddLogger(timings_);
+  // Update cumulative statistics with how many objects and bytes the GC iteration freed.
+  total_freed_objects_ += GetFreedObjects() + GetFreedLargeObjects();
+  total_freed_bytes_ += GetFreedBytes() + GetFreedLargeObjectBytes();
   uint64_t end_time = NanoTime();
   duration_ns_ = end_time - start_time;
   total_time_ns_ += GetDurationNs();
diff --git a/runtime/gc/collector/mark_sweep-inl.h b/runtime/gc/collector/mark_sweep-inl.h
index 1cb2adb..974952d 100644
--- a/runtime/gc/collector/mark_sweep-inl.h
+++ b/runtime/gc/collector/mark_sweep-inl.h
@@ -21,7 +21,7 @@
 
 #include "gc/heap.h"
 #include "mirror/art_field.h"
-#include "mirror/class.h"
+#include "mirror/class-inl.h"
 #include "mirror/object_array-inl.h"
 #include "mirror/reference.h"
 
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 8abf5e2..e289df5 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -27,31 +27,20 @@
 #include "base/mutex-inl.h"
 #include "base/timing_logger.h"
 #include "gc/accounting/card_table-inl.h"
-#include "gc/accounting/heap_bitmap.h"
+#include "gc/accounting/heap_bitmap-inl.h"
 #include "gc/accounting/mod_union_table.h"
 #include "gc/accounting/space_bitmap-inl.h"
 #include "gc/heap.h"
 #include "gc/space/image_space.h"
 #include "gc/space/large_object_space.h"
 #include "gc/space/space-inl.h"
-#include "indirect_reference_table.h"
-#include "intern_table.h"
-#include "jni_internal.h"
-#include "monitor.h"
 #include "mark_sweep-inl.h"
-#include "mirror/art_field.h"
 #include "mirror/art_field-inl.h"
-#include "mirror/class-inl.h"
-#include "mirror/class_loader.h"
-#include "mirror/dex_cache.h"
-#include "mirror/reference-inl.h"
 #include "mirror/object-inl.h"
-#include "mirror/object_array.h"
-#include "mirror/object_array-inl.h"
 #include "runtime.h"
+#include "scoped_thread_state_change.h"
 #include "thread-inl.h"
 #include "thread_list.h"
-#include "verifier/method_verifier.h"
 
 using ::art::mirror::ArtField;
 using ::art::mirror::Class;
@@ -228,7 +217,6 @@
     // acquired / released in the checkpoint code).
     // The other roots are also marked to help reduce the pause.
     MarkThreadRoots(self);
-    // TODO: Only mark the dirty roots.
     MarkNonThreadRoots();
     MarkConcurrentRoots(
         static_cast<VisitRootFlags>(kVisitRootFlagClearRootLog | kVisitRootFlagNewRoots));
@@ -264,7 +252,6 @@
   WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
   MarkRoots(self);
   live_stack_freeze_size_ = heap_->GetLiveStack()->Size();
-  UpdateAndMarkModUnion();
   MarkReachableObjects();
   // Pre-clean dirtied cards to reduce pauses.
   PreCleanCards();
@@ -289,6 +276,7 @@
 }
 
 void MarkSweep::MarkReachableObjects() {
+  UpdateAndMarkModUnion();
   // Mark everything allocated since the last as GC live so that we can sweep concurrently,
   // knowing that new allocations won't be marked as live.
   timings_.StartSplit("MarkStackAsLive");
@@ -1034,14 +1022,13 @@
 }
 
 void MarkSweep::VerifyIsLive(const Object* obj) {
-  Heap* heap = GetHeap();
-  if (!heap->GetLiveBitmap()->Test(obj)) {
-    space::LargeObjectSpace* large_object_space = GetHeap()->GetLargeObjectsSpace();
+  if (!heap_->GetLiveBitmap()->Test(obj)) {
+    space::LargeObjectSpace* large_object_space = heap_->GetLargeObjectsSpace();
     if (!large_object_space->GetLiveObjects()->Test(obj)) {
-      if (std::find(heap->allocation_stack_->Begin(), heap->allocation_stack_->End(), obj) ==
-          heap->allocation_stack_->End()) {
+      if (std::find(heap_->allocation_stack_->Begin(), heap_->allocation_stack_->End(), obj) ==
+          heap_->allocation_stack_->End()) {
         // Object not found!
-        heap->DumpSpaces();
+        heap_->DumpSpaces();
         LOG(FATAL) << "Found dead object " << obj;
       }
     }
@@ -1057,7 +1044,7 @@
  public:
   explicit CheckpointMarkThreadRoots(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {}
 
-  virtual void Run(Thread* thread) NO_THREAD_SAFETY_ANALYSIS {
+  virtual void Run(Thread* thread) OVERRIDE NO_THREAD_SAFETY_ANALYSIS {
     ATRACE_BEGIN("Marking thread roots");
     // Note: self is not necessarily equal to thread since thread may be suspended.
     Thread* self = Thread::Current();
@@ -1075,7 +1062,7 @@
   }
 
  private:
-  MarkSweep* mark_sweep_;
+  MarkSweep* const mark_sweep_;
 };
 
 void MarkSweep::MarkRootsCheckpoint(Thread* self) {
@@ -1089,10 +1076,10 @@
   // TODO: optimize to not release locks when there are no threads to wait for.
   Locks::heap_bitmap_lock_->ExclusiveUnlock(self);
   Locks::mutator_lock_->SharedUnlock(self);
-  ThreadState old_state = self->SetState(kWaitingForCheckPointsToRun);
-  CHECK_EQ(old_state, kWaitingPerformingGc);
-  gc_barrier_->Increment(self, barrier_count);
-  self->SetState(kWaitingPerformingGc);
+  {
+    ScopedThreadStateChange tsc(self, kWaitingForCheckPointsToRun);
+    gc_barrier_->Increment(self, barrier_count);
+  }
   Locks::mutator_lock_->SharedLock(self);
   Locks::heap_bitmap_lock_->ExclusiveLock(self);
   timings_.EndSplit();
@@ -1326,7 +1313,7 @@
         }
         obj = mark_stack_->PopBack();
       }
-      DCHECK(obj != NULL);
+      DCHECK(obj != nullptr);
       ScanObject(obj);
     }
   }
@@ -1347,14 +1334,8 @@
 void MarkSweep::FinishPhase() {
   TimingLogger::ScopedSplit split("FinishPhase", &timings_);
   // Can't enqueue references if we hold the mutator lock.
-  Heap* heap = GetHeap();
   timings_.NewSplit("PostGcVerification");
-  heap->PostGcVerification(this);
-  // Update the cumulative statistics.
-  total_freed_objects_ += GetFreedObjects() + GetFreedLargeObjects();
-  total_freed_bytes_ += GetFreedBytes() + GetFreedLargeObjectBytes();
-  // Ensure that the mark stack is empty.
-  CHECK(mark_stack_->IsEmpty());
+  heap_->PostGcVerification(this);
   if (kCountScannedTypes) {
     VLOG(gc) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_
              << " other=" << other_count_;
@@ -1375,22 +1356,10 @@
     VLOG(gc) << "Marked: null=" << mark_null_count_ << " immune=" <<  mark_immune_count_
         << " fastpath=" << mark_fastpath_count_ << " slowpath=" << mark_slowpath_count_;
   }
-  // Update the cumulative loggers.
-  cumulative_timings_.Start();
-  cumulative_timings_.AddLogger(timings_);
-  cumulative_timings_.End();
-  // Clear all of the spaces' mark bitmaps.
-  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
-    accounting::SpaceBitmap* bitmap = space->GetMarkBitmap();
-    if (bitmap != nullptr &&
-        space->GetGcRetentionPolicy() != space::kGcRetentionPolicyNeverCollect) {
-      bitmap->Clear();
-    }
-  }
+  CHECK(mark_stack_->IsEmpty());  // Ensure that the mark stack is empty.
   mark_stack_->Reset();
-  // Reset the marked large objects.
-  space::LargeObjectSpace* large_objects = GetHeap()->GetLargeObjectsSpace();
-  large_objects->GetMarkObjects()->Clear();
+  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+  heap_->ClearMarkedObjects();
 }
 
 void MarkSweep::RevokeAllThreadLocalBuffers() {
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 84b775a..9bf4cd1 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -32,33 +32,22 @@
 namespace mirror {
   class Class;
   class Object;
-  template<class T> class ObjectArray;
   class Reference;
 }  // namespace mirror
 
-class StackVisitor;
 class Thread;
 enum VisitRootFlags : uint8_t;
 
 namespace gc {
 
+class Heap;
+
 namespace accounting {
-  template <typename T> class AtomicStack;
-  class MarkIfReachesAllocspaceVisitor;
-  class ModUnionClearCardVisitor;
-  class ModUnionVisitor;
-  class ModUnionTableBitmap;
-  class MarkStackChunk;
+  template<typename T> class AtomicStack;
   typedef AtomicStack<mirror::Object*> ObjectStack;
   class SpaceBitmap;
 }  // namespace accounting
 
-namespace space {
-  class ContinuousSpace;
-}  // namespace space
-
-class Heap;
-
 namespace collector {
 
 class MarkSweep : public GarbageCollector {
@@ -137,8 +126,8 @@
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
-  // Update and mark references from immune spaces. Virtual as overridden by StickyMarkSweep.
-  virtual void UpdateAndMarkModUnion()
+  // Update and mark references from immune spaces.
+  void UpdateAndMarkModUnion()
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
 
   // Pre clean cards to reduce how much work is needed in the pause.
@@ -311,7 +300,7 @@
 
   accounting::ObjectStack* mark_stack_;
 
-  // Immune range, every object inside the immune range is assumed to be marked.
+  // Immune region, every object inside the immune region is assumed to be marked.
   ImmuneRegion immune_region_;
 
   // Parallel finger.
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index d4f47ef..222bd63 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -25,7 +25,7 @@
 #include "base/macros.h"
 #include "base/mutex-inl.h"
 #include "base/timing_logger.h"
-#include "gc/accounting/heap_bitmap.h"
+#include "gc/accounting/heap_bitmap-inl.h"
 #include "gc/accounting/mod_union_table.h"
 #include "gc/accounting/remembered_set.h"
 #include "gc/accounting/space_bitmap-inl.h"
@@ -726,8 +726,8 @@
     return obj;
   }
   if (from_space_->HasAddress(obj)) {
-    mirror::Object* forwarding_address = GetForwardingAddressInFromSpace(const_cast<Object*>(obj));
-    return forwarding_address;  // Returns either the forwarding address or nullptr.
+    // Returns either the forwarding address or nullptr.
+    return GetForwardingAddressInFromSpace(obj);
   } else if (to_space_->HasAddress(obj)) {
     // Should be unlikely.
     // Already forwarded, must be marked.
@@ -751,38 +751,12 @@
   Heap* heap = GetHeap();
   timings_.NewSplit("PostGcVerification");
   heap->PostGcVerification(this);
-
   // Null the "to" and "from" spaces since compacting from one to the other isn't valid until
   // further action is done by the heap.
   to_space_ = nullptr;
   from_space_ = nullptr;
-
-  // Update the cumulative statistics
-  total_freed_objects_ += GetFreedObjects() + GetFreedLargeObjects();
-  total_freed_bytes_ += GetFreedBytes() + GetFreedLargeObjectBytes();
-
-  // Ensure that the mark stack is empty.
   CHECK(mark_stack_->IsEmpty());
-
-  // Update the cumulative loggers.
-  cumulative_timings_.Start();
-  cumulative_timings_.AddLogger(timings_);
-  cumulative_timings_.End();
-
-  // Clear all of the spaces' mark bitmaps.
-  for (const auto& space : GetHeap()->GetContinuousSpaces()) {
-    accounting::SpaceBitmap* bitmap = space->GetMarkBitmap();
-    if (bitmap != nullptr &&
-        space->GetGcRetentionPolicy() != space::kGcRetentionPolicyNeverCollect) {
-      bitmap->Clear();
-    }
-  }
   mark_stack_->Reset();
-
-  // Reset the marked large objects.
-  space::LargeObjectSpace* large_objects = GetHeap()->GetLargeObjectsSpace();
-  large_objects->GetMarkObjects()->Clear();
-
   if (generational_) {
     // Decide whether to do a whole heap collection or a bump pointer
     // only space collection at the next collection by updating
@@ -800,6 +774,9 @@
       whole_heap_collection_ = false;
     }
   }
+  // Clear all of the spaces' mark bitmaps.
+  WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
+  heap_->ClearMarkedObjects();
 }
 
 void SemiSpace::RevokeAllThreadLocalBuffers() {
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index 52b53aa..f067cb2 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -28,37 +28,28 @@
 
 namespace art {
 
+class Thread;
+
 namespace mirror {
   class Class;
   class Object;
-  template<class T> class ObjectArray;
 }  // namespace mirror
 
-class StackVisitor;
-class Thread;
-
 namespace gc {
 
+class Heap;
+
 namespace accounting {
   template <typename T> class AtomicStack;
-  class MarkIfReachesAllocspaceVisitor;
-  class ModUnionClearCardVisitor;
-  class ModUnionVisitor;
-  class ModUnionTableBitmap;
-  class MarkStackChunk;
   typedef AtomicStack<mirror::Object*> ObjectStack;
   class SpaceBitmap;
 }  // namespace accounting
 
 namespace space {
-  class BumpPointerSpace;
   class ContinuousMemMapAllocSpace;
   class ContinuousSpace;
-  class MallocSpace;
 }  // namespace space
 
-class Heap;
-
 namespace collector {
 
 class SemiSpace : public GarbageCollector {
@@ -189,12 +180,6 @@
   void ProcessMarkStack()
       EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
 
-  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
-                         mirror::Object** weak_references,
-                         mirror::Object** finalizer_references,
-                         mirror::Object** phantom_references)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);
-
   inline mirror::Object* GetForwardingAddressInFromSpace(mirror::Object* obj) const;
 
   // Revoke all the thread-local buffers.
diff --git a/runtime/gc/collector/sticky_mark_sweep.h b/runtime/gc/collector/sticky_mark_sweep.h
index 934b1bd..4f9dabf 100644
--- a/runtime/gc/collector/sticky_mark_sweep.h
+++ b/runtime/gc/collector/sticky_mark_sweep.h
@@ -46,10 +46,6 @@
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
-  // Don't need to do anything special here since we scan all the cards which may have references
-  // to the newly allocated objects.
-  void UpdateAndMarkModUnion() OVERRIDE { }
-
  private:
   DISALLOW_COPY_AND_ASSIGN(StickyMarkSweep);
 };
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index ff4b4ce..26a812a 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -2821,5 +2821,19 @@
   CHECK(remembered_sets_.find(space) == remembered_sets_.end());
 }
 
+void Heap::ClearMarkedObjects() {
+  // Clear all of the spaces' mark bitmaps.
+  for (const auto& space : GetContinuousSpaces()) {
+    accounting::SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
+    if (space->GetLiveBitmap() != mark_bitmap) {
+      mark_bitmap->Clear();
+    }
+  }
+  // Clear the marked objects in the discontinuous space object sets.
+  for (const auto& space : GetDiscontinuousSpaces()) {
+    space->GetMarkObjects()->Clear();
+  }
+}
+
 }  // namespace gc
 }  // namespace art
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index a522750..511ad61 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -254,6 +254,9 @@
   void IncrementDisableMovingGC(Thread* self);
   void DecrementDisableMovingGC(Thread* self);
 
+  // Clear all of the mark bits, doesn't clear bitmaps which have the same live bits as mark bits.
+  void ClearMarkedObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
+
   // Initiates an explicit garbage collection.
   void CollectGarbage(bool clear_soft_references);