Refactor GC to have a class for each different type of GC.

Added separate files for mark sweep, partial mark sweep, and
sticky mark sweep.

Added a common superclass for GC.

Added additional statistics for each GC.

Moved the main garbage collection code out of heap.cc.

Change-Id: Ida0021ab2f740fc8228bbbf4d43cd9bc56b4ba46
diff --git a/src/heap.h b/src/heap.h
index 22b009c..5ba5c84 100644
--- a/src/heap.h
+++ b/src/heap.h
@@ -44,6 +44,7 @@
 class Class;
 class ConditionVariable;
 class DlMallocSpace;
+class GarbageCollector;
 class HeapBitmap;
 class ImageSpace;
 class LargeObjectSpace;
@@ -192,7 +193,11 @@
   // true if we waited for the GC to complete.
   GcType WaitForConcurrentGcToComplete(Thread* self) LOCKS_EXCLUDED(gc_complete_lock_);
 
-  const Spaces& GetSpaces() {
+  const Spaces& GetSpaces() const {
+    return spaces_;
+  }
+
+  Spaces& GetSpaces() {
     return spaces_;
   }
 
@@ -291,6 +296,10 @@
     return mark_bitmap_.get();
   }
 
+  ObjectStack* GetLiveStack() SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
+    return live_stack_.get();
+  }
+
   void PreZygoteFork() LOCKS_EXCLUDED(Locks::heap_bitmap_lock_);
 
   // Mark and empty stack.
@@ -349,10 +358,6 @@
   void RequestHeapTrim() LOCKS_EXCLUDED(Locks::runtime_shutdown_lock_);
   void RequestConcurrentGC(Thread* self) LOCKS_EXCLUDED(Locks::runtime_shutdown_lock_);
 
-  // Swap bitmaps (if we are a full Gc then we swap the zygote bitmap too).
-  void SwapBitmaps(GcType gc_type) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-  void SwapLargeObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-
   void RecordAllocation(size_t size, Object* object)
       LOCKS_EXCLUDED(GlobalSynchronization::heap_bitmap_lock_)
       SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -364,14 +369,10 @@
                      Locks::heap_bitmap_lock_,
                      Locks::mutator_lock_,
                      Locks::thread_suspend_count_lock_);
-  void CollectGarbageMarkSweepPlan(Thread* self, GcType gc_plan, GcCause gc_cause,
-                                   bool clear_soft_references)
-      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_,
-                     Locks::mutator_lock_);
-  void CollectGarbageConcurrentMarkSweepPlan(Thread* self, GcType gc_plan, GcCause gc_cause,
-                                             bool clear_soft_references)
-      LOCKS_EXCLUDED(Locks::heap_bitmap_lock_,
-                     Locks::mutator_lock_);
+
+  void PreGcVerification(GarbageCollector* gc);
+  void PreSweepingGcVerification(GarbageCollector* gc);
+  void PostGcVerification(GarbageCollector* gc);
 
   // Given the current contents of the alloc space, increase the allowed heap footprint to match
   // the target utilization ratio.  This should only be called immediately after a full garbage
@@ -392,9 +393,6 @@
   // Swap the allocation stack with the live stack.
   void SwapStacks();
 
-  // Bind bitmaps (makes the live and mark bitmaps for immune spaces point to the same bitmap).
-  void BindBitmaps(GcType gc_type, MarkSweep& mark_sweep);
-
   // Clear cards and update the mod union table.
   void ProcessCards(TimingLogger& timings);
 
@@ -406,10 +404,6 @@
   // The alloc space which we are currently allocating into.
   DlMallocSpace* alloc_space_;
 
-  // One cumulative logger for each type of Gc.
-  typedef SafeMap<GcType, CumulativeLogger*> CumulativeTimings;
-  CumulativeTimings cumulative_timings_;
-
   // The mod-union table remembers all of the references from the image space to the alloc /
   // zygote spaces.
   UniquePtr<ModUnionTable> mod_union_table_;
@@ -453,7 +447,6 @@
   size_t concurrent_start_bytes_;
 
   // Number of bytes allocated since the last Gc, we use this to help determine when to schedule concurrent GCs.
-  size_t bytes_since_last_gc_;
   size_t sticky_gc_count_;
 
   size_t total_bytes_freed_;
@@ -495,9 +488,6 @@
   UniquePtr<HeapBitmap> live_bitmap_ GUARDED_BY(Locks::heap_bitmap_lock_);
   UniquePtr<HeapBitmap> mark_bitmap_ GUARDED_BY(Locks::heap_bitmap_lock_);
 
-  // Used to ensure that we don't ever recursively request GC.
-  volatile bool requesting_gc_;
-
   // Mark stack that we reuse to avoid re-allocating the mark stack.
   UniquePtr<ObjectStack> mark_stack_;
 
@@ -535,7 +525,6 @@
   double target_utilization_;
 
   // Total time which mutators are paused or waiting for GC to complete.
-  uint64_t total_paused_time_;
   uint64_t total_wait_time_;
 
   // Total number of objects allocated in microseconds.
@@ -543,6 +532,8 @@
   AtomicInteger total_allocation_time_;
 
   bool verify_objects_;
+  typedef std::vector<MarkSweep*> Collectors;
+  Collectors mark_sweep_collectors_;
 
   friend class MarkSweep;
   friend class VerifyReferenceCardVisitor;