Background compaction support.

When the process state changes to a state which does not perceive
jank, we copy from the main free-list backed allocation space to
the bump pointer space and enable the semispace allocator.

When we transition back to foreground, we copy back to a free-list
backed space.

Create a separate non-moving space which only holds non-movable
objects. This enables us to quickly wipe the current alloc space
(DlMalloc / RosAlloc) when we transition to background.

Added multiple alloc space support to the sticky mark sweep GC.

Added a -XX:BackgroundGC option which lets you specify
which GC to use for background apps. Passing in
-XX:BackgroundGC=SS makes the heap compact itself for apps which
do not perceive jank.

Results:
Simple background foreground test:
0. Reboot phone, unlock.
1. Open browser, click on home.
2. Open calculator, click on home.
3. Open calendar, click on home.
4. Open camera, click on home.
5. Open clock, click on home.
6. adb shell dumpsys meminfo

PSS Normal ART:
Sample 1:
    88468 kB: Dalvik
     3188 kB: Dalvik Other
Sample 2:
    81125 kB: Dalvik
     3080 kB: Dalvik Other

PSS Dalvik:
Total PSS by category:
Sample 1:
    81033 kB: Dalvik
    27787 kB: Dalvik Other
Sample 2:
    81901 kB: Dalvik
    28869 kB: Dalvik Other

PSS ART + Background Compaction:
Sample 1:
    71014 kB: Dalvik
     1412 kB: Dalvik Other
Sample 2:
    73859 kB: Dalvik
     1400 kB: Dalvik Other

Dalvik other reduction can be explained by less deep allocation
stacks / less live bitmaps / less dirty cards.

TODO improvements: Recycle mem-maps which are unused in the current
state. Avoid hardcoding the 64 MB capacity of the non-movable space
(to prevent linear alloc exhaustion nightmares). Figure out ways to
deal with low virtual address memory problems.

Bug: 8981901

Change-Id: Ib235d03f45548ffc08a06b8ae57bf5bada49d6f3
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 0dd8792..0150609 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -99,9 +99,13 @@
   WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
   // Mark all of the spaces we never collect as immune.
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
-    if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect
-        || space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
-      ImmuneSpace(space);
+    if (space->GetLiveBitmap() != nullptr) {
+      if (space == to_space_) {
+        BindLiveToMarkBitmap(to_space_);
+      } else if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect
+          || space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
+        ImmuneSpace(space);
+      }
     }
   }
   timings_.EndSplit();
@@ -115,11 +119,6 @@
       immune_end_(nullptr),
       to_space_(nullptr),
       from_space_(nullptr),
-      soft_reference_list_(nullptr),
-      weak_reference_list_(nullptr),
-      finalizer_reference_list_(nullptr),
-      phantom_reference_list_(nullptr),
-      cleared_reference_list_(nullptr),
       self_(nullptr),
       last_gc_to_space_end_(nullptr),
       bytes_promoted_(0) {
@@ -132,15 +131,12 @@
   DCHECK(mark_stack_ != nullptr);
   immune_begin_ = nullptr;
   immune_end_ = nullptr;
-  soft_reference_list_ = nullptr;
-  weak_reference_list_ = nullptr;
-  finalizer_reference_list_ = nullptr;
-  phantom_reference_list_ = nullptr;
-  cleared_reference_list_ = nullptr;
   self_ = Thread::Current();
   // Do any pre GC verification.
   timings_.NewSplit("PreGcVerification");
   heap_->PreGcVerification(this);
+  // Set the initial bitmap.
+  to_space_live_bitmap_ = to_space_->GetLiveBitmap();
 }
 
 void SemiSpace::ProcessReferences(Thread* self) {
@@ -229,17 +225,18 @@
     SweepSystemWeaks();
   }
   // Record freed memory.
-  int from_bytes = from_space_->GetBytesAllocated();
-  int to_bytes = to_space_->GetBytesAllocated();
-  int from_objects = from_space_->GetObjectsAllocated();
-  int to_objects = to_space_->GetObjectsAllocated();
-  int freed_bytes = from_bytes - to_bytes;
-  int freed_objects = from_objects - to_objects;
-  CHECK_GE(freed_bytes, 0);
+  uint64_t from_bytes = from_space_->GetBytesAllocated();
+  uint64_t to_bytes = to_space_->GetBytesAllocated();
+  uint64_t from_objects = from_space_->GetObjectsAllocated();
+  uint64_t to_objects = to_space_->GetObjectsAllocated();
+  CHECK_LE(to_objects, from_objects);
+  int64_t freed_bytes = from_bytes - to_bytes;
+  int64_t freed_objects = from_objects - to_objects;
   freed_bytes_.FetchAndAdd(freed_bytes);
   freed_objects_.FetchAndAdd(freed_objects);
-  heap_->RecordFree(static_cast<size_t>(freed_objects), static_cast<size_t>(freed_bytes));
-
+  // Note: Freed bytes can be negative if we copy from a compacted space to a free-list backed
+  // space.
+  heap_->RecordFree(freed_objects, freed_bytes);
   timings_.StartSplit("PreSweepingGcVerification");
   heap_->PreSweepingGcVerification(this);
   timings_.EndSplit();
@@ -356,6 +353,9 @@
         // Make sure to only update the forwarding address AFTER you copy the object so that the
         // monitor word doesn't get stomped over.
         obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(forward_address)));
+        if (to_space_live_bitmap_ != nullptr) {
+          to_space_live_bitmap_->Set(forward_address);
+        }
         MarkStackPush(forward_address);
       } else {
         DCHECK(to_space_->HasAddress(forward_address) ||