Bin packing the zygote (best fit).

We now use bin packing to fill holes in the non-movable space with
objects from the bump pointer space when we create the zygote.

Zygote space PSS reduction on AOSP: ~300k.
Zygote size on AOSP: 2121040 bytes -> 1597944 bytes.

Deleted some unreachable code.

Bug: 11902311

Change-Id: Idc957d69e93b3a941e0c298d47a21b73526dd286
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 0150609..ffa9154 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -306,62 +306,69 @@
   return false;
 }
 
+mirror::Object* SemiSpace::MarkNonForwardedObject(mirror::Object* obj) {
+  size_t object_size = obj->SizeOf();
+  size_t bytes_allocated = 0;
+  mirror::Object* forward_address = nullptr;
+  if (kEnableSimplePromo && reinterpret_cast<byte*>(obj) < last_gc_to_space_end_) {
+    // If it's allocated before the last GC (older), move (pseudo-promote) it to
+    // the non-moving space (as sort of an old generation).
+    size_t bytes_promoted;
+    space::MallocSpace* non_moving_space = GetHeap()->GetNonMovingSpace();
+    forward_address = non_moving_space->Alloc(self_, object_size, &bytes_promoted);
+    if (forward_address == nullptr) {
+      // If out of space, fall back to the to-space.
+      forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated);
+    } else {
+      GetHeap()->num_bytes_allocated_.FetchAndAdd(bytes_promoted);
+      bytes_promoted_ += bytes_promoted;
+      // Mark forward_address on the live bit map.
+      accounting::SpaceBitmap* live_bitmap = non_moving_space->GetLiveBitmap();
+      DCHECK(live_bitmap != nullptr);
+      DCHECK(!live_bitmap->Test(forward_address));
+      live_bitmap->Set(forward_address);
+      // Mark forward_address on the mark bit map.
+      accounting::SpaceBitmap* mark_bitmap = non_moving_space->GetMarkBitmap();
+      DCHECK(mark_bitmap != nullptr);
+      DCHECK(!mark_bitmap->Test(forward_address));
+      mark_bitmap->Set(forward_address);
+    }
+    DCHECK(forward_address != nullptr);
+  } else {
+    // If it's allocated after the last GC (younger), copy it to the to-space.
+    forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated);
+  }
+  // Copy over the object and add it to the mark stack since we still need to update its
+  // references.
+  memcpy(reinterpret_cast<void*>(forward_address), obj, object_size);
+  if (to_space_live_bitmap_ != nullptr) {
+    to_space_live_bitmap_->Set(forward_address);
+  }
+  return forward_address;
+}
+
 // Used to mark and copy objects. Any newly-marked objects who are in the from space get moved to
 // the to-space and have their forward address updated. Objects which have been newly marked are
 // pushed on the mark stack.
 Object* SemiSpace::MarkObject(Object* obj) {
-  Object* ret = obj;
+  Object* forward_address = obj;
   if (obj != nullptr && !IsImmune(obj)) {
     if (from_space_->HasAddress(obj)) {
-      mirror::Object* forward_address = GetForwardingAddressInFromSpace(obj);
+      forward_address = GetForwardingAddressInFromSpace(obj);
       // If the object has already been moved, return the new forward address.
       if (forward_address == nullptr) {
-        // Otherwise, we need to move the object and add it to the markstack for processing.
-        size_t object_size = obj->SizeOf();
-        size_t bytes_allocated = 0;
-        if (kEnableSimplePromo && reinterpret_cast<byte*>(obj) < last_gc_to_space_end_) {
-          // If it's allocated before the last GC (older), move (pseudo-promote) it to
-          // the non-moving space (as sort of an old generation.)
-          size_t bytes_promoted;
-          space::MallocSpace* non_moving_space = GetHeap()->GetNonMovingSpace();
-          forward_address = non_moving_space->Alloc(self_, object_size, &bytes_promoted);
-          if (forward_address == nullptr) {
-            // If out of space, fall back to the to-space.
-            forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated);
-          } else {
-            GetHeap()->num_bytes_allocated_.FetchAndAdd(bytes_promoted);
-            bytes_promoted_ += bytes_promoted;
-            // Mark forward_address on the live bit map.
-            accounting::SpaceBitmap* live_bitmap = non_moving_space->GetLiveBitmap();
-            DCHECK(live_bitmap != nullptr);
-            DCHECK(!live_bitmap->Test(forward_address));
-            live_bitmap->Set(forward_address);
-            // Mark forward_address on the mark bit map.
-            accounting::SpaceBitmap* mark_bitmap = non_moving_space->GetMarkBitmap();
-            DCHECK(mark_bitmap != nullptr);
-            DCHECK(!mark_bitmap->Test(forward_address));
-            mark_bitmap->Set(forward_address);
-          }
-          DCHECK(forward_address != nullptr);
-        } else {
-          // If it's allocated after the last GC (younger), copy it to the to-space.
-          forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated);
-        }
-        // Copy over the object and add it to the mark stack since we still need to update it's
-        // references.
-        memcpy(reinterpret_cast<void*>(forward_address), obj, object_size);
+        forward_address = MarkNonForwardedObject(obj);
+        DCHECK(forward_address != nullptr);
         // Make sure to only update the forwarding address AFTER you copy the object so that the
         // monitor word doesn't get stomped over.
-        obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(forward_address)));
-        if (to_space_live_bitmap_ != nullptr) {
-          to_space_live_bitmap_->Set(forward_address);
-        }
+        obj->SetLockWord(LockWord::FromForwardingAddress(
+            reinterpret_cast<size_t>(forward_address)));
+        // Push the object onto the mark stack for later processing.
         MarkStackPush(forward_address);
       } else {
         DCHECK(to_space_->HasAddress(forward_address) ||
                (kEnableSimplePromo && GetHeap()->GetNonMovingSpace()->HasAddress(forward_address)));
       }
-      ret = forward_address;
       // TODO: Do we need this if in the else statement?
     } else {
       accounting::SpaceBitmap* object_bitmap = heap_->GetMarkBitmap()->GetContinuousSpaceBitmap(obj);
@@ -379,7 +386,7 @@
       }
     }
   }
-  return ret;
+  return forward_address;
 }
 
 Object* SemiSpace::RecursiveMarkObjectCallback(Object* root, void* arg) {
@@ -431,43 +438,19 @@
   timings_.EndSplit();
 }
 
-struct SweepCallbackContext {
-  SemiSpace* mark_sweep;
-  space::AllocSpace* space;
-  Thread* self;
-};
-
-void SemiSpace::SweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
-  SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
-  SemiSpace* gc = context->mark_sweep;
-  Heap* heap = gc->GetHeap();
-  space::AllocSpace* space = context->space;
-  Thread* self = context->self;
-  Locks::heap_bitmap_lock_->AssertExclusiveHeld(self);
-  size_t freed_bytes = space->FreeList(self, num_ptrs, ptrs);
-  heap->RecordFree(num_ptrs, freed_bytes);
-  gc->freed_objects_.FetchAndAdd(num_ptrs);
-  gc->freed_bytes_.FetchAndAdd(freed_bytes);
-}
-
-void SemiSpace::ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
-  SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
-  Locks::heap_bitmap_lock_->AssertExclusiveHeld(context->self);
-  Heap* heap = context->mark_sweep->GetHeap();
-  // We don't free any actual memory to avoid dirtying the shared zygote pages.
-  for (size_t i = 0; i < num_ptrs; ++i) {
-    Object* obj = static_cast<Object*>(ptrs[i]);
-    heap->GetLiveBitmap()->Clear(obj);
-    heap->GetCardTable()->MarkCard(obj);
-  }
+bool SemiSpace::ShouldSweepSpace(space::MallocSpace* space) const {
+  return space != from_space_ && space != to_space_;
 }
 
 void SemiSpace::Sweep(bool swap_bitmaps) {
   DCHECK(mark_stack_->IsEmpty());
   TimingLogger::ScopedSplit("Sweep", &timings_);
   for (const auto& space : GetHeap()->GetContinuousSpaces()) {
-    if (space->IsMallocSpace() && space != from_space_ && space != to_space_) {
+    if (space->IsMallocSpace()) {
       space::MallocSpace* malloc_space = space->AsMallocSpace();
+      if (!ShouldSweepSpace(malloc_space)) {
+        continue;
+      }
       TimingLogger::ScopedSplit split(
           malloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", &timings_);
       size_t freed_objects = 0;
@@ -552,12 +535,9 @@
     // If the object is forwarded then it MUST be marked.
     DCHECK(forwarding_address == nullptr || to_space_->HasAddress(forwarding_address) ||
            (kEnableSimplePromo && GetHeap()->GetNonMovingSpace()->HasAddress(forwarding_address)));
-    if (forwarding_address != nullptr) {
-      return forwarding_address;
-    }
-    // Must not be marked, return nullptr;
-    return nullptr;
+    return forwarding_address;  // Returns either the forwarding address or nullptr.
   } else if (to_space_->HasAddress(obj)) {
+    // Should be unlikely.
     // Already forwarded, must be marked.
     return obj;
   }
diff --git a/runtime/gc/collector/semi_space.h b/runtime/gc/collector/semi_space.h
index b76ef5f..7bb7b19 100644
--- a/runtime/gc/collector/semi_space.h
+++ b/runtime/gc/collector/semi_space.h
@@ -54,6 +54,7 @@
   class BumpPointerSpace;
   class ContinuousMemMapAllocSpace;
   class ContinuousSpace;
+  class MallocSpace;
 }  // namespace space
 
 class Heap;
@@ -149,6 +150,9 @@
   static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* root, void* arg)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
 
+  virtual mirror::Object* MarkNonForwardedObject(mirror::Object* obj)
+      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
+
  protected:
   // Returns null if the object is not marked, otherwise returns the forwarding address (same as
   // object for non movable things).
@@ -162,16 +166,12 @@
   bool MarkLargeObject(const mirror::Object* obj)
       EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
 
-  static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-
-  // Special sweep for zygote that just marks objects / dirties cards.
-  static void ZygoteSweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
-      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-
   // Expand mark stack to 2x its current size.
   void ResizeMarkStack(size_t new_size);
 
+  // Returns true if we should sweep the space.
+  virtual bool ShouldSweepSpace(space::MallocSpace* space) const;
+
   // Returns how many threads we should use for the current GC phase based on if we are paused,
   // whether or not we care about pauses.
   size_t GetThreadCount(bool paused) const;