Remove unnecessary indirection from MemMap.

Avoid passing plain MemMap pointers around by making MemMap
moveable and returning MemMap objects by value. Previously
a zero-size MemMap could be valid; this is now forbidden.
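
A minimal sketch of the ownership model this moves to (hypothetical
names, not the actual art::MemMap API): a moveable, non-copyable
mapping object that is returned by value, where the default/moved-from
state is invalid rather than "valid but zero-size":

  #include <cstddef>
  #include <utility>

  // Hypothetical stand-in for art::MemMap, for illustration only.
  class SketchMemMap {
   public:
    SketchMemMap() = default;  // Default-constructed map is invalid.
    SketchMemMap(void* begin, size_t size) : begin_(begin), size_(size) {}

    // Moveable but not copyable, so ownership transfers by value.
    SketchMemMap(SketchMemMap&& other) noexcept
        : begin_(std::exchange(other.begin_, nullptr)),
          size_(std::exchange(other.size_, 0u)) {}
    SketchMemMap& operator=(SketchMemMap&& other) noexcept {
      begin_ = std::exchange(other.begin_, nullptr);
      size_ = std::exchange(other.size_, 0u);
      return *this;
    }
    SketchMemMap(const SketchMemMap&) = delete;
    SketchMemMap& operator=(const SketchMemMap&) = delete;

    // Valid only for a non-zero range; a zero-size map is not valid.
    bool IsValid() const { return begin_ != nullptr && size_ != 0u; }

   private:
    void* begin_ = nullptr;
    size_t size_ = 0u;
  };

  // Callers receive the mapping by value; no raw pointers change hands.
  SketchMemMap MakeSketchMap(void* begin, size_t size) {
    return SketchMemMap(begin, size);
  }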

MemMap::RemapAtEnd() is changed to drop the explicit call to
munmap(); mmap() with MAP_FIXED automatically discards the
overlapping parts of any existing mappings.
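
For reference, a minimal standalone illustration of the MAP_FIXED
behavior relied on here (not the RemapAtEnd() implementation itself):

  #include <sys/mman.h>
  #include <unistd.h>
  #include <cassert>
  #include <cstddef>
  #include <cstdint>

  int main() {
    const size_t page_size = static_cast<size_t>(sysconf(_SC_PAGESIZE));
    const size_t full_size = 4 * page_size;

    void* base = mmap(nullptr, full_size, PROT_READ | PROT_WRITE,
                      MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    assert(base != MAP_FAILED);

    // Re-map the last page in place; MAP_FIXED replaces the overlapping
    // range of the old mapping, so no separate munmap() is needed first.
    void* tail = static_cast<uint8_t*>(base) + 3 * page_size;
    void* remapped = mmap(tail, page_size, PROT_READ,
                          MAP_PRIVATE | MAP_ANONYMOUS | MAP_FIXED, -1, 0);
    assert(remapped == tail);

    munmap(base, full_size);  // Single unmap of the whole range at teardown.
    return 0;
  }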

Test: m test-art-host-gtest
Test: testrunner.py --host --optimizing
Test: Pixel 2 XL boots.
Test: m test-art-target-gtest
Test: testrunner.py --target --optimizing
Change-Id: I12bd453c26a396edc20eb141bfd4dad20923f170
diff --git a/runtime/gc/space/large_object_space.h b/runtime/gc/space/large_object_space.h
index f37d814..b69bd91 100644
--- a/runtime/gc/space/large_object_space.h
+++ b/runtime/gc/space/large_object_space.h
@@ -148,7 +148,7 @@
 
  protected:
   struct LargeObject {
-    MemMap* mem_map;
+    MemMap mem_map;
     bool is_zygote;
   };
   explicit LargeObjectMapSpace(const std::string& name);
@@ -182,7 +182,7 @@
   std::pair<uint8_t*, uint8_t*> GetBeginEndAtomic() const OVERRIDE REQUIRES(!lock_);
 
  protected:
-  FreeListSpace(const std::string& name, MemMap* mem_map, uint8_t* begin, uint8_t* end);
+  FreeListSpace(const std::string& name, MemMap&& mem_map, uint8_t* begin, uint8_t* end);
   size_t GetSlotIndexForAddress(uintptr_t address) const {
     DCHECK(Contains(reinterpret_cast<mirror::Object*>(address)));
     return (address - reinterpret_cast<uintptr_t>(Begin())) / kAlignment;
@@ -210,9 +210,9 @@
 
   // There is not footer for any allocations at the end of the space, so we keep track of how much
   // free space there is at the end manually.
-  std::unique_ptr<MemMap> mem_map_;
+  MemMap mem_map_;
   // Side table for allocation info, one per page.
-  std::unique_ptr<MemMap> allocation_info_map_;
+  MemMap allocation_info_map_;
   AllocationInfo* allocation_info_;
 
   mutable Mutex lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;