Reduce memory lost by ArenaAllocator for large allocations.

When allocating from a new arena, check whether the old
arena has more space remaining than the new one will have
after satisfying the current allocation. If so, keep using
the old arena to reduce the amount of "lost" arena memory.
This can happen whenever we try to allocate more than half
the default arena size, and it is very likely when the
allocation exceeds the default arena size, even though the
ArenaPool could still hand back a much larger, previously
allocated arena.
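
A minimal standalone sketch of the new policy, with
simplified stand-in types and names (the real change to
AllocFromNewArena() is in the diff below):

  #include <cstddef>
  #include <cstdint>

  struct Arena {
    uint8_t* memory_;
    size_t size_;
    size_t bytes_allocated_ = 0u;
    Arena* next_ = nullptr;
    uint8_t* Begin() { return memory_; }
    uint8_t* End() { return memory_ + size_; }
    size_t Size() const { return size_; }
  };

  // Carve `bytes` out of `new_arena`, but only retire the old
  // head arena if the new arena would leave more free space.
  uint8_t* AllocFromNewArenaSketch(Arena*& head, uint8_t*& ptr,
                                   uint8_t*& end, Arena* new_arena,
                                   size_t bytes) {
    if (static_cast<size_t>(end - ptr) > new_arena->Size() - bytes) {
      // The old arena keeps more free space: leave the bump
      // pointer alone, mark the new arena as fully used and
      // splice it in behind the head.
      new_arena->bytes_allocated_ = bytes;
      new_arena->next_ = head->next_;
      head->next_ = new_arena;
    } else {
      // The new arena wins: make it the head and continue
      // bump-allocating from it.
      new_arena->next_ = head;
      head = new_arena;
      ptr = new_arena->Begin() + bytes;
      end = new_arena->End();
    }
    return new_arena->Begin();
  }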

Also avoid arithmetic overflow when checking whether the
request can be satisfied from the current arena, and abort
immediately if calloc() fails.
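
For illustration, the old bounds check performed pointer
arithmetic that can overflow for a huge request, while the
new one compares against the remaining capacity (helper
names here are hypothetical):

  #include <cstddef>
  #include <cstdint>

  bool FitsOld(uint8_t* ptr, uint8_t* end, size_t bytes) {
    // `ptr + bytes` is undefined behavior if it wraps past the
    // end of the address space; the check may then pass even
    // though the request cannot possibly fit.
    return ptr + bytes <= end;
  }

  bool FitsNew(uint8_t* ptr, uint8_t* end, size_t bytes) {
    // `end - ptr` is the remaining capacity; the size_t
    // comparison cannot overflow.
    return bytes <= static_cast<size_t>(end - ptr);
  }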

Bug: 28173563
Bug: 28256882
Change-Id: I1b4bda5d3f32ecd95fbd11addd1f0ca6dcc33e45
diff --git a/build/Android.gtest.mk b/build/Android.gtest.mk
index e9b6a6d..cd463ec 100644
--- a/build/Android.gtest.mk
+++ b/build/Android.gtest.mk
@@ -172,6 +172,7 @@
   runtime/arch/x86/instruction_set_features_x86_test.cc \
   runtime/arch/x86_64/instruction_set_features_x86_64_test.cc \
   runtime/barrier_test.cc \
+  runtime/base/arena_allocator_test.cc \
   runtime/base/bit_field_test.cc \
   runtime/base/bit_utils_test.cc \
   runtime/base/bit_vector_test.cc \
@@ -274,7 +275,6 @@
   compiler/optimizing/ssa_test.cc \
   compiler/optimizing/stack_map_test.cc \
   compiler/optimizing/suspend_check_test.cc \
-  compiler/utils/arena_allocator_test.cc \
   compiler/utils/dedupe_set_test.cc \
   compiler/utils/intrusive_forward_list_test.cc \
   compiler/utils/swap_space_test.cc \
diff --git a/compiler/utils/arena_allocator_test.cc b/compiler/utils/arena_allocator_test.cc
deleted file mode 100644
index 7f67ef1..0000000
--- a/compiler/utils/arena_allocator_test.cc
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "base/arena_allocator.h"
-#include "base/arena_bit_vector.h"
-#include "gtest/gtest.h"
-
-namespace art {
-
-TEST(ArenaAllocator, Test) {
-  ArenaPool pool;
-  ArenaAllocator arena(&pool);
-  ArenaBitVector bv(&arena, 10, true);
-  bv.SetBit(5);
-  EXPECT_EQ(1U, bv.GetStorageSize());
-  bv.SetBit(35);
-  EXPECT_EQ(2U, bv.GetStorageSize());
-}
-
-}  // namespace art
diff --git a/runtime/base/arena_allocator.cc b/runtime/base/arena_allocator.cc
index d951089..5b6801f 100644
--- a/runtime/base/arena_allocator.cc
+++ b/runtime/base/arena_allocator.cc
@@ -162,6 +162,7 @@
 
 MallocArena::MallocArena(size_t size) {
   memory_ = reinterpret_cast<uint8_t*>(calloc(1, size));
+  CHECK(memory_ != nullptr);  // Abort on OOM.
   size_ = size;
 }
 
@@ -319,13 +320,23 @@
   // mark only the actually allocated memory as defined. That leaves red zones
   // and padding between allocations marked as inaccessible.
   size_t rounded_bytes = RoundUp(bytes + kMemoryToolRedZoneBytes, 8);
-  if (UNLIKELY(ptr_ + rounded_bytes > end_)) {
-    // Obtain a new block.
-    ObtainNewArenaForAllocation(rounded_bytes);
-    CHECK(ptr_ != nullptr);
-    MEMORY_TOOL_MAKE_NOACCESS(ptr_, end_ - ptr_);
-  }
   ArenaAllocatorStats::RecordAlloc(rounded_bytes, kind);
+  if (UNLIKELY(rounded_bytes > static_cast<size_t>(end_ - ptr_))) {
+    void* ret = AllocFromNewArena(rounded_bytes);
+    if (ret == arena_head_->Begin()) {
+      DCHECK(ptr_ - rounded_bytes == ret);
+      uint8_t* noaccess_begin = ptr_ - rounded_bytes + bytes;
+      MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, end_ - noaccess_begin);
+    } else {
+      // We're still using the old arena but `ret` comes from a new one just after it.
+      DCHECK(arena_head_->next_ != nullptr);
+      DCHECK(ret == arena_head_->next_->Begin());
+      DCHECK_EQ(rounded_bytes, arena_head_->next_->GetBytesAllocated());
+      uint8_t* noaccess_begin = arena_head_->next_->Begin() + bytes;
+      MEMORY_TOOL_MAKE_NOACCESS(noaccess_begin, arena_head_->next_->End() - noaccess_begin);
+    }
+    return ret;
+  }
   uint8_t* ret = ptr_;
   ptr_ += rounded_bytes;
   MEMORY_TOOL_MAKE_DEFINED(ret, bytes);
@@ -340,14 +351,27 @@
   pool_->FreeArenaChain(arena_head_);
 }
 
-void ArenaAllocator::ObtainNewArenaForAllocation(size_t allocation_size) {
-  UpdateBytesAllocated();
-  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, allocation_size));
-  new_arena->next_ = arena_head_;
-  arena_head_ = new_arena;
-  // Update our internal data structures.
-  ptr_ = begin_ = new_arena->Begin();
-  end_ = new_arena->End();
+void* ArenaAllocator::AllocFromNewArena(size_t bytes) {
+  Arena* new_arena = pool_->AllocArena(std::max(Arena::kDefaultSize, bytes));
+  DCHECK(new_arena != nullptr);
+  DCHECK_LE(bytes, new_arena->Size());
+  if (static_cast<size_t>(end_ - ptr_) > new_arena->Size() - bytes) {
+    // The old arena has more space remaining than the new one, so keep using it.
+    // This can happen when the requested size is over half of the default size.
+    DCHECK(arena_head_ != nullptr);
+    new_arena->bytes_allocated_ = bytes;  // UpdateBytesAllocated() on the new_arena.
+    new_arena->next_ = arena_head_->next_;
+    arena_head_->next_ = new_arena;
+  } else {
+    UpdateBytesAllocated();
+    new_arena->next_ = arena_head_;
+    arena_head_ = new_arena;
+    // Update our internal data structures.
+    begin_ = new_arena->Begin();
+    ptr_ = begin_ + bytes;
+    end_ = new_arena->End();
+  }
+  return new_arena->Begin();
 }
 
 bool ArenaAllocator::Contains(const void* ptr) const {
diff --git a/runtime/base/arena_allocator.h b/runtime/base/arena_allocator.h
index 52a1002..4b745f0 100644
--- a/runtime/base/arena_allocator.h
+++ b/runtime/base/arena_allocator.h
@@ -234,6 +234,8 @@
   friend class ScopedArenaAllocator;
   template <bool kCount> friend class ArenaAllocatorStatsImpl;
 
+  friend class ArenaAllocatorTest;
+
  private:
   DISALLOW_COPY_AND_ASSIGN(Arena);
 };
@@ -303,14 +305,10 @@
       return AllocWithMemoryTool(bytes, kind);
     }
     bytes = RoundUp(bytes, kAlignment);
-    if (UNLIKELY(ptr_ + bytes > end_)) {
-      // Obtain a new block.
-      ObtainNewArenaForAllocation(bytes);
-      if (UNLIKELY(ptr_ == nullptr)) {
-        return nullptr;
-      }
-    }
     ArenaAllocatorStats::RecordAlloc(bytes, kind);
+    if (UNLIKELY(bytes > static_cast<size_t>(end_ - ptr_))) {
+      return AllocFromNewArena(bytes);
+    }
     uint8_t* ret = ptr_;
     ptr_ += bytes;
     return ret;
@@ -350,10 +348,6 @@
     return static_cast<T*>(Alloc(length * sizeof(T), kind));
   }
 
-  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
-
-  void ObtainNewArenaForAllocation(size_t allocation_size);
-
   size_t BytesAllocated() const;
 
   MemStats GetMemStats() const;
@@ -369,6 +363,9 @@
   bool Contains(const void* ptr) const;
 
  private:
+  void* AllocWithMemoryTool(size_t bytes, ArenaAllocKind kind);
+  void* AllocFromNewArena(size_t bytes);
+
   static constexpr size_t kAlignment = 8;
 
   void UpdateBytesAllocated();
@@ -382,6 +379,8 @@
   template <typename U>
   friend class ArenaAllocatorAdapter;
 
+  friend class ArenaAllocatorTest;
+
   DISALLOW_COPY_AND_ASSIGN(ArenaAllocator);
 };  // ArenaAllocator
 
diff --git a/runtime/base/arena_allocator_test.cc b/runtime/base/arena_allocator_test.cc
new file mode 100644
index 0000000..c07c0ed
--- /dev/null
+++ b/runtime/base/arena_allocator_test.cc
@@ -0,0 +1,105 @@
+/*
+ * Copyright (C) 2013 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "base/arena_allocator.h"
+#include "base/arena_bit_vector.h"
+#include "gtest/gtest.h"
+
+namespace art {
+
+class ArenaAllocatorTest : public testing::Test {
+ protected:
+  size_t NumberOfArenas(ArenaAllocator* arena) {
+    size_t result = 0u;
+    for (Arena* a = arena->arena_head_; a != nullptr; a = a->next_) {
+      ++result;
+    }
+    return result;
+  }
+};
+
+TEST_F(ArenaAllocatorTest, Test) {
+  ArenaPool pool;
+  ArenaAllocator arena(&pool);
+  ArenaBitVector bv(&arena, 10, true);
+  bv.SetBit(5);
+  EXPECT_EQ(1U, bv.GetStorageSize());
+  bv.SetBit(35);
+  EXPECT_EQ(2U, bv.GetStorageSize());
+}
+
+TEST_F(ArenaAllocatorTest, LargeAllocations) {
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 5 / 8);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 2 / 8);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(1u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 11 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 7 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(3u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    void* alloc1 = arena.Alloc(Arena::kDefaultSize * 9 / 16);
+    void* alloc2 = arena.Alloc(Arena::kDefaultSize * 13 / 16);
+    ASSERT_NE(alloc1, alloc2);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+    // Note: Leaving some space for memory tool red zones.
+    void* alloc3 = arena.Alloc(Arena::kDefaultSize * 5 / 16);
+    ASSERT_NE(alloc1, alloc3);
+    ASSERT_NE(alloc2, alloc3);
+    ASSERT_EQ(2u, NumberOfArenas(&arena));
+  }
+  {
+    ArenaPool pool;
+    ArenaAllocator arena(&pool);
+    // Note: Leaving some space for memory tool red zones.
+    for (size_t i = 0; i != 15; ++i) {
+      arena.Alloc(Arena::kDefaultSize * 1 / 16);    // Small allocation, fits the same arena 15 times.
+      ASSERT_EQ(i + 1u, NumberOfArenas(&arena));
+      arena.Alloc(Arena::kDefaultSize * 17 / 16);   // Allocate a separate arena.
+      ASSERT_EQ(i + 2u, NumberOfArenas(&arena));
+    }
+  }
+}
+
+}  // namespace art