Separate Malloc and MemMap ArenaPools
Make ArenaPool an abstract base class and keep the MallocArenaPool
implementation with it. This enables arena_allocator to be free
of MemMap, Mutex, etc., in preparation for moving the remaining
collections out of runtime/base to libartbase/base.
Bug: 22322814
Test: make -j 50 test-art-host
Test: build and boot
Change-Id: Ief84dcbfb749165d9bc82000c6b8f96f93052422
diff --git a/runtime/runtime.cc b/runtime/runtime.cc
index b80ce7d..4068158 100644
--- a/runtime/runtime.cc
+++ b/runtime/runtime.cc
@@ -62,6 +62,8 @@
#include "base/dumpable.h"
#include "base/enums.h"
#include "base/file_utils.h"
+#include "base/malloc_arena_pool.h"
+#include "base/mem_map_arena_pool.h"
#include "base/memory_tool.h"
#include "base/mutex.h"
#include "base/os.h"
@@ -1332,13 +1334,17 @@
// Use MemMap arena pool for jit, malloc otherwise. Malloc arenas are faster to allocate but
// can't be trimmed as easily.
const bool use_malloc = IsAotCompiler();
- arena_pool_.reset(new ArenaPool(use_malloc, /* low_4gb */ false));
- jit_arena_pool_.reset(
- new ArenaPool(/* use_malloc */ false, /* low_4gb */ false, "CompilerMetadata"));
+ if (use_malloc) {
+ arena_pool_.reset(new MallocArenaPool());
+ jit_arena_pool_.reset(new MallocArenaPool());
+ } else {
+ arena_pool_.reset(new MemMapArenaPool(/* low_4gb */ false));
+ jit_arena_pool_.reset(new MemMapArenaPool(/* low_4gb */ false, "CompilerMetadata"));
+ }
if (IsAotCompiler() && Is64BitInstructionSet(kRuntimeISA)) {
// 4gb, no malloc. Explanation in header.
- low_4gb_arena_pool_.reset(new ArenaPool(/* use_malloc */ false, /* low_4gb */ true));
+ low_4gb_arena_pool_.reset(new MemMapArenaPool(/* low_4gb */ true));
}
linear_alloc_.reset(CreateLinearAlloc());