Add thread unsafe allocation methods to spaces.
Used by SS/GSS collectors since these run with mutators suspended and
only allocate from a single thread. Added AllocThreadUnsafe to
BumpPointerSpace and RosAllocSpace. RosAlloc's AllocThreadUnsafe uses
the current runs as thread local runs for a thread unsafe allocation.
Added code to revoke current runs which have the same index as thread
local runs.
Changed:
The number of thread local runs in each thread is now the number
of thread local runs in RosAlloc instead of the number of size
brackets.
Total GC time / time on EvaluateAndApplyChanges.
TLAB SS:
Before: 36.7s / 7254
After: 16.1s / 4837
TLAB GSS:
Before: 6.9s / 3973
After: 5.7s / 3778
Bug: 8981901
Change-Id: Id1d264ade3799f431bf7ebbdcca6146aefbeb632
diff --git a/runtime/gc/collector/semi_space.cc b/runtime/gc/collector/semi_space.cc
index 0b26019..b53ee10 100644
--- a/runtime/gc/collector/semi_space.cc
+++ b/runtime/gc/collector/semi_space.cc
@@ -180,7 +180,9 @@
runtime->SetFaultMessage(oss.str());
CHECK_EQ(self_->SetStateUnsafe(old_state), kRunnable);
}
-
+ // Revoke the thread local buffers since the GC may allocate into a RosAllocSpace and this helps
+ // to prevent fragmentation.
+ RevokeAllThreadLocalBuffers();
if (generational_) {
if (gc_cause_ == kGcCauseExplicit || gc_cause_ == kGcCauseForNativeAlloc ||
clear_soft_references_) {
@@ -332,11 +334,8 @@
class SemiSpaceScanObjectVisitor {
public:
explicit SemiSpaceScanObjectVisitor(SemiSpace* ss) : semi_space_(ss) {}
- void operator()(Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
- // TODO: fix NO_THREAD_SAFETY_ANALYSIS. ScanObject() requires an
- // exclusive lock on the mutator lock, but
- // SpaceBitmap::VisitMarkedRange() only requires the shared lock.
+ void operator()(Object* obj) const EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_,
+ Locks::heap_bitmap_lock_) {
DCHECK(obj != nullptr);
semi_space_->ScanObject(obj);
}
@@ -552,10 +551,11 @@
// (pseudo-promote) it to the main free list space (as sort
// of an old generation.)
space::MallocSpace* promo_dest_space = GetHeap()->GetPrimaryFreeListSpace();
- forward_address = promo_dest_space->Alloc(self_, object_size, &bytes_allocated, nullptr);
+ forward_address = promo_dest_space->AllocThreadUnsafe(self_, object_size, &bytes_allocated,
+ nullptr);
if (UNLIKELY(forward_address == nullptr)) {
// If out of space, fall back to the to-space.
- forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated, nullptr);
+ forward_address = to_space_->AllocThreadUnsafe(self_, object_size, &bytes_allocated, nullptr);
} else {
bytes_promoted_ += bytes_allocated;
// Dirty the card at the destionation as it may contain
@@ -599,7 +599,7 @@
DCHECK(forward_address != nullptr);
} else {
// If it's allocated after the last GC (younger), copy it to the to-space.
- forward_address = to_space_->Alloc(self_, object_size, &bytes_allocated, nullptr);
+ forward_address = to_space_->AllocThreadUnsafe(self_, object_size, &bytes_allocated, nullptr);
}
++objects_moved_;
bytes_moved_ += bytes_allocated;