Merge "Refactor and improve mod-union tables." into dalvik-dev
diff --git a/runtime/gc/accounting/mod_union_table-inl.h b/runtime/gc/accounting/mod_union_table-inl.h
index 29450c1..fb425df 100644
--- a/runtime/gc/accounting/mod_union_table-inl.h
+++ b/runtime/gc/accounting/mod_union_table-inl.h
@@ -28,9 +28,11 @@
// A mod-union table to record image references to the Zygote and alloc space.
class ModUnionTableToZygoteAllocspace : public ModUnionTableReferenceCache {
public:
- explicit ModUnionTableToZygoteAllocspace(Heap* heap) : ModUnionTableReferenceCache(heap) {}
+ explicit ModUnionTableToZygoteAllocspace(const std::string& name, Heap* heap,
+ space::ContinuousSpace* space)
+ : ModUnionTableReferenceCache(name, heap, space) {}
- bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) {
+ bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) ALWAYS_INLINE {
const std::vector<space::ContinuousSpace*>& spaces = GetHeap()->GetContinuousSpaces();
typedef std::vector<space::ContinuousSpace*>::const_iterator It;
for (It it = spaces.begin(); it != spaces.end(); ++it) {
@@ -47,16 +49,18 @@
// A mod-union table to record Zygote references to the alloc space.
class ModUnionTableToAllocspace : public ModUnionTableReferenceCache {
public:
- explicit ModUnionTableToAllocspace(Heap* heap) : ModUnionTableReferenceCache(heap) {}
+ explicit ModUnionTableToAllocspace(const std::string& name, Heap* heap,
+ space::ContinuousSpace* space)
+ : ModUnionTableReferenceCache(name, heap, space) {}
- bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) {
+ bool AddReference(const mirror::Object* /* obj */, const mirror::Object* ref) ALWAYS_INLINE {
const std::vector<space::ContinuousSpace*>& spaces = GetHeap()->GetContinuousSpaces();
typedef std::vector<space::ContinuousSpace*>::const_iterator It;
for (It it = spaces.begin(); it != spaces.end(); ++it) {
space::ContinuousSpace* space = *it;
if (space->Contains(ref)) {
// The allocation space is always considered for collection whereas the Zygote space is
- //
+ // only considered for full GC.
return space->GetGcRetentionPolicy() == space::kGcRetentionPolicyAlwaysCollect;
}
}
diff --git a/runtime/gc/accounting/mod_union_table.cc b/runtime/gc/accounting/mod_union_table.cc
index 4865219..7cbe94d 100644
--- a/runtime/gc/accounting/mod_union_table.cc
+++ b/runtime/gc/accounting/mod_union_table.cc
@@ -19,6 +19,7 @@
#include "base/stl_util.h"
#include "card_table-inl.h"
#include "heap_bitmap.h"
+#include "gc/collector/mark_sweep.h"
#include "gc/collector/mark_sweep-inl.h"
#include "gc/heap.h"
#include "gc/space/space.h"
@@ -67,60 +68,87 @@
std::vector<byte*>* const cleared_cards_;
};
-class ModUnionScanImageRootVisitor {
+class ModUnionUpdateObjectReferencesVisitor {
public:
- explicit ModUnionScanImageRootVisitor(collector::MarkSweep* const mark_sweep)
- : mark_sweep_(mark_sweep) {}
+ ModUnionUpdateObjectReferencesVisitor(RootVisitor visitor, void* arg)
+ : visitor_(visitor),
+ arg_(arg) {
+ }
- void operator()(const Object* root) const
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
- DCHECK(root != NULL);
- mark_sweep_->ScanRoot(root);
+ // Extra parameters are required since we use this same visitor signature for checking objects.
+ void operator()(Object* obj, Object* ref, const MemberOffset& offset,
+ bool /* is_static */) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ // Only update non-null references; write the field back only if the visitor returned a different object.
+ if (ref != nullptr) {
+ Object* new_ref = visitor_(ref, arg_);
+ if (new_ref != ref) {
+ obj->SetFieldObject(offset, new_ref, false, true);
+ }
+ }
}
private:
- collector::MarkSweep* const mark_sweep_;
+ RootVisitor* visitor_;
+ void* arg_;
};
-void ModUnionTableReferenceCache::ClearCards(space::ContinuousSpace* space) {
+class ModUnionScanImageRootVisitor {
+ public:
+ ModUnionScanImageRootVisitor(RootVisitor visitor, void* arg)
+ : visitor_(visitor), arg_(arg) {}
+
+ void operator()(Object* root) const
+ EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
+ DCHECK(root != NULL);
+ ModUnionUpdateObjectReferencesVisitor ref_visitor(visitor_, arg_);
+ collector::MarkSweep::VisitObjectReferences(root, ref_visitor, true);
+ }
+
+ private:
+ RootVisitor* visitor_;
+ void* arg_;
+};
+
+void ModUnionTableReferenceCache::ClearCards() {
CardTable* card_table = GetHeap()->GetCardTable();
ModUnionClearCardSetVisitor visitor(&cleared_cards_);
// Clear dirty cards in this space and update the corresponding mod-union bits.
- card_table->ModifyCardsAtomic(space->Begin(), space->End(), AgeCardVisitor(), visitor);
+ card_table->ModifyCardsAtomic(space_->Begin(), space_->End(), AgeCardVisitor(), visitor);
}
class AddToReferenceArrayVisitor {
public:
explicit AddToReferenceArrayVisitor(ModUnionTableReferenceCache* mod_union_table,
- std::vector<const Object*>* references)
+ std::vector<Object**>* references)
: mod_union_table_(mod_union_table),
references_(references) {
}
// Extra parameters are required since we use this same visitor signature for checking objects.
- void operator()(const Object* obj, const Object* ref, const MemberOffset& /* offset */,
+ void operator()(Object* obj, Object* ref, const MemberOffset& offset,
bool /* is_static */) const {
// Only add the reference if it is non null and fits our criteria.
- if (ref != NULL && mod_union_table_->AddReference(obj, ref)) {
- references_->push_back(ref);
+ if (ref != nullptr && mod_union_table_->AddReference(obj, ref)) {
+ // Push the address of the reference.
+ references_->push_back(obj->GetFieldObjectAddr(offset));
}
}
private:
ModUnionTableReferenceCache* const mod_union_table_;
- std::vector<const Object*>* const references_;
+ std::vector<Object**>* const references_;
};
class ModUnionReferenceVisitor {
public:
explicit ModUnionReferenceVisitor(ModUnionTableReferenceCache* const mod_union_table,
- std::vector<const Object*>* references)
+ std::vector<Object**>* references)
: mod_union_table_(mod_union_table),
references_(references) {
}
- void operator()(const Object* obj) const
+ void operator()(Object* obj) const
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
DCHECK(obj != NULL);
// We don't have an early exit since we use the visitor pattern, an early
@@ -130,7 +158,7 @@
}
private:
ModUnionTableReferenceCache* const mod_union_table_;
- std::vector<const Object*>* const references_;
+ std::vector<Object**>* const references_;
};
class CheckReferenceVisitor {
@@ -143,8 +171,8 @@
// Extra parameters are required since we use this same visitor signature for checking objects.
// TODO: Fix this when annotalysis works with visitors.
- void operator()(const Object* obj, const Object* ref, const MemberOffset& /* offset */,
- bool /* is_static */) const
+ void operator()(const Object* obj, const Object* ref,
+ const MemberOffset& /* offset */, bool /* is_static */) const
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Heap* heap = mod_union_table_->GetHeap();
if (ref != NULL && mod_union_table_->AddReference(obj, ref) &&
@@ -174,7 +202,7 @@
: mod_union_table_(mod_union_table), references_(references) {
}
- void operator()(const Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
+ void operator()(Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
Locks::heap_bitmap_lock_->AssertSharedHeld(Thread::Current());
DCHECK(obj != NULL);
CheckReferenceVisitor visitor(mod_union_table_, references_);
@@ -188,26 +216,25 @@
void ModUnionTableReferenceCache::Verify() {
// Start by checking that everything in the mod union table is marked.
- Heap* heap = GetHeap();
- for (const std::pair<const byte*, std::vector<const Object*> >& it : references_) {
- for (const Object* ref : it.second) {
- CHECK(heap->IsLiveObjectLocked(ref));
+ for (const auto& ref_pair : references_) {
+ for (Object** ref : ref_pair.second) {
+ CHECK(heap_->IsLiveObjectLocked(*ref));
}
}
// Check the references of each clean card which is also in the mod union table.
- CardTable* card_table = heap->GetCardTable();
- for (const std::pair<const byte*, std::vector<const Object*> > & it : references_) {
- const byte* card = it.first;
+ CardTable* card_table = heap_->GetCardTable();
+ SpaceBitmap* live_bitmap = space_->GetLiveBitmap();
+ for (const auto& ref_pair : references_) {
+ const byte* card = ref_pair.first;
if (*card == CardTable::kCardClean) {
- std::set<const Object*> reference_set(it.second.begin(), it.second.end());
+ std::set<const Object*> reference_set;
+ for (Object** obj_ptr : ref_pair.second) {
+ reference_set.insert(*obj_ptr);
+ }
ModUnionCheckReferences visitor(this, reference_set);
uintptr_t start = reinterpret_cast<uintptr_t>(card_table->AddrFromCard(card));
- uintptr_t end = start + CardTable::kCardSize;
- auto* space = heap->FindContinuousSpaceFromObject(reinterpret_cast<Object*>(start), false);
- DCHECK(space != nullptr);
- SpaceBitmap* live_bitmap = space->GetLiveBitmap();
- live_bitmap->VisitMarkedRange(start, end, visitor);
+ live_bitmap->VisitMarkedRange(start, start + CardTable::kCardSize, visitor);
}
}
}
@@ -221,24 +248,24 @@
os << reinterpret_cast<void*>(start) << "-" << reinterpret_cast<void*>(end) << ",";
}
os << "]\nModUnionTable references: [";
- for (const std::pair<const byte*, std::vector<const Object*> >& it : references_) {
- const byte* card_addr = it.first;
+ for (const auto& ref_pair : references_) {
+ const byte* card_addr = ref_pair.first;
uintptr_t start = reinterpret_cast<uintptr_t>(card_table->AddrFromCard(card_addr));
uintptr_t end = start + CardTable::kCardSize;
os << reinterpret_cast<void*>(start) << "-" << reinterpret_cast<void*>(end) << "->{";
- for (const mirror::Object* ref : it.second) {
- os << reinterpret_cast<const void*>(ref) << ",";
+ for (Object** ref : ref_pair.second) {
+ os << reinterpret_cast<const void*>(*ref) << ",";
}
os << "},";
}
}
-void ModUnionTableReferenceCache::Update() {
+void ModUnionTableReferenceCache::UpdateAndMarkReferences(RootVisitor visitor, void* arg) {
Heap* heap = GetHeap();
CardTable* card_table = heap->GetCardTable();
- std::vector<const Object*> cards_references;
- ModUnionReferenceVisitor visitor(this, &cards_references);
+ std::vector<Object**> cards_references;
+ ModUnionReferenceVisitor add_visitor(this, &cards_references);
for (const auto& card : cleared_cards_) {
// Clear and re-compute alloc space references associated with this card.
@@ -248,7 +275,7 @@
auto* space = heap->FindContinuousSpaceFromObject(reinterpret_cast<Object*>(start), false);
DCHECK(space != nullptr);
SpaceBitmap* live_bitmap = space->GetLiveBitmap();
- live_bitmap->VisitMarkedRange(start, end, visitor);
+ live_bitmap->VisitMarkedRange(start, end, add_visitor);
// Update the corresponding references for the card.
auto found = references_.find(card);
@@ -263,46 +290,41 @@
}
}
cleared_cards_.clear();
-}
-
-void ModUnionTableReferenceCache::MarkReferences(collector::MarkSweep* mark_sweep) {
size_t count = 0;
-
for (const auto& ref : references_) {
- for (const auto& obj : ref.second) {
- mark_sweep->MarkRoot(obj);
- ++count;
+ for (const auto& obj_ptr : ref.second) {
+ Object* obj = *obj_ptr;
+ if (obj != nullptr) {
+ Object* new_obj = visitor(obj, arg);
+ // Avoid dirtying pages in the image unless necessary.
+ if (new_obj != obj) {
+ *obj_ptr = new_obj;
+ }
+ }
}
+ count += ref.second.size();
}
if (VLOG_IS_ON(heap)) {
VLOG(gc) << "Marked " << count << " references in mod union table";
}
}
-void ModUnionTableCardCache::ClearCards(space::ContinuousSpace* space) {
+void ModUnionTableCardCache::ClearCards() {
CardTable* card_table = GetHeap()->GetCardTable();
ModUnionClearCardSetVisitor visitor(&cleared_cards_);
// Clear dirty cards in this space and update the corresponding mod-union bits.
- card_table->ModifyCardsAtomic(space->Begin(), space->End(), AgeCardVisitor(), visitor);
+ card_table->ModifyCardsAtomic(space_->Begin(), space_->End(), AgeCardVisitor(), visitor);
}
// Mark all references to the alloc space(s).
-void ModUnionTableCardCache::MarkReferences(collector::MarkSweep* mark_sweep) {
+void ModUnionTableCardCache::UpdateAndMarkReferences(RootVisitor visitor, void* arg) {
CardTable* card_table = heap_->GetCardTable();
- ModUnionScanImageRootVisitor visitor(mark_sweep);
- space::ContinuousSpace* space = nullptr;
- SpaceBitmap* bitmap = nullptr;
+ ModUnionScanImageRootVisitor scan_visitor(visitor, arg);
+ SpaceBitmap* bitmap = space_->GetLiveBitmap();
for (const byte* card_addr : cleared_cards_) {
- auto start = reinterpret_cast<uintptr_t>(card_table->AddrFromCard(card_addr));
- auto end = start + CardTable::kCardSize;
- auto obj_start = reinterpret_cast<Object*>(start);
- if (UNLIKELY(space == nullptr || !space->Contains(obj_start))) {
- space = heap_->FindContinuousSpaceFromObject(obj_start, false);
- DCHECK(space != nullptr);
- bitmap = space->GetLiveBitmap();
- DCHECK(bitmap != nullptr);
- }
- bitmap->VisitMarkedRange(start, end, visitor);
+ uintptr_t start = reinterpret_cast<uintptr_t>(card_table->AddrFromCard(card_addr));
+ DCHECK(space_->HasAddress(reinterpret_cast<Object*>(start)));
+ bitmap->VisitMarkedRange(start, start + CardTable::kCardSize, scan_visitor);
}
}
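
The RootVisitor callbacks plugged into UpdateAndMarkReferences above are plain
two-argument functions. A minimal, hypothetical sketch of the contract (the name
MarkWithCollector is invented here, and it assumes MarkSweep::MarkRoot is reachable
from this context, as it was from the removed MarkReferences path):

  // Mark the object via the collector passed in 'arg' and return it. Returning a
  // different pointer makes the table write the new value back into the holding
  // field or cached slot; returning the same pointer leaves memory untouched,
  // which avoids dirtying image pages.
  static mirror::Object* MarkWithCollector(mirror::Object* obj, void* arg) {
    collector::MarkSweep* mark_sweep = reinterpret_cast<collector::MarkSweep*>(arg);
    mark_sweep->MarkRoot(obj);
    return obj;
  }
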
diff --git a/runtime/gc/accounting/mod_union_table.h b/runtime/gc/accounting/mod_union_table.h
index eb7a754..d874c60 100644
--- a/runtime/gc/accounting/mod_union_table.h
+++ b/runtime/gc/accounting/mod_union_table.h
@@ -19,6 +19,7 @@
#include "gc_allocator.h"
#include "globals.h"
+#include "root_visitor.h"
#include "safe_map.h"
#include <set>
@@ -52,21 +53,23 @@
public:
typedef std::set<byte*, std::less<byte*>, GCAllocator<byte*> > CardSet;
- explicit ModUnionTable(Heap* heap) : heap_(heap) {}
+ explicit ModUnionTable(const std::string& name, Heap* heap, space::ContinuousSpace* space)
+ : name_(name),
+ heap_(heap),
+ space_(space) {
+ }
virtual ~ModUnionTable() {}
// Clear cards which map to a memory range of a space. This doesn't immediately update the
// mod-union table, as updating the mod-union table may have an associated cost, such as
// determining references to track.
- virtual void ClearCards(space::ContinuousSpace* space) = 0;
+ virtual void ClearCards() = 0;
// Update the mod-union table using data stored by ClearCards. There may be multiple ClearCards
- // before a call to update, for example, back-to-back sticky GCs.
- virtual void Update() = 0;
-
- // Mark the bitmaps for all references which are stored in the mod-union table.
- virtual void MarkReferences(collector::MarkSweep* mark_sweep) = 0;
+ // before a call to update, for example, back-to-back sticky GCs. Also mark references to other
+ // spaces which are stored in the mod-union table.
+ virtual void UpdateAndMarkReferences(RootVisitor visitor, void* arg) = 0;
// Verification: sanity checks that we don't have clean cards which conflict with our cached data
// for said cards. Exclusive lock is required since verify sometimes uses
@@ -75,31 +78,35 @@
virtual void Verify() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) = 0;
virtual void Dump(std::ostream& os) = 0;
-
+ space::ContinuousSpace* GetSpace() {
+ return space_;
+ }
Heap* GetHeap() const {
return heap_;
}
+ const std::string& GetName() const {
+ return name_;
+ }
protected:
+ const std::string name_;
Heap* const heap_;
+ space::ContinuousSpace* const space_;
};
// Reference caching implementation. Caches references pointing to alloc space(s) for each card.
class ModUnionTableReferenceCache : public ModUnionTable {
public:
- explicit ModUnionTableReferenceCache(Heap* heap) : ModUnionTable(heap) {}
+ explicit ModUnionTableReferenceCache(const std::string& name, Heap* heap,
+ space::ContinuousSpace* space)
+ : ModUnionTable(name, heap, space) {}
virtual ~ModUnionTableReferenceCache() {}
// Clear and store cards for a space.
- void ClearCards(space::ContinuousSpace* space);
+ void ClearCards();
- // Update table based on cleared cards.
- void Update()
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
- SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
-
- // Mark all references to the alloc space(s).
- void MarkReferences(collector::MarkSweep* mark_sweep)
+ // Update table based on cleared cards and mark all references to the other spaces.
+ void UpdateAndMarkReferences(RootVisitor visitor, void* arg)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
@@ -117,24 +124,22 @@
ModUnionTable::CardSet cleared_cards_;
// Maps from dirty cards to their corresponding alloc space references.
- SafeMap<const byte*, std::vector<const mirror::Object*>, std::less<const byte*>,
- GCAllocator<std::pair<const byte*, std::vector<const mirror::Object*> > > > references_;
+ SafeMap<const byte*, std::vector<mirror::Object**>, std::less<const byte*>,
+ GCAllocator<std::pair<const byte*, std::vector<mirror::Object**> > > > references_;
};
// Card caching implementation. Keeps track of which cards we cleared and only this information.
class ModUnionTableCardCache : public ModUnionTable {
public:
- explicit ModUnionTableCardCache(Heap* heap) : ModUnionTable(heap) {}
+ explicit ModUnionTableCardCache(const std::string& name, Heap* heap, space::ContinuousSpace* space)
+ : ModUnionTable(name, heap, space) {}
virtual ~ModUnionTableCardCache() {}
// Clear and store cards for a space.
- void ClearCards(space::ContinuousSpace* space);
-
- // Nothing to update as all dirty cards were placed into cleared cards during clearing.
- void Update() {}
+ void ClearCards();
// Mark all references to the alloc space(s).
- void MarkReferences(collector::MarkSweep* mark_sweep)
+ void UpdateAndMarkReferences(RootVisitor visitor, void* arg)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
diff --git a/runtime/gc/accounting/space_bitmap.h b/runtime/gc/accounting/space_bitmap.h
index f975692..4cf8872 100644
--- a/runtime/gc/accounting/space_bitmap.h
+++ b/runtime/gc/accounting/space_bitmap.h
@@ -247,8 +247,8 @@
template <typename Visitor>
void Visit(const Visitor& visitor) NO_THREAD_SAFETY_ANALYSIS {
- for (Objects::iterator it = contained_.begin(); it != contained_.end(); ++it) {
- visitor(*it);
+ for (const mirror::Object* obj : contained_) {
+ visitor(const_cast<mirror::Object*>(obj));
}
}
diff --git a/runtime/gc/collector/mark_sweep-inl.h b/runtime/gc/collector/mark_sweep-inl.h
index d0b0b5c..270c9ef 100644
--- a/runtime/gc/collector/mark_sweep-inl.h
+++ b/runtime/gc/collector/mark_sweep-inl.h
@@ -29,7 +29,7 @@
namespace collector {
template <typename MarkVisitor>
-inline void MarkSweep::ScanObjectVisit(const mirror::Object* obj, const MarkVisitor& visitor) {
+inline void MarkSweep::ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor) {
DCHECK(obj != NULL);
if (kIsDebugBuild && !IsMarked(obj)) {
heap_->DumpSpaces();
@@ -62,7 +62,8 @@
}
template <typename Visitor>
-inline void MarkSweep::VisitObjectReferences(const mirror::Object* obj, const Visitor& visitor)
+inline void MarkSweep::VisitObjectReferences(mirror::Object* obj, const Visitor& visitor,
+ bool visit_class)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
Locks::mutator_lock_) {
DCHECK(obj != NULL);
@@ -70,6 +71,9 @@
mirror::Class* klass = obj->GetClass();
DCHECK(klass != NULL);
+ if (visit_class) {
+ visitor(obj, klass, MemberOffset(0), false);
+ }
if (klass == mirror::Class::GetJavaLangClass()) {
DCHECK_EQ(klass->GetClass(), mirror::Class::GetJavaLangClass());
VisitClassReferences(klass, obj, visitor);
@@ -86,8 +90,8 @@
}
template <typename Visitor>
-inline void MarkSweep::VisitInstanceFieldsReferences(const mirror::Class* klass,
- const mirror::Object* obj,
+inline void MarkSweep::VisitInstanceFieldsReferences(mirror::Class* klass,
+ mirror::Object* obj,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
DCHECK(obj != NULL);
@@ -96,7 +100,7 @@
}
template <typename Visitor>
-inline void MarkSweep::VisitClassReferences(const mirror::Class* klass, const mirror::Object* obj,
+inline void MarkSweep::VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
VisitInstanceFieldsReferences(klass, obj, visitor);
@@ -104,15 +108,14 @@
}
template <typename Visitor>
-inline void MarkSweep::VisitStaticFieldsReferences(const mirror::Class* klass,
- const Visitor& visitor)
+inline void MarkSweep::VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
DCHECK(klass != NULL);
VisitFieldsReferences(klass, klass->GetReferenceStaticOffsets(), true, visitor);
}
template <typename Visitor>
-inline void MarkSweep::VisitFieldsReferences(const mirror::Object* obj, uint32_t ref_offsets,
+inline void MarkSweep::VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets,
bool is_static, const Visitor& visitor) {
if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
// Found a reference offset bitmap. Mark the specified offsets.
@@ -124,7 +127,7 @@
while (ref_offsets != 0) {
size_t right_shift = CLZ(ref_offsets);
MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
- const mirror::Object* ref = obj->GetFieldObject<const mirror::Object*>(field_offset, false);
+ mirror::Object* ref = obj->GetFieldObject<mirror::Object*>(field_offset, false);
visitor(obj, ref, field_offset, is_static);
ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
}
@@ -143,7 +146,7 @@
mirror::ArtField* field = (is_static ? klass->GetStaticField(i)
: klass->GetInstanceField(i));
MemberOffset field_offset = field->GetOffset();
- const mirror::Object* ref = obj->GetFieldObject<const mirror::Object*>(field_offset, false);
+ mirror::Object* ref = obj->GetFieldObject<mirror::Object*>(field_offset, false);
visitor(obj, ref, field_offset, is_static);
}
}
@@ -151,11 +154,11 @@
}
template <typename Visitor>
-inline void MarkSweep::VisitObjectArrayReferences(const mirror::ObjectArray<mirror::Object>* array,
+inline void MarkSweep::VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
const Visitor& visitor) {
const size_t length = static_cast<size_t>(array->GetLength());
for (size_t i = 0; i < length; ++i) {
- const mirror::Object* element = array->GetWithoutChecks(static_cast<int32_t>(i));
+ mirror::Object* element = array->GetWithoutChecks(static_cast<int32_t>(i));
const size_t width = sizeof(mirror::Object*);
MemberOffset offset(i * width + mirror::Array::DataOffset(width).Int32Value());
visitor(array, element, offset, false);
diff --git a/runtime/gc/collector/mark_sweep.cc b/runtime/gc/collector/mark_sweep.cc
index 1625ba6..a5e66d2 100644
--- a/runtime/gc/collector/mark_sweep.cc
+++ b/runtime/gc/collector/mark_sweep.cc
@@ -28,6 +28,7 @@
#include "base/timing_logger.h"
#include "gc/accounting/card_table-inl.h"
#include "gc/accounting/heap_bitmap.h"
+#include "gc/accounting/mod_union_table.h"
#include "gc/accounting/space_bitmap-inl.h"
#include "gc/heap.h"
#include "gc/space/image_space.h"
@@ -99,7 +100,7 @@
} else {
const space::ContinuousSpace* prev_space = nullptr;
// Find out if the previous space is immune.
- for (space::ContinuousSpace* cur_space : GetHeap()->GetContinuousSpaces()) {
+ for (const space::ContinuousSpace* cur_space : GetHeap()->GetContinuousSpaces()) {
if (cur_space == space) {
break;
}
@@ -107,15 +108,19 @@
}
// If previous space was immune, then extend the immune region. Relies on continuous spaces
// being sorted by Heap::AddContinuousSpace.
- if (prev_space != NULL &&
- immune_begin_ <= reinterpret_cast<Object*>(prev_space->Begin()) &&
- immune_end_ >= reinterpret_cast<Object*>(prev_space->End())) {
+ if (prev_space != NULL && IsImmuneSpace(prev_space)) {
immune_begin_ = std::min(reinterpret_cast<Object*>(space->Begin()), immune_begin_);
immune_end_ = std::max(reinterpret_cast<Object*>(space->End()), immune_end_);
}
}
}
+bool MarkSweep::IsImmuneSpace(const space::ContinuousSpace* space) {
+ return
+ immune_begin_ <= reinterpret_cast<Object*>(space->Begin()) &&
+ immune_end_ >= reinterpret_cast<Object*>(space->End());
+}
+
void MarkSweep::BindBitmaps() {
timings_.StartSplit("BindBitmaps");
WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
@@ -263,11 +268,23 @@
}
live_stack_freeze_size_ = heap_->GetLiveStack()->Size();
MarkConcurrentRoots();
-
- heap_->UpdateAndMarkModUnion(this, timings_, GetGcType());
+ UpdateAndMarkModUnion();
MarkReachableObjects();
}
+void MarkSweep::UpdateAndMarkModUnion() {
+ for (const auto& space : heap_->GetContinuousSpaces()) {
+ if (IsImmuneSpace(space)) {
+ const char* name = space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
+ "UpdateAndMarkImageModUnionTable";
+ base::TimingLogger::ScopedSplit split(name, &timings_);
+ accounting::ModUnionTable* mod_union_table = heap_->FindModUnionTableFromSpace(space);
+ CHECK(mod_union_table != nullptr);
+ mod_union_table->UpdateAndMarkReferences(MarkRootCallback, this);
+ }
+ }
+}
+
void MarkSweep::MarkThreadRoots(Thread* self) {
MarkRootsCheckpoint(self);
}
@@ -577,11 +594,11 @@
void MarkSweep::CheckObject(const Object* obj) {
DCHECK(obj != NULL);
- VisitObjectReferences(obj, [this](const Object* obj, const Object* ref, MemberOffset offset,
- bool is_static) NO_THREAD_SAFETY_ANALYSIS {
+ VisitObjectReferences(const_cast<Object*>(obj), [this](const Object* obj, const Object* ref,
+ MemberOffset offset, bool is_static) NO_THREAD_SAFETY_ANALYSIS {
Locks::heap_bitmap_lock_->AssertSharedHeld(Thread::Current());
CheckReference(obj, ref, offset, is_static);
- });
+ }, true);
}
void MarkSweep::VerifyImageRootVisitor(Object* root, void* arg) {
@@ -647,11 +664,11 @@
explicit ScanObjectParallelVisitor(MarkStackTask<kUseFinger>* chunk_task) ALWAYS_INLINE
: chunk_task_(chunk_task) {}
- void operator()(const Object* obj) const {
+ void operator()(Object* obj) const {
MarkSweep* mark_sweep = chunk_task_->mark_sweep_;
mark_sweep->ScanObjectVisit(obj,
- [mark_sweep, this](const Object* /* obj */, const Object* ref,
- const MemberOffset& /* offset */, bool /* is_static */) ALWAYS_INLINE {
+ [mark_sweep, this](Object* /* obj */, Object* ref, const MemberOffset& /* offset */,
+ bool /* is_static */) ALWAYS_INLINE {
if (ref != nullptr && mark_sweep->MarkObjectParallel(ref)) {
if (kUseFinger) {
android_memory_barrier();
@@ -708,11 +725,11 @@
static const size_t kFifoSize = 4;
BoundedFifoPowerOfTwo<const Object*, kFifoSize> prefetch_fifo;
for (;;) {
- const Object* obj = NULL;
+ const Object* obj = nullptr;
if (kUseMarkStackPrefetch) {
while (mark_stack_pos_ != 0 && prefetch_fifo.size() < kFifoSize) {
const Object* obj = mark_stack_[--mark_stack_pos_];
- DCHECK(obj != NULL);
+ DCHECK(obj != nullptr);
__builtin_prefetch(obj);
prefetch_fifo.push_back(obj);
}
@@ -727,8 +744,8 @@
}
obj = mark_stack_[--mark_stack_pos_];
}
- DCHECK(obj != NULL);
- visitor(obj);
+ DCHECK(obj != nullptr);
+ visitor(const_cast<mirror::Object*>(obj));
}
}
};
@@ -1366,7 +1383,7 @@
// and dispatches to a specialized scanning routine.
void MarkSweep::ScanObject(const Object* obj) {
MarkObjectVisitor visitor(this);
- ScanObjectVisit(obj, visitor);
+ ScanObjectVisit(const_cast<Object*>(obj), visitor);
}
void MarkSweep::ProcessMarkStackParallel(size_t thread_count) {
diff --git a/runtime/gc/collector/mark_sweep.h b/runtime/gc/collector/mark_sweep.h
index 76e71fd..19df2da 100644
--- a/runtime/gc/collector/mark_sweep.h
+++ b/runtime/gc/collector/mark_sweep.h
@@ -114,6 +114,9 @@
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ bool IsImmuneSpace(const space::ContinuousSpace* space)
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// Bind the live bits to the mark bits of bitmaps for spaces that are never collected, ie
// the image. Mark that portion of the heap as immune.
virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
@@ -137,6 +140,9 @@
void ProcessReferences(Thread* self)
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+ virtual void UpdateAndMarkModUnion()
+ SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
+
// Sweeps unmarked objects to complete the garbage collection.
virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
@@ -163,7 +169,7 @@
// TODO: enable thread safety analysis when in use by multiple worker threads.
template <typename MarkVisitor>
- void ScanObjectVisit(const mirror::Object* obj, const MarkVisitor& visitor)
+ void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
NO_THREAD_SAFETY_ANALYSIS;
size_t GetFreedBytes() const {
@@ -215,7 +221,8 @@
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
template <typename Visitor>
- static void VisitObjectReferences(const mirror::Object* obj, const Visitor& visitor)
+ static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor,
+ bool visit_class = false)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
Locks::mutator_lock_);
@@ -306,7 +313,7 @@
size_t GetThreadCount(bool paused) const;
// Returns true if an object is inside of the immune region (assumed to be marked).
- bool IsImmune(const mirror::Object* obj) const {
+ bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
return obj >= immune_begin_ && obj < immune_end_;
}
@@ -317,34 +324,34 @@
NO_THREAD_SAFETY_ANALYSIS;
template <typename Visitor>
- static void VisitInstanceFieldsReferences(const mirror::Class* klass, const mirror::Object* obj,
+ static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
// Visit the header, static field references, and interface pointers of a class object.
template <typename Visitor>
- static void VisitClassReferences(const mirror::Class* klass, const mirror::Object* obj,
+ static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
template <typename Visitor>
- static void VisitStaticFieldsReferences(const mirror::Class* klass, const Visitor& visitor)
+ static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
template <typename Visitor>
- static void VisitFieldsReferences(const mirror::Object* obj, uint32_t ref_offsets, bool is_static,
+ static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
// Visit all of the references in an object array.
template <typename Visitor>
- static void VisitObjectArrayReferences(const mirror::ObjectArray<mirror::Object>* array,
+ static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
// Visits the header and field references of a data object.
template <typename Visitor>
- static void VisitOtherReferences(const mirror::Class* klass, const mirror::Object* obj,
+ static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
const Visitor& visitor)
SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
return VisitInstanceFieldsReferences(klass, obj, visitor);
diff --git a/runtime/gc/collector/sticky_mark_sweep.h b/runtime/gc/collector/sticky_mark_sweep.h
index 79c4359..8bee00f 100644
--- a/runtime/gc/collector/sticky_mark_sweep.h
+++ b/runtime/gc/collector/sticky_mark_sweep.h
@@ -31,6 +31,10 @@
return kGcTypeSticky;
}
+ // We don't need to do anything special here since we scan all the cards which may have references
+ // to the newly allocated objects.
+ virtual void UpdateAndMarkModUnion() { }
+
explicit StickyMarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");
~StickyMarkSweep() {}
diff --git a/runtime/gc/heap.cc b/runtime/gc/heap.cc
index 2ad6117..cefde04 100644
--- a/runtime/gc/heap.cc
+++ b/runtime/gc/heap.cc
@@ -191,11 +191,11 @@
card_table_.reset(accounting::CardTable::Create(heap_begin, heap_capacity));
CHECK(card_table_.get() != NULL) << "Failed to create card table";
- image_mod_union_table_.reset(new accounting::ModUnionTableToZygoteAllocspace(this));
- CHECK(image_mod_union_table_.get() != NULL) << "Failed to create image mod-union table";
-
- zygote_mod_union_table_.reset(new accounting::ModUnionTableCardCache(this));
- CHECK(zygote_mod_union_table_.get() != NULL) << "Failed to create Zygote mod-union table";
+ accounting::ModUnionTable* mod_union_table =
+ new accounting::ModUnionTableToZygoteAllocspace("Image mod-union table", this,
+ GetImageSpace());
+ CHECK(mod_union_table != nullptr) << "Failed to create image mod-union table";
+ AddModUnionTable(mod_union_table);
// TODO: Count objects in the image space here.
num_bytes_allocated_ = 0;
@@ -489,10 +489,7 @@
live_stack_->Reset();
VLOG(heap) << "~Heap()";
- // We can't take the heap lock here because there might be a daemon thread suspended with the
- // heap lock held. We know though that no non-daemon threads are executing, and we know that
- // all daemon threads are suspended, and we also know that the threads list have been deleted, so
- // those threads can't resume. We're the only running thread, and we can do whatever we like...
+ STLDeleteValues(&mod_union_tables_);
STLDeleteElements(&continuous_spaces_);
STLDeleteElements(&discontinuous_spaces_);
delete gc_complete_lock_;
@@ -1084,15 +1081,15 @@
// For bitmap Visit.
// TODO: Fix lock analysis to not use NO_THREAD_SAFETY_ANALYSIS, requires support for
// annotalysis on visitors.
- void operator()(const mirror::Object* o) const NO_THREAD_SAFETY_ANALYSIS {
- collector::MarkSweep::VisitObjectReferences(o, *this);
+ void operator()(mirror::Object* obj) const NO_THREAD_SAFETY_ANALYSIS {
+ collector::MarkSweep::VisitObjectReferences(obj, *this, true);
}
// For MarkSweep::VisitObjectReferences.
- void operator()(const mirror::Object* referrer, const mirror::Object* object,
+ void operator()(mirror::Object* referrer, mirror::Object* object,
const MemberOffset&, bool) const {
if (object == object_ && (max_count_ == 0 || referring_objects_.size() < max_count_)) {
- referring_objects_.push_back(const_cast<mirror::Object*>(referrer));
+ referring_objects_.push_back(referrer);
}
}
@@ -1157,6 +1154,12 @@
AddContinuousSpace(alloc_space_);
have_zygote_space_ = true;
+ // Create the zygote space mod union table.
+ accounting::ModUnionTable* mod_union_table =
+ new accounting::ModUnionTableCardCache("zygote space mod-union table", this, zygote_space);
+ CHECK(mod_union_table != nullptr) << "Failed to create zygote space mod-union table";
+ AddModUnionTable(mod_union_table);
+
// Reset the cumulative loggers since we now have a few additional timing phases.
for (const auto& collector : mark_sweep_collectors_) {
collector->ResetCumulativeStatistics();
@@ -1313,33 +1316,6 @@
return gc_type;
}
-void Heap::UpdateAndMarkModUnion(collector::MarkSweep* mark_sweep, base::TimingLogger& timings,
- collector::GcType gc_type) {
- if (gc_type == collector::kGcTypeSticky) {
- // Don't need to do anything for mod union table in this case since we are only scanning dirty
- // cards.
- return;
- }
-
- base::TimingLogger::ScopedSplit split("UpdateModUnionTable", &timings);
- // Update zygote mod union table.
- if (gc_type == collector::kGcTypePartial) {
- base::TimingLogger::ScopedSplit split("UpdateZygoteModUnionTable", &timings);
- zygote_mod_union_table_->Update();
-
- timings.NewSplit("ZygoteMarkReferences");
- zygote_mod_union_table_->MarkReferences(mark_sweep);
- }
-
- // Processes the cards we cleared earlier and adds their objects into the mod-union table.
- timings.NewSplit("UpdateModUnionTable");
- image_mod_union_table_->Update();
-
- // Scans all objects in the mod-union table.
- timings.NewSplit("MarkImageToAllocSpaceReferences");
- image_mod_union_table_->MarkReferences(mark_sweep);
-}
-
static mirror::Object* RootMatchesObjectVisitor(mirror::Object* root, void* arg) {
mirror::Object* obj = reinterpret_cast<mirror::Object*>(arg);
if (root == obj) {
@@ -1483,7 +1459,7 @@
VerifyReferenceVisitor visitor(heap_);
// The class doesn't count as a reference but we should verify it anyways.
visitor(obj, obj->GetClass(), MemberOffset(0), false);
- collector::MarkSweep::VisitObjectReferences(obj, visitor);
+ collector::MarkSweep::VisitObjectReferences(const_cast<mirror::Object*>(obj), visitor, true);
failed_ = failed_ || visitor.Failed();
}
@@ -1516,8 +1492,10 @@
// pointing to dead objects if they are not reachable.
if (visitor.Failed()) {
// Dump mod-union tables.
- image_mod_union_table_->Dump(LOG(ERROR) << "Image mod-union table: ");
- zygote_mod_union_table_->Dump(LOG(ERROR) << "Zygote mod-union table: ");
+ for (const auto& table_pair : mod_union_tables_) {
+ accounting::ModUnionTable* mod_union_table = table_pair.second;
+ mod_union_table->Dump(LOG(ERROR) << mod_union_table->GetName() << ": ");
+ }
DumpSpaces();
return false;
}
@@ -1601,10 +1579,10 @@
: heap_(heap),
failed_(false) {}
- void operator()(const mirror::Object* obj) const
+ void operator()(mirror::Object* obj) const
SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
VerifyReferenceCardVisitor visitor(heap_, const_cast<bool*>(&failed_));
- collector::MarkSweep::VisitObjectReferences(obj, visitor);
+ collector::MarkSweep::VisitObjectReferences(obj, visitor, true);
}
bool Failed() const {
@@ -1640,15 +1618,23 @@
allocation_stack_.swap(live_stack_);
}
+accounting::ModUnionTable* Heap::FindModUnionTableFromSpace(space::Space* space) {
+ auto it = mod_union_tables_.find(space);
+ if (it == mod_union_tables_.end()) {
+ return nullptr;
+ }
+ return it->second;
+}
+
void Heap::ProcessCards(base::TimingLogger& timings) {
// Clear cards and keep track of cards cleared in the mod-union table.
for (const auto& space : continuous_spaces_) {
- if (space->IsImageSpace()) {
- base::TimingLogger::ScopedSplit split("ImageModUnionClearCards", &timings);
- image_mod_union_table_->ClearCards(space);
- } else if (space->IsZygoteSpace()) {
- base::TimingLogger::ScopedSplit split("ZygoteModUnionClearCards", &timings);
- zygote_mod_union_table_->ClearCards(space);
+ accounting::ModUnionTable* table = FindModUnionTableFromSpace(space);
+ if (table != nullptr) {
+ const char* name = space->IsZygoteSpace() ? "ZygoteModUnionClearCards" :
+ "ImageModUnionClearCards";
+ base::TimingLogger::ScopedSplit split(name, &timings);
+ table->ClearCards();
} else {
base::TimingLogger::ScopedSplit split("AllocSpaceClearCards", &timings);
// No mod union table for the AllocSpace. Age the cards so that the GC knows that these cards
@@ -1658,6 +1644,10 @@
}
}
+static mirror::Object* IdentityCallback(mirror::Object* obj, void*) {
+ return obj;
+}
+
void Heap::PreGcVerification(collector::GarbageCollector* gc) {
ThreadList* thread_list = Runtime::Current()->GetThreadList();
Thread* self = Thread::Current();
@@ -1691,10 +1681,11 @@
if (verify_mod_union_table_) {
thread_list->SuspendAll();
ReaderMutexLock reader_lock(self, *Locks::heap_bitmap_lock_);
- zygote_mod_union_table_->Update();
- zygote_mod_union_table_->Verify();
- image_mod_union_table_->Update();
- image_mod_union_table_->Verify();
+ for (const auto& table_pair : mod_union_tables_) {
+ accounting::ModUnionTable* mod_union_table = table_pair.second;
+ mod_union_table->UpdateAndMarkReferences(IdentityCallback, nullptr);
+ mod_union_table->Verify();
+ }
thread_list->ResumeAll();
}
}
@@ -2148,5 +2139,10 @@
return ret;
}
+void Heap::AddModUnionTable(accounting::ModUnionTable* mod_union_table) {
+ DCHECK(mod_union_table != nullptr);
+ mod_union_tables_.Put(mod_union_table->GetSpace(), mod_union_table);
+}
+
} // namespace gc
} // namespace art
diff --git a/runtime/gc/heap.h b/runtime/gc/heap.h
index 0b64261..0ac3cf0 100644
--- a/runtime/gc/heap.h
+++ b/runtime/gc/heap.h
@@ -368,11 +368,6 @@
accounting::ObjectStack* stack)
EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
- // Update and mark mod union table based on gc type.
- void UpdateAndMarkModUnion(collector::MarkSweep* mark_sweep, base::TimingLogger& timings,
- collector::GcType gc_type)
- EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
-
// Gets called when we get notified by ActivityThread that the process state has changed.
void ListenForProcessStateChange();
@@ -426,6 +421,8 @@
size_t GetConcGCThreadCount() const {
return conc_gc_threads_;
}
+ accounting::ModUnionTable* FindModUnionTableFromSpace(space::Space* space);
+ void AddModUnionTable(accounting::ModUnionTable* mod_union_table);
private:
// Allocates uninitialized storage. Passing in a null space tries to place the object in the
@@ -522,12 +519,8 @@
// The card table, dirtied by the write barrier.
UniquePtr<accounting::CardTable> card_table_;
- // The mod-union table remembers all of the references from the image space to the alloc /
- // zygote spaces to allow the card table to be cleared.
- UniquePtr<accounting::ModUnionTable> image_mod_union_table_;
-
- // This table holds all of the references from the zygote space to the alloc space.
- UniquePtr<accounting::ModUnionTable> zygote_mod_union_table_;
+ // A mod-union table remembers all of the references from its space to other spaces.
+ SafeMap<space::Space*, accounting::ModUnionTable*> mod_union_tables_;
// What kind of concurrency behavior is the runtime after? True for concurrent mark sweep GC,
// false for stop-the-world mark sweep.
diff --git a/runtime/mirror/object.h b/runtime/mirror/object.h
index e105525..003581a 100644
--- a/runtime/mirror/object.h
+++ b/runtime/mirror/object.h
@@ -189,6 +189,11 @@
}
}
+ Object** GetFieldObjectAddr(MemberOffset field_offset) ALWAYS_INLINE {
+ VerifyObject(this);
+ return reinterpret_cast<Object**>(reinterpret_cast<byte*>(this) + field_offset.Int32Value());
+ }
+
uint32_t GetField32(MemberOffset field_offset, bool is_volatile) const {
VerifyObject(this);
const byte* raw_addr = reinterpret_cast<const byte*>(this) + field_offset.Int32Value();