blob: cdc398c989677c86ebd8ff31ba0849db6fa58ba5 [file] [log] [blame]
Mathieu Chartier52e4b432014-06-10 11:22:31 -07001/*
2 * Copyright (C) 2014 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include "mark_compact.h"
18
19#include "base/logging.h"
20#include "base/mutex-inl.h"
21#include "base/timing_logger.h"
22#include "gc/accounting/heap_bitmap-inl.h"
23#include "gc/accounting/mod_union_table.h"
24#include "gc/accounting/remembered_set.h"
25#include "gc/accounting/space_bitmap-inl.h"
26#include "gc/heap.h"
27#include "gc/reference_processor.h"
28#include "gc/space/bump_pointer_space.h"
29#include "gc/space/bump_pointer_space-inl.h"
30#include "gc/space/image_space.h"
31#include "gc/space/large_object_space.h"
32#include "gc/space/space-inl.h"
33#include "indirect_reference_table.h"
34#include "intern_table.h"
35#include "jni_internal.h"
36#include "mark_sweep-inl.h"
37#include "monitor.h"
38#include "mirror/art_field.h"
39#include "mirror/art_field-inl.h"
40#include "mirror/class-inl.h"
41#include "mirror/class_loader.h"
42#include "mirror/dex_cache.h"
43#include "mirror/reference-inl.h"
44#include "mirror/object-inl.h"
45#include "mirror/object_array.h"
46#include "mirror/object_array-inl.h"
47#include "runtime.h"
48#include "stack.h"
49#include "thread-inl.h"
50#include "thread_list.h"
51
Mathieu Chartier52e4b432014-06-10 11:22:31 -070052using ::art::mirror::Object;
53
54namespace art {
55namespace gc {
56namespace collector {
57
58void MarkCompact::BindBitmaps() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -070059 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -070060 WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
61 // Mark all of the spaces we never collect as immune.
62 for (const auto& space : GetHeap()->GetContinuousSpaces()) {
63 if (space->GetGcRetentionPolicy() == space::kGcRetentionPolicyNeverCollect ||
64 space->GetGcRetentionPolicy() == space::kGcRetentionPolicyFullCollect) {
65 CHECK(immune_region_.AddContinuousSpace(space)) << "Failed to add space " << *space;
66 }
67 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -070068}
69
// Constructs the collector with a human-readable name derived from the prefix
// (e.g. "<prefix> mark compact"). The space to compact is set later via SetSpace().
MarkCompact::MarkCompact(Heap* heap, const std::string& name_prefix)
    : GarbageCollector(heap, name_prefix + (name_prefix.empty() ? "" : " ") + "mark compact"),
      space_(nullptr), collector_name_(name_) {
}
74
75void MarkCompact::RunPhases() {
76 Thread* self = Thread::Current();
77 InitializePhase();
78 CHECK(!Locks::mutator_lock_->IsExclusiveHeld(self));
79 {
80 ScopedPause pause(this);
81 GetHeap()->PreGcVerificationPaused(this);
82 GetHeap()->PrePauseRosAllocVerification(this);
83 MarkingPhase();
84 ReclaimPhase();
85 }
86 GetHeap()->PostGcVerification(this);
87 FinishPhase();
88}
89
90void MarkCompact::ForwardObject(mirror::Object* obj) {
91 const size_t alloc_size = RoundUp(obj->SizeOf(), space::BumpPointerSpace::kAlignment);
92 LockWord lock_word = obj->GetLockWord(false);
93 // If we have a non empty lock word, store it and restore it later.
94 if (lock_word.GetValue() != LockWord().GetValue()) {
95 // Set the bit in the bitmap so that we know to restore it later.
96 objects_with_lockword_->Set(obj);
97 lock_words_to_restore_.push_back(lock_word);
98 }
99 obj->SetLockWord(LockWord::FromForwardingAddress(reinterpret_cast<size_t>(bump_pointer_)),
100 false);
101 bump_pointer_ += alloc_size;
102 ++live_objects_in_space_;
103}
104
// Visitor which computes and installs a forwarding address for every marked
// object it is applied to (see MarkCompact::ForwardObject).
class CalculateObjectForwardingAddressVisitor {
 public:
  explicit CalculateObjectForwardingAddressVisitor(MarkCompact* collector)
      : collector_(collector) {}
  void operator()(mirror::Object* obj) const EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_,
                                                                      Locks::heap_bitmap_lock_) {
    // Only marked, properly aligned objects should reach this visitor.
    DCHECK_ALIGNED(obj, space::BumpPointerSpace::kAlignment);
    DCHECK(collector_->IsMarked(obj));
    collector_->ForwardObject(obj);
  }

 private:
  MarkCompact* const collector_;
};
119
120void MarkCompact::CalculateObjectForwardingAddresses() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700121 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700122 // The bump pointer in the space where the next forwarding address will be.
Ian Rogers13735952014-10-08 12:43:28 -0700123 bump_pointer_ = reinterpret_cast<uint8_t*>(space_->Begin());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700124 // Visit all the marked objects in the bitmap.
125 CalculateObjectForwardingAddressVisitor visitor(this);
126 objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
127 reinterpret_cast<uintptr_t>(space_->End()),
128 visitor);
129}
130
131void MarkCompact::InitializePhase() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700132 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700133 mark_stack_ = heap_->GetMarkStack();
134 DCHECK(mark_stack_ != nullptr);
135 immune_region_.Reset();
136 CHECK(space_->CanMoveObjects()) << "Attempting compact non-movable space from " << *space_;
137 // TODO: I don't think we should need heap bitmap lock to Get the mark bitmap.
138 ReaderMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
139 mark_bitmap_ = heap_->GetMarkBitmap();
140 live_objects_in_space_ = 0;
141}
142
143void MarkCompact::ProcessReferences(Thread* self) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700144 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
145 heap_->GetReferenceProcessor()->ProcessReferences(
Mathieu Chartier10fb83a2014-06-15 15:15:43 -0700146 false, GetTimings(), GetCurrentIteration()->GetClearSoftReferences(),
147 &HeapReferenceMarkedCallback, &MarkObjectCallback, &ProcessMarkStackCallback, this);
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700148}
149
150class BitmapSetSlowPathVisitor {
151 public:
152 void operator()(const mirror::Object* obj) const {
153 // Marking a large object, make sure its aligned as a sanity check.
154 if (!IsAligned<kPageSize>(obj)) {
155 Runtime::Current()->GetHeap()->DumpSpaces(LOG(ERROR));
156 LOG(FATAL) << obj;
157 }
158 }
159};
160
161inline void MarkCompact::MarkObject(mirror::Object* obj) {
162 if (obj == nullptr) {
163 return;
164 }
165 if (kUseBakerOrBrooksReadBarrier) {
166 // Verify all the objects have the correct forward pointer installed.
167 obj->AssertReadBarrierPointer();
168 }
169 if (immune_region_.ContainsObject(obj)) {
170 return;
171 }
172 if (objects_before_forwarding_->HasAddress(obj)) {
173 if (!objects_before_forwarding_->Set(obj)) {
174 MarkStackPush(obj); // This object was not previously marked.
175 }
176 } else {
177 DCHECK(!space_->HasAddress(obj));
178 BitmapSetSlowPathVisitor visitor;
179 if (!mark_bitmap_->Set(obj, visitor)) {
180 // This object was not previously marked.
181 MarkStackPush(obj);
182 }
183 }
184}
185
186void MarkCompact::MarkingPhase() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700187 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700188 Thread* self = Thread::Current();
189 // Bitmap which describes which objects we have to move.
190 objects_before_forwarding_.reset(accounting::ContinuousSpaceBitmap::Create(
191 "objects before forwarding", space_->Begin(), space_->Size()));
192 // Bitmap which describes which lock words we need to restore.
193 objects_with_lockword_.reset(accounting::ContinuousSpaceBitmap::Create(
194 "objects with lock words", space_->Begin(), space_->Size()));
195 CHECK(Locks::mutator_lock_->IsExclusiveHeld(self));
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700196 // Assume the cleared space is already empty.
197 BindBitmaps();
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700198 t.NewTiming("ProcessCards");
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700199 // Process dirty cards and add dirty cards to mod-union tables.
Lei Li4add3b42015-01-15 11:55:26 +0800200 heap_->ProcessCards(GetTimings(), false, false, true);
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700201 // Clear the whole card table since we can not Get any additional dirty cards during the
202 // paused GC. This saves memory but only works for pause the world collectors.
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700203 t.NewTiming("ClearCardTable");
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700204 heap_->GetCardTable()->ClearCardTable();
205 // Need to do this before the checkpoint since we don't want any threads to add references to
206 // the live stack during the recursive mark.
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700207 if (kUseThreadLocalAllocationStack) {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700208 t.NewTiming("RevokeAllThreadLocalAllocationStacks");
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700209 heap_->RevokeAllThreadLocalAllocationStacks(self);
210 }
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700211 t.NewTiming("SwapStacks");
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700212 heap_->SwapStacks(self);
213 {
214 WriterMutexLock mu(self, *Locks::heap_bitmap_lock_);
215 MarkRoots();
216 // Mark roots of immune spaces.
217 UpdateAndMarkModUnion();
218 // Recursively mark remaining objects.
219 MarkReachableObjects();
220 }
221 ProcessReferences(self);
222 {
223 ReaderMutexLock mu(self, *Locks::heap_bitmap_lock_);
224 SweepSystemWeaks();
225 }
226 // Revoke buffers before measuring how many objects were moved since the TLABs need to be revoked
227 // before they are properly counted.
228 RevokeAllThreadLocalBuffers();
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700229 // Disabled due to an issue where we have objects in the bump pointer space which reference dead
230 // objects.
231 // heap_->PreSweepingGcVerification(this);
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700232}
233
234void MarkCompact::UpdateAndMarkModUnion() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700235 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700236 for (auto& space : heap_->GetContinuousSpaces()) {
237 // If the space is immune then we need to mark the references to other spaces.
238 if (immune_region_.ContainsSpace(space)) {
239 accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
240 if (table != nullptr) {
241 // TODO: Improve naming.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800242 TimingLogger::ScopedTiming t2(
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700243 space->IsZygoteSpace() ? "UpdateAndMarkZygoteModUnionTable" :
Mathieu Chartier10fb83a2014-06-15 15:15:43 -0700244 "UpdateAndMarkImageModUnionTable", GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700245 table->UpdateAndMarkReferences(MarkHeapReferenceCallback, this);
246 }
247 }
248 }
249}
250
251void MarkCompact::MarkReachableObjects() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700252 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700253 accounting::ObjectStack* live_stack = heap_->GetLiveStack();
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700254 {
255 TimingLogger::ScopedTiming t2("MarkAllocStackAsLive", GetTimings());
256 heap_->MarkAllocStackAsLive(live_stack);
257 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700258 live_stack->Reset();
259 // Recursively process the mark stack.
260 ProcessMarkStack();
261}
262
263void MarkCompact::ReclaimPhase() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700264 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700265 WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
266 // Reclaim unmarked objects.
267 Sweep(false);
268 // Swap the live and mark bitmaps for each space which we modified space. This is an
269 // optimization that enables us to not clear live bits inside of the sweep. Only swaps unbound
270 // bitmaps.
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700271 SwapBitmaps();
272 GetHeap()->UnBindBitmaps(); // Unbind the live and mark bitmaps.
273 Compact();
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700274}
275
276void MarkCompact::ResizeMarkStack(size_t new_size) {
277 std::vector<Object*> temp(mark_stack_->Begin(), mark_stack_->End());
278 CHECK_LE(mark_stack_->Size(), new_size);
279 mark_stack_->Resize(new_size);
280 for (const auto& obj : temp) {
281 mark_stack_->PushBack(obj);
282 }
283}
284
285inline void MarkCompact::MarkStackPush(Object* obj) {
286 if (UNLIKELY(mark_stack_->Size() >= mark_stack_->Capacity())) {
287 ResizeMarkStack(mark_stack_->Capacity() * 2);
288 }
289 // The object must be pushed on to the mark stack.
290 mark_stack_->PushBack(obj);
291}
292
293void MarkCompact::ProcessMarkStackCallback(void* arg) {
294 reinterpret_cast<MarkCompact*>(arg)->ProcessMarkStack();
295}
296
297mirror::Object* MarkCompact::MarkObjectCallback(mirror::Object* root, void* arg) {
298 reinterpret_cast<MarkCompact*>(arg)->MarkObject(root);
299 return root;
300}
301
302void MarkCompact::MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* obj_ptr,
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800303 void* arg) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700304 reinterpret_cast<MarkCompact*>(arg)->MarkObject(obj_ptr->AsMirrorPtr());
305}
306
307void MarkCompact::DelayReferenceReferentCallback(mirror::Class* klass, mirror::Reference* ref,
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800308 void* arg) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700309 reinterpret_cast<MarkCompact*>(arg)->DelayReferenceReferent(klass, ref);
310}
311
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800312void MarkCompact::MarkRootCallback(Object** root, void* arg, const RootInfo& /*root_info*/) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700313 reinterpret_cast<MarkCompact*>(arg)->MarkObject(*root);
314}
315
Mathieu Chartiere34fa1d2015-01-14 14:55:47 -0800316void MarkCompact::UpdateRootCallback(Object** root, void* arg, const RootInfo& /*root_info*/) {
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700317 mirror::Object* obj = *root;
318 mirror::Object* new_obj = reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
319 if (obj != new_obj) {
320 *root = new_obj;
321 DCHECK(new_obj != nullptr);
322 }
323}
324
// Visitor which rewrites every reference field of a visited object to the
// referent's forwarding address (see MarkCompact::UpdateObjectReferences).
class UpdateObjectReferencesVisitor {
 public:
  explicit UpdateObjectReferencesVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
          EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->UpdateObjectReferences(obj);
  }

 private:
  MarkCompact* const collector_;
};
337
338void MarkCompact::UpdateReferences() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700339 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700340 Runtime* runtime = Runtime::Current();
341 // Update roots.
342 runtime->VisitRoots(UpdateRootCallback, this);
343 // Update object references in mod union tables and spaces.
344 for (const auto& space : heap_->GetContinuousSpaces()) {
345 // If the space is immune then we need to mark the references to other spaces.
346 accounting::ModUnionTable* table = heap_->FindModUnionTableFromSpace(space);
347 if (table != nullptr) {
348 // TODO: Improve naming.
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800349 TimingLogger::ScopedTiming t2(
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700350 space->IsZygoteSpace() ? "UpdateZygoteModUnionTableReferences" :
351 "UpdateImageModUnionTableReferences",
Mathieu Chartier10fb83a2014-06-15 15:15:43 -0700352 GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700353 table->UpdateAndMarkReferences(&UpdateHeapReferenceCallback, this);
354 } else {
355 // No mod union table, so we need to scan the space using bitmap visit.
356 // Scan the space using bitmap visit.
357 accounting::ContinuousSpaceBitmap* bitmap = space->GetLiveBitmap();
358 if (bitmap != nullptr) {
359 UpdateObjectReferencesVisitor visitor(this);
360 bitmap->VisitMarkedRange(reinterpret_cast<uintptr_t>(space->Begin()),
361 reinterpret_cast<uintptr_t>(space->End()),
362 visitor);
363 }
364 }
365 }
366 CHECK(!kMovingClasses)
367 << "Didn't update large object classes since they are assumed to not move.";
368 // Update the system weaks, these should already have been swept.
369 runtime->SweepSystemWeaks(&MarkedForwardingAddressCallback, this);
370 // Update the objects in the bump pointer space last, these objects don't have a bitmap.
371 UpdateObjectReferencesVisitor visitor(this);
372 objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
373 reinterpret_cast<uintptr_t>(space_->End()),
374 visitor);
375 // Update the reference processor cleared list.
376 heap_->GetReferenceProcessor()->UpdateRoots(&MarkedForwardingAddressCallback, this);
377}
378
379void MarkCompact::Compact() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700380 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700381 CalculateObjectForwardingAddresses();
382 UpdateReferences();
383 MoveObjects();
384 // Space
385 int64_t objects_freed = space_->GetObjectsAllocated() - live_objects_in_space_;
386 int64_t bytes_freed = reinterpret_cast<int64_t>(space_->End()) -
387 reinterpret_cast<int64_t>(bump_pointer_);
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700388 t.NewTiming("RecordFree");
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700389 space_->RecordFree(objects_freed, bytes_freed);
Mathieu Chartier10fb83a2014-06-15 15:15:43 -0700390 RecordFree(ObjectBytePair(objects_freed, bytes_freed));
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700391 space_->SetEnd(bump_pointer_);
392 // Need to zero out the memory we freed. TODO: Use madvise for pages.
393 memset(bump_pointer_, 0, bytes_freed);
394}
395
396// Marks all objects in the root set.
397void MarkCompact::MarkRoots() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700398 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700399 Runtime::Current()->VisitRoots(MarkRootCallback, this);
400}
401
402mirror::Object* MarkCompact::MarkedForwardingAddressCallback(mirror::Object* obj, void* arg) {
403 return reinterpret_cast<MarkCompact*>(arg)->GetMarkedForwardAddress(obj);
404}
405
406inline void MarkCompact::UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) {
407 mirror::Object* obj = reference->AsMirrorPtr();
408 if (obj != nullptr) {
409 mirror::Object* new_obj = GetMarkedForwardAddress(obj);
410 if (obj != new_obj) {
411 DCHECK(new_obj != nullptr);
412 reference->Assign(new_obj);
413 }
414 }
415}
416
417void MarkCompact::UpdateHeapReferenceCallback(mirror::HeapReference<mirror::Object>* reference,
418 void* arg) {
419 reinterpret_cast<MarkCompact*>(arg)->UpdateHeapReference(reference);
420}
421
// Field visitor which retargets each reference field (including Reference
// referents) to the referent's forwarding address.
class UpdateReferenceVisitor {
 public:
  explicit UpdateReferenceVisitor(MarkCompact* collector) : collector_(collector) {
  }

  // Regular instance/static reference field.
  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const
      ALWAYS_INLINE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(obj->GetFieldObjectReferenceAddr<kVerifyNone>(offset));
  }

  // java.lang.ref.Reference referent field.
  void operator()(mirror::Class* /*klass*/, mirror::Reference* ref) const
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    collector_->UpdateHeapReference(
        ref->GetFieldObjectReferenceAddr<kVerifyNone>(mirror::Reference::ReferentOffset()));
  }

 private:
  MarkCompact* const collector_;
};
441
442void MarkCompact::UpdateObjectReferences(mirror::Object* obj) {
443 UpdateReferenceVisitor visitor(this);
444 obj->VisitReferences<kMovingClasses>(visitor, visitor);
445}
446
447inline mirror::Object* MarkCompact::GetMarkedForwardAddress(mirror::Object* obj) const {
448 DCHECK(obj != nullptr);
449 if (objects_before_forwarding_->HasAddress(obj)) {
450 DCHECK(objects_before_forwarding_->Test(obj));
451 mirror::Object* ret =
452 reinterpret_cast<mirror::Object*>(obj->GetLockWord(false).ForwardingAddress());
453 DCHECK(ret != nullptr);
454 return ret;
455 }
456 DCHECK(!space_->HasAddress(obj));
457 DCHECK(IsMarked(obj));
458 return obj;
459}
460
461inline bool MarkCompact::IsMarked(const Object* object) const {
462 if (immune_region_.ContainsObject(object)) {
463 return true;
464 }
465 if (objects_before_forwarding_->HasAddress(object)) {
466 return objects_before_forwarding_->Test(object);
467 }
468 return mark_bitmap_->Test(object);
469}
470
471mirror::Object* MarkCompact::IsMarkedCallback(mirror::Object* object, void* arg) {
472 return reinterpret_cast<MarkCompact*>(arg)->IsMarked(object) ? object : nullptr;
473}
474
475bool MarkCompact::HeapReferenceMarkedCallback(mirror::HeapReference<mirror::Object>* ref_ptr,
476 void* arg) {
477 // Side effect free since we call this before ever moving objects.
478 return reinterpret_cast<MarkCompact*>(arg)->IsMarked(ref_ptr->AsMirrorPtr());
479}
480
481void MarkCompact::SweepSystemWeaks() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700482 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700483 Runtime::Current()->SweepSystemWeaks(IsMarkedCallback, this);
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700484}
485
486bool MarkCompact::ShouldSweepSpace(space::ContinuousSpace* space) const {
487 return space != space_ && !immune_region_.ContainsSpace(space);
488}
489
// Visitor which copies each visited object to its forwarding address
// (see MarkCompact::MoveObject).
class MoveObjectVisitor {
 public:
  explicit MoveObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }
  void operator()(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
          EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_) ALWAYS_INLINE {
    collector_->MoveObject(obj, obj->SizeOf());
  }

 private:
  MarkCompact* const collector_;
};
502
503void MarkCompact::MoveObject(mirror::Object* obj, size_t len) {
504 // Look at the forwarding address stored in the lock word to know where to copy.
505 DCHECK(space_->HasAddress(obj)) << obj;
506 uintptr_t dest_addr = obj->GetLockWord(false).ForwardingAddress();
507 mirror::Object* dest_obj = reinterpret_cast<mirror::Object*>(dest_addr);
508 DCHECK(space_->HasAddress(dest_obj)) << dest_obj;
509 // Use memmove since there may be overlap.
510 memmove(reinterpret_cast<void*>(dest_addr), reinterpret_cast<const void*>(obj), len);
511 // Restore the saved lock word if needed.
512 LockWord lock_word;
513 if (UNLIKELY(objects_with_lockword_->Test(obj))) {
514 lock_word = lock_words_to_restore_.front();
515 lock_words_to_restore_.pop_front();
516 }
517 dest_obj->SetLockWord(lock_word, false);
518}
519
520void MarkCompact::MoveObjects() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700521 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700522 // Move the objects in the before forwarding bitmap.
523 MoveObjectVisitor visitor(this);
524 objects_before_forwarding_->VisitMarkedRange(reinterpret_cast<uintptr_t>(space_->Begin()),
525 reinterpret_cast<uintptr_t>(space_->End()),
526 visitor);
527 CHECK(lock_words_to_restore_.empty());
528}
529
530void MarkCompact::Sweep(bool swap_bitmaps) {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700531 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700532 DCHECK(mark_stack_->IsEmpty());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700533 for (const auto& space : GetHeap()->GetContinuousSpaces()) {
534 if (space->IsContinuousMemMapAllocSpace()) {
535 space::ContinuousMemMapAllocSpace* alloc_space = space->AsContinuousMemMapAllocSpace();
536 if (!ShouldSweepSpace(alloc_space)) {
537 continue;
538 }
Andreas Gampe277ccbd2014-11-03 21:36:10 -0800539 TimingLogger::ScopedTiming t2(
Mathieu Chartier10fb83a2014-06-15 15:15:43 -0700540 alloc_space->IsZygoteSpace() ? "SweepZygoteSpace" : "SweepAllocSpace", GetTimings());
541 RecordFree(alloc_space->Sweep(swap_bitmaps));
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700542 }
543 }
544 SweepLargeObjects(swap_bitmaps);
545}
546
547void MarkCompact::SweepLargeObjects(bool swap_bitmaps) {
Mathieu Chartier2dbe6272014-09-16 10:43:23 -0700548 space::LargeObjectSpace* los = heap_->GetLargeObjectsSpace();
549 if (los != nullptr) {
550 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());\
551 RecordFreeLOS(los->Sweep(swap_bitmaps));
552 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700553}
554
555// Process the "referent" field in a java.lang.ref.Reference. If the referent has not yet been
556// marked, put it on the appropriate list in the heap for later processing.
557void MarkCompact::DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) {
558 heap_->GetReferenceProcessor()->DelayReferenceReferent(klass, reference,
559 &HeapReferenceMarkedCallback, this);
560}
561
// Field visitor used while scanning an object: marks each referenced object and
// defers Reference referents to the reference processor.
class MarkCompactMarkObjectVisitor {
 public:
  explicit MarkCompactMarkObjectVisitor(MarkCompact* collector) : collector_(collector) {
  }

  // Regular instance/static reference field.
  void operator()(Object* obj, MemberOffset offset, bool /*is_static*/) const ALWAYS_INLINE
      EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_) {
    // Object was already verified when we scanned it.
    collector_->MarkObject(obj->GetFieldObject<mirror::Object, kVerifyNone>(offset));
  }

  // java.lang.ref.Reference referent field: delay instead of marking directly.
  void operator()(mirror::Class* klass, mirror::Reference* ref) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) {
    collector_->DelayReferenceReferent(klass, ref);
  }

 private:
  MarkCompact* const collector_;
};
582
583// Visit all of the references of an object and update.
584void MarkCompact::ScanObject(Object* obj) {
585 MarkCompactMarkObjectVisitor visitor(this);
586 obj->VisitReferences<kMovingClasses>(visitor, visitor);
587}
588
589// Scan anything that's on the mark stack.
590void MarkCompact::ProcessMarkStack() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700591 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700592 while (!mark_stack_->IsEmpty()) {
593 Object* obj = mark_stack_->PopBack();
594 DCHECK(obj != nullptr);
595 ScanObject(obj);
596 }
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700597}
598
599void MarkCompact::SetSpace(space::BumpPointerSpace* space) {
600 DCHECK(space != nullptr);
601 space_ = space;
602}
603
604void MarkCompact::FinishPhase() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700605 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700606 space_ = nullptr;
607 CHECK(mark_stack_->IsEmpty());
608 mark_stack_->Reset();
609 // Clear all of the spaces' mark bitmaps.
610 WriterMutexLock mu(Thread::Current(), *Locks::heap_bitmap_lock_);
611 heap_->ClearMarkedObjects();
612 // Release our bitmaps.
613 objects_before_forwarding_.reset(nullptr);
614 objects_with_lockword_.reset(nullptr);
615}
616
617void MarkCompact::RevokeAllThreadLocalBuffers() {
Mathieu Chartierf5997b42014-06-20 10:37:54 -0700618 TimingLogger::ScopedTiming t(__FUNCTION__, GetTimings());
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700619 GetHeap()->RevokeAllThreadLocalBuffers();
Mathieu Chartier52e4b432014-06-10 11:22:31 -0700620}
621
622} // namespace collector
623} // namespace gc
624} // namespace art