// Copyright 2011 Google Inc. All Rights Reserved.

#include "mark_sweep.h"

#include <climits>
#include <vector>

#include "class_loader.h"
#include "heap.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "logging.h"
#include "macros.h"
#include "mark_stack.h"
#include "object.h"
#include "runtime.h"
#include "space.h"
#include "thread.h"

namespace art {

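// One-time initialization before a collection: creates the mark stack
// and caches the heap's mark and live bitmaps.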
bool MarkSweep::Init() {
  mark_stack_ = MarkStack::Create();
  if (mark_stack_ == NULL) {
    return false;
  }

  mark_bitmap_ = Heap::GetMarkBits();
  live_bitmap_ = Heap::GetLiveBits();

  // TODO: if concurrent, clear the card table.

  // TODO: check that the mark bitmap is entirely clear.

  return true;
}

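// Marks an object by setting its bit in the mark bitmap. Objects below
// the condemned boundary are expected to be marked already and are
// skipped. If check_finger is set and the object lies below the current
// finger, it is also pushed on the mark stack (see MarkObject below).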
void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
  DCHECK(obj != NULL);
  if (obj < condemned_) {
    DCHECK(IsMarked(obj));
    return;
  }
  bool is_marked = mark_bitmap_->Test(obj);
  // This object was not previously marked.
  if (!is_marked) {
    mark_bitmap_->Set(obj);
    if (check_finger && obj < finger_) {
      // The object must be pushed on to the mark stack.
      mark_stack_->Push(obj);
    }
  }
}

// Used to mark objects when recursing. Recursion is done by moving
// the finger across the bitmaps in address order and marking child
// objects. Any newly-marked objects whose addresses are lower than
// the finger won't be visited by the bitmap scan, so those objects
// need to be added to the mark stack.
void MarkSweep::MarkObject(const Object* obj) {
  if (obj != NULL) {
    MarkObject0(obj, true);
  }
}

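// Root visitor callback passed to Runtime::VisitRoots; marks each
// root object.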
void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->MarkObject0(root, true);
}

// Marks all objects in the root set.
void MarkSweep::MarkRoots() {
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}

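// Callback for the bitmap scan; updates the finger to the current
// scan position and scans the object for references.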
void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->ScanObject(obj);
}

// Populates the mark stack based on the set of marked objects and
// recursively marks until the mark stack is emptied.
void MarkSweep::RecursiveMark() {

  // RecursiveMark will build the lists of known instances of the Reference classes.
  // See DelayReferenceReferent for details.
  CHECK(soft_reference_list_ == NULL);
  CHECK(weak_reference_list_ == NULL);
  CHECK(finalizer_reference_list_ == NULL);
  CHECK(phantom_reference_list_ == NULL);
  CHECK(cleared_reference_list_ == NULL);

  void* arg = reinterpret_cast<void*>(this);
  const std::vector<Space*>& spaces = Heap::GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsCondemned()) {
      uintptr_t base = reinterpret_cast<uintptr_t>(spaces[i]->GetBase());
      mark_bitmap_->ScanWalk(base, &MarkSweep::ScanBitmapCallback, arg);
    }
  }
  finger_ = reinterpret_cast<Object*>(~0);
  ProcessMarkStack();
}

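// Re-marks the root set; not yet implemented.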
void MarkSweep::ReMarkRoots() {
  UNIMPLEMENTED(FATAL);
}

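// Clears any JNI weak global reference whose target was not marked,
// replacing the entry with the cleared-weak sentinel.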
void MarkSweep::SweepJniWeakGlobals() {
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  MutexLock mu(vm->weak_globals_lock);
  IndirectReferenceTable* table = &vm->weak_globals;
  typedef IndirectReferenceTable::iterator It; // TODO: C++0x auto
  for (It it = table->begin(), end = table->end(); it != end; ++it) {
    const Object** entry = *it;
    if (!IsMarked(*entry)) {
      *entry = kClearedJniWeakGlobal;
    }
  }
}

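// Predicate used when sweeping the intern table; matches interned
// strings that were not marked.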
struct InternTableEntryIsUnmarked : public InternTable::Predicate {
  InternTableEntryIsUnmarked(MarkSweep* ms) : ms_(ms) { }

  bool operator()(const String* s) const {
    return !ms_->IsMarked(s);
  }

  MarkSweep* ms_;
};

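// Sweeps the monitor list; not yet implemented (the Dalvik equivalent
// is left below for reference).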
void MarkSweep::SweepMonitorList() {
  UNIMPLEMENTED(FATAL);
  //dvmSweepMonitorList(&gDvm.monitorList, isUnmarkedObject);
}

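// Sweeps runtime-internal weak references: weakly interned strings,
// the monitor list, and JNI weak globals.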
void MarkSweep::SweepSystemWeaks() {
  Runtime::Current()->GetInternTable()->RemoveWeakIf(InternTableEntryIsUnmarked(this));
  SweepMonitorList();
  SweepJniWeakGlobals();
}

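// Callback for the sweep bitmap walk; frees a batch of unreachable
// objects back to their space.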
void MarkSweep::SweepCallback(size_t num_ptrs, void** ptrs, void* arg) {
  // TODO: lock heap if concurrent
  Space* space = static_cast<Space*>(arg);
  for (size_t i = 0; i < num_ptrs; ++i) {
    Object* obj = static_cast<Object*>(ptrs[i]);
    space->Free(obj);
  }
  // TODO: unlock heap if concurrent
}

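// Walks the live and mark bitmaps of each condemned space and frees
// objects that are live but no longer marked.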
void MarkSweep::Sweep() {
  const std::vector<Space*>& spaces = Heap::GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsCondemned()) {
      uintptr_t base = reinterpret_cast<uintptr_t>(spaces[i]->GetBase());
      uintptr_t limit = reinterpret_cast<uintptr_t>(spaces[i]->GetLimit());
      void* arg = static_cast<void*>(spaces[i]);
      HeapBitmap::SweepWalk(*live_bitmap_, *mark_bitmap_, base, limit,
                            &MarkSweep::SweepCallback, arg);
    }
  }
}

// Scans instance fields.
void MarkSweep::ScanInstanceFields(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  ScanFields(obj,
             klass->GetReferenceInstanceOffsets(),
             false);
}

// Scans static storage on a Class.
void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  ScanFields(klass,
             klass->GetReferenceStaticOffsets(),
             true);
}

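// Marks the objects referenced by the fields of obj. If a reference
// offset bitmap is available it is used directly; otherwise the
// reference fields are found by walking the field lists of the class
// (and, for instance fields, of its superclasses).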
void MarkSweep::ScanFields(const Object* obj,
                           uint32_t ref_offsets,
                           bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      size_t byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject(byte_offset);
      MarkObject(ref);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        size_t field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject(field_offset);
        MarkObject(ref);
      }
    }
  }
}

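// Marks the interfaces implemented by a class.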
void MarkSweep::ScanInterfaces(const Class* klass) {
  DCHECK(klass != NULL);
  for (size_t i = 0; i < klass->NumInterfaces(); ++i) {
    MarkObject(klass->GetInterface(i));
  }
}

// Scans the header, static field references, and interface pointers
// of a class object.
void MarkSweep::ScanClass(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->IsClass());
  const Class* klass = obj->AsClass();
  MarkObject(klass->GetClass());
  if (klass->IsArrayClass()) {
    MarkObject(klass->GetComponentType());
  }
  if (klass->IsLoaded()) {
    MarkObject(klass->GetSuperClass());
  }
  MarkObject(klass->GetClassLoader());
  ScanInstanceFields(obj);
  ScanStaticFields(klass);
  // TODO: scan methods
  // TODO: scan instance fields
  if (klass->IsLoaded()) {
    ScanInterfaces(klass);
  }
}

// Scans the header of all array objects. If the array object is
// specialized to a reference type, scans the array data as well.
void MarkSweep::ScanArray(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  MarkObject(obj->GetClass());
  if (obj->IsObjectArray()) {
    const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
    for (int32_t i = 0; i < array->GetLength(); ++i) {
      const Object* element = array->Get(i);
      MarkObject(element);
    }
  }
}

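// Adds a reference to a circular singly-linked list of pending
// references threaded through the pendingNext field.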
void MarkSweep::EnqueuePendingReference(Object* ref, Object** list) {
  DCHECK(ref != NULL);
  DCHECK(list != NULL);
  size_t offset = Heap::GetReferencePendingNextOffset();
  if (*list == NULL) {
    ref->SetFieldObject(offset, ref);
    *list = ref;
  } else {
    Object* head = (*list)->GetFieldObject(offset);
    ref->SetFieldObject(offset, head);
    (*list)->SetFieldObject(offset, ref);
  }
}

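// Removes one reference from a circular pending-reference list,
// clears its pendingNext field, and returns it.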
Object* MarkSweep::DequeuePendingReference(Object** list) {
  DCHECK(list != NULL);
  DCHECK(*list != NULL);
  size_t offset = Heap::GetReferencePendingNextOffset();
  Object* head = (*list)->GetFieldObject(offset);
  Object* ref;
  if (*list == head) {
    ref = *list;
    *list = NULL;
  } else {
    Object* next = head->GetFieldObject(offset);
    (*list)->SetFieldObject(offset, next);
    ref = head;
  }
  ref->SetFieldObject(offset, NULL);
  return ref;
}

// Process the "referent" field in a java.lang.ref.Reference. If the
// referent has not yet been marked, put it on the appropriate list in
// the gcHeap for later processing.
void MarkSweep::DelayReferenceReferent(Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  DCHECK(klass->IsReference());
  Object* pending = obj->GetFieldObject(Heap::GetReferencePendingNextOffset());
  Object* referent = obj->GetFieldObject(Heap::GetReferenceReferentOffset());
  if (pending == NULL && referent != NULL && !IsMarked(referent)) {
    Object** list = NULL;
    if (klass->IsSoftReference()) {
      list = &soft_reference_list_;
    } else if (klass->IsWeakReference()) {
      list = &weak_reference_list_;
    } else if (klass->IsFinalizerReference()) {
      list = &finalizer_reference_list_;
    } else if (klass->IsPhantomReference()) {
      list = &phantom_reference_list_;
    }
    DCHECK(list != NULL);
    EnqueuePendingReference(obj, list);
  }
}

// Scans the header and field references of a data object. If the
// scanned object is a reference subclass, it is scheduled for later
// processing.
void MarkSweep::ScanOther(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  MarkObject(klass);
  ScanInstanceFields(obj);
  if (klass->IsReference()) {
    DelayReferenceReferent(const_cast<Object*>(obj));
  }
}

// Scans an object reference. Determines the type of the reference
// and dispatches to a specialized scanning routine.
void MarkSweep::ScanObject(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  DCHECK(IsMarked(obj));
  if (obj->IsClass()) {
    ScanClass(obj);
  } else if (obj->IsArrayInstance()) {
    ScanArray(obj);
  } else {
    ScanOther(obj);
  }
}

// Scan anything that's on the mark stack. We can't use the bitmaps
// anymore, so use a finger that points past the end of them.
void MarkSweep::ProcessMarkStack() {
  while (!mark_stack_->IsEmpty()) {
    const Object* obj = mark_stack_->Pop();
    ScanObject(obj);
  }
}

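// Rescans dirty objects; currently this just drains the mark stack.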
void MarkSweep::ScanDirtyObjects() {
  ProcessMarkStack();
}

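// Clears the referent field of a java.lang.ref.Reference.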
void MarkSweep::ClearReference(Object* ref) {
  DCHECK(ref != NULL);
  ref->SetFieldObject(Heap::GetReferenceReferentOffset(), NULL);
}

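// Returns true if the reference has an associated queue and has not
// yet been enqueued on it.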
bool MarkSweep::IsEnqueuable(const Object* ref) {
  DCHECK(ref != NULL);
  const Object* queue = ref->GetFieldObject(Heap::GetReferenceQueueOffset());
  const Object* queue_next = ref->GetFieldObject(Heap::GetReferenceQueueNextOffset());
  return (queue != NULL) && (queue_next == NULL);
}

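// Adds a cleared reference to the cleared-reference list so that it
// can later be appended to its reference queue.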
void MarkSweep::EnqueueReference(Object* ref) {
  DCHECK(ref != NULL);
  CHECK(ref->GetFieldObject(Heap::GetReferenceQueueOffset()) != NULL);
  CHECK(ref->GetFieldObject(Heap::GetReferenceQueueNextOffset()) == NULL);
  EnqueuePendingReference(ref, &cleared_reference_list_);
}

// Walks the reference list marking any references subject to the
// reference clearing policy. References with a black referent are
// removed from the list. References with white referents biased
// toward saving are blackened and also removed from the list.
void MarkSweep::PreserveSomeSoftReferences(Object** list) {
  DCHECK(list != NULL);
  Object* clear = NULL;
  size_t counter = 0;
  while (*list != NULL) {
    Object* ref = DequeuePendingReference(list);
    Object* referent = ref->GetFieldObject(Heap::GetReferenceReferentOffset());
    if (referent == NULL) {
      // Referent was cleared by the user during marking.
      continue;
    }
    bool is_marked = IsMarked(referent);
    if (!is_marked && ((++counter) & 1)) {
      // Referent is white and biased toward saving, mark it.
      MarkObject(referent);
      is_marked = true;
    }
    if (!is_marked) {
      // Referent is white, queue it for clearing.
      EnqueuePendingReference(ref, &clear);
    }
  }
  *list = clear;
  // Restart the mark with the newly black references added to the
  // root set.
  ProcessMarkStack();
}

// Unlinks the reference list, clearing reference objects with white
// referents. Cleared references registered to a reference queue are
// scheduled for appending by the heap worker thread.
void MarkSweep::ClearWhiteReferences(Object** list) {
  DCHECK(list != NULL);
  size_t offset = Heap::GetReferenceReferentOffset();
  while (*list != NULL) {
    Object* ref = DequeuePendingReference(list);
    Object* referent = ref->GetFieldObject(offset);
    if (referent != NULL && !IsMarked(referent)) {
      // Referent is white, clear it.
      ClearReference(ref);
      if (IsEnqueuable(ref)) {
        EnqueueReference(ref);
      }
    }
  }
  DCHECK(*list == NULL);
}

// Enqueues finalizer references with white referents. White
// referents are blackened, moved to the zombie field, and the
// referent field is cleared.
void MarkSweep::EnqueueFinalizerReferences(Object** list) {
  DCHECK(list != NULL);
  size_t referent_offset = Heap::GetReferenceReferentOffset();
  size_t zombie_offset = Heap::GetFinalizerReferenceZombieOffset();
  bool has_enqueued = false;
  while (*list != NULL) {
    Object* ref = DequeuePendingReference(list);
    Object* referent = ref->GetFieldObject(referent_offset);
    if (referent != NULL && !IsMarked(referent)) {
      MarkObject(referent);
      // If the referent is non-null the reference must be enqueuable.
      DCHECK(IsEnqueuable(ref));
      ref->SetFieldObject(zombie_offset, referent);
      ClearReference(ref);
      EnqueueReference(ref);
      has_enqueued = true;
    }
  }
  if (has_enqueued) {
    ProcessMarkStack();
  }
  DCHECK(*list == NULL);
}

// Process reference class instances and schedule finalizations.
void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
                                  Object** weak_references,
                                  Object** finalizer_references,
                                  Object** phantom_references) {
  DCHECK(soft_references != NULL);
  DCHECK(weak_references != NULL);
  DCHECK(finalizer_references != NULL);
  DCHECK(phantom_references != NULL);

  // Unless we are in the zygote or required to clear soft references
  // with white references, preserve some white referents.
  if (!clear_soft) {
    PreserveSomeSoftReferences(soft_references);
  }

  // Clear all remaining soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Preserve all white objects with finalize methods and schedule
  // them for finalization.
  EnqueueFinalizerReferences(finalizer_references);

  // Clear all f-reachable soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Clear all phantom references with white referents.
  ClearWhiteReferences(phantom_references);

  // At this point all reference lists should be empty.
  DCHECK(*soft_references == NULL);
  DCHECK(*weak_references == NULL);
  DCHECK(*finalizer_references == NULL);
  DCHECK(*phantom_references == NULL);
}

// Pushes a list of cleared references out to the managed heap.
void MarkSweep::EnqueueClearedReferences(Object** cleared) {
  DCHECK(cleared != NULL);
  if (*cleared != NULL) {
    Thread* self = Thread::Current();
    DCHECK(self != NULL);
    // TODO: Method *meth = gDvm.methJavaLangRefReferenceQueueAdd;
    // DCHECK(meth != NULL);
    // JValue unused;
    // Object* reference = *cleared;
    // TODO: dvmCallMethod(self, meth, NULL, &unused, reference);
    UNIMPLEMENTED(FATAL);
    *cleared = NULL;
  }
}

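// Releases the mark stack and clears the mark bitmap.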
MarkSweep::~MarkSweep() {
  delete mark_stack_;
  mark_bitmap_->Clear();
}

}  // namespace art