// Copyright 2011 Google Inc. All Rights Reserved.

#include "mark_sweep.h"

#include <climits>
#include <vector>

#include "heap.h"
#include "logging.h"
#include "macros.h"
#include "mark_stack.h"
#include "object.h"
#include "class_loader.h"
#include "runtime.h"
#include "space.h"
#include "thread.h"

namespace art {

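// Allocates the mark stack and caches the heap's mark and live
// bitmaps. Returns false if the mark stack could not be created.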
bool MarkSweep::Init() {
  mark_stack_ = MarkStack::Create();
  if (mark_stack_ == NULL) {
    return false;
  }

  mark_bitmap_ = Heap::GetMarkBits();
  live_bitmap_ = Heap::GetLiveBits();

  // TODO: if concurrent, clear the card table.

  // TODO: check that the mark bitmap is entirely clear.

  return true;
}

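// Marks the object if it has not been marked already. Objects below
// the condemned boundary are expected to be marked already and are
// skipped. When check_finger is set, newly-marked objects below the
// finger are pushed on the mark stack so the bitmap scan will not
// miss them.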
void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
  DCHECK(obj != NULL);
  if (obj < condemned_) {
    DCHECK(IsMarked(obj));
    return;
  }
  bool is_marked = mark_bitmap_->Test(obj);
  // This object was not previously marked.
  if (!is_marked) {
    mark_bitmap_->Set(obj);
    if (check_finger && obj < finger_) {
      // The object must be pushed on to the mark stack.
      mark_stack_->Push(obj);
    }
  }
}

// Used to mark objects when recursing. Recursion is done by moving
// the finger across the bitmaps in address order and marking child
// objects. Any newly-marked objects whose addresses are lower than
// the finger won't be visited by the bitmap scan, so those objects
// need to be added to the mark stack.
void MarkSweep::MarkObject(const Object* obj) {
  if (obj != NULL) {
    MarkObject0(obj, true);
  }
}

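// Root visiting callback passed to Runtime::VisitRoots; marks a
// single root object.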
void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->MarkObject0(root, true);
}

// Marks all objects in the root set.
void MarkSweep::MarkRoots() {
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}

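// Bitmap scan callback; records the current finger position and
// scans the object for references.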
void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->ScanObject(obj);
}

// Populates the mark stack based on the set of marked objects and
// recursively marks until the mark stack is emptied.
void MarkSweep::RecursiveMark() {
  // RecursiveMark will build the lists of known instances of the Reference classes.
  // See DelayReferenceReferent for details.
  CHECK(soft_reference_list_ == NULL);
  CHECK(weak_reference_list_ == NULL);
  CHECK(finalizer_reference_list_ == NULL);
  CHECK(phantom_reference_list_ == NULL);
  CHECK(cleared_reference_list_ == NULL);

  void* arg = reinterpret_cast<void*>(this);
  const std::vector<Space*>& spaces = Heap::GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsCondemned()) {
      uintptr_t base = reinterpret_cast<uintptr_t>(spaces[i]->GetBase());
      mark_bitmap_->ScanWalk(base, &MarkSweep::ScanBitmapCallback, arg);
    }
  }
  finger_ = reinterpret_cast<Object*>(~0);
  ProcessMarkStack();
}

void MarkSweep::ReMarkRoots() {
  UNIMPLEMENTED(FATAL);
}

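// Sweeps system weaks (interned strings, monitors, weak JNI globals)
// with unmarked referents. Not yet implemented.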
void MarkSweep::SweepSystemWeaks() {
  //Runtime::Current()->GetInternTable().RemoveWeakIf(isUnmarkedObject);
  UNIMPLEMENTED(FATAL);
  //dvmSweepMonitorList(&gDvm.monitorList, isUnmarkedObject);
  //sweepWeakJniGlobals();
}

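// Sweep walk callback; frees each object in the batch back to the
// space that owns it.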
void MarkSweep::SweepCallback(size_t num_ptrs, void** ptrs, void* arg) {
  // TODO: lock heap if concurrent
  Space* space = static_cast<Space*>(arg);
  for (size_t i = 0; i < num_ptrs; ++i) {
    Object* obj = static_cast<Object*>(ptrs[i]);
    space->Free(obj);
  }
  // TODO: unlock heap if concurrent
}

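// Sweeps each condemned space, freeing objects that are in the live
// bitmap but not in the mark bitmap.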
void MarkSweep::Sweep() {
  const std::vector<Space*>& spaces = Heap::GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsCondemned()) {
      uintptr_t base = reinterpret_cast<uintptr_t>(spaces[i]->GetBase());
      uintptr_t limit = reinterpret_cast<uintptr_t>(spaces[i]->GetLimit());
      void* arg = static_cast<void*>(spaces[i]);
      HeapBitmap::SweepWalk(*live_bitmap_, *mark_bitmap_, base, limit,
                            &MarkSweep::SweepCallback, arg);
    }
  }
}

// Scans instance fields.
void MarkSweep::ScanInstanceFields(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  ScanFields(obj,
             klass->GetReferenceInstanceOffsets(),
             false);
}

// Scans static storage on a Class.
void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  ScanFields(klass,
             klass->GetReferenceStaticOffsets(),
             true);
}

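// Marks the reference fields of an object. Uses the reference offset
// bitmap if one is available; otherwise walks the class (and, for
// instance fields, its superclasses) to find the reference fields.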
void MarkSweep::ScanFields(const Object* obj,
                           uint32_t ref_offsets,
                           bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      size_t byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject(byte_offset);
      MarkObject(ref);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        size_t field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject(field_offset);
        MarkObject(ref);
      }
    }
  }
}

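// Marks the interfaces implemented by a class.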
void MarkSweep::ScanInterfaces(const Class* klass) {
  DCHECK(klass != NULL);
  for (size_t i = 0; i < klass->NumInterfaces(); ++i) {
    MarkObject(klass->GetInterface(i));
  }
}

// Scans the header, static field references, and interface pointers
// of a class object.
void MarkSweep::ScanClass(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->IsClass());
  const Class* klass = obj->AsClass();
  MarkObject(klass->GetClass());
  if (klass->IsArrayClass()) {
    MarkObject(klass->GetComponentType());
  }
  if (klass->IsLoaded()) {
    MarkObject(klass->GetSuperClass());
  }
  MarkObject(klass->GetClassLoader());
  ScanInstanceFields(obj);
  ScanStaticFields(klass);
  // TODO: scan methods
  // TODO: scan instance fields
  if (klass->IsLoaded()) {
    ScanInterfaces(klass);
  }
}

// Scans the header of all array objects. If the array object is
// specialized to a reference type, scans the array data as well.
void MarkSweep::ScanArray(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  MarkObject(obj->GetClass());
  if (obj->IsObjectArray()) {
    const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
    for (int32_t i = 0; i < array->GetLength(); ++i) {
      const Object* element = array->Get(i);
      MarkObject(element);
    }
  }
}

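// Adds a reference to a circular singly-linked list of pending
// references, threaded through the reference's pending-next field.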
void MarkSweep::EnqueuePendingReference(Object* ref, Object** list) {
  DCHECK(ref != NULL);
  DCHECK(list != NULL);
  size_t offset = Heap::GetReferencePendingNextOffset();
  if (*list == NULL) {
    ref->SetFieldObject(offset, ref);
    *list = ref;
  } else {
    Object* head = (*list)->GetFieldObject(offset);
    ref->SetFieldObject(offset, head);
    (*list)->SetFieldObject(offset, ref);
  }
}

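// Removes a reference from a circular pending reference list, clears
// its pending-next field, and returns it.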
Object* MarkSweep::DequeuePendingReference(Object** list) {
  DCHECK(list != NULL);
  DCHECK(*list != NULL);
  size_t offset = Heap::GetReferencePendingNextOffset();
  Object* head = (*list)->GetFieldObject(offset);
  Object* ref;
  if (*list == head) {
    ref = *list;
    *list = NULL;
  } else {
    Object* next = head->GetFieldObject(offset);
    (*list)->SetFieldObject(offset, next);
    ref = head;
  }
  ref->SetFieldObject(offset, NULL);
  return ref;
}

// Process the "referent" field in a java.lang.ref.Reference. If the
// referent has not yet been marked, put it on the appropriate list in
// the gcHeap for later processing.
void MarkSweep::DelayReferenceReferent(Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  DCHECK(klass->IsReference());
  Object* pending = obj->GetFieldObject(Heap::GetReferencePendingNextOffset());
  Object* referent = obj->GetFieldObject(Heap::GetReferenceReferentOffset());
  if (pending == NULL && referent != NULL && !IsMarked(referent)) {
    Object** list = NULL;
    if (klass->IsSoftReference()) {
      list = &soft_reference_list_;
    } else if (klass->IsWeakReference()) {
      list = &weak_reference_list_;
    } else if (klass->IsFinalizerReference()) {
      list = &finalizer_reference_list_;
    } else if (klass->IsPhantomReference()) {
      list = &phantom_reference_list_;
    }
    DCHECK(list != NULL);
    EnqueuePendingReference(obj, list);
  }
}

// Scans the header and field references of a data object. If the
// scanned object is a reference subclass, it is scheduled for later
// processing.
void MarkSweep::ScanOther(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  MarkObject(klass);
  ScanInstanceFields(obj);
  if (klass->IsReference()) {
    DelayReferenceReferent(const_cast<Object*>(obj));
  }
}

// Scans an object reference. Determines the type of the reference
// and dispatches to a specialized scanning routine.
void MarkSweep::ScanObject(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  DCHECK(IsMarked(obj));
  if (obj->IsClass()) {
    ScanClass(obj);
  } else if (obj->IsArrayInstance()) {
    ScanArray(obj);
  } else {
    ScanOther(obj);
  }
}

// Scan anything that's on the mark stack. We can't use the bitmaps
// anymore, so use a finger that points past the end of them.
void MarkSweep::ProcessMarkStack() {
  while (!mark_stack_->IsEmpty()) {
    const Object* obj = mark_stack_->Pop();
    ScanObject(obj);
  }
}

void MarkSweep::ScanDirtyObjects() {
  ProcessMarkStack();
}

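// Clears the referent field of a reference object.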
void MarkSweep::ClearReference(Object* ref) {
  DCHECK(ref != NULL);
  ref->SetFieldObject(Heap::GetReferenceReferentOffset(), NULL);
}

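// A reference is enqueuable if it was registered with a queue and has
// not yet been enqueued on it.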
bool MarkSweep::IsEnqueuable(const Object* ref) {
  DCHECK(ref != NULL);
  const Object* queue = ref->GetFieldObject(Heap::GetReferenceQueueOffset());
  const Object* queue_next = ref->GetFieldObject(Heap::GetReferenceQueueNextOffset());
  return (queue != NULL) && (queue_next == NULL);
}

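// Adds a cleared reference to the cleared reference list for later
// appending to its queue.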
void MarkSweep::EnqueueReference(Object* ref) {
  DCHECK(ref != NULL);
  CHECK(ref->GetFieldObject(Heap::GetReferenceQueueOffset()) != NULL);
  CHECK(ref->GetFieldObject(Heap::GetReferenceQueueNextOffset()) == NULL);
  EnqueuePendingReference(ref, &cleared_reference_list_);
}

// Walks the reference list marking any references subject to the
// reference clearing policy. References with a black referent are
// removed from the list. References with white referents biased
// toward saving are blackened and also removed from the list.
void MarkSweep::PreserveSomeSoftReferences(Object** list) {
  DCHECK(list != NULL);
  Object* clear = NULL;
  size_t counter = 0;
  while (*list != NULL) {
    Object* ref = DequeuePendingReference(list);
    Object* referent = ref->GetFieldObject(Heap::GetReferenceReferentOffset());
    if (referent == NULL) {
      // Referent was cleared by the user during marking.
      continue;
    }
    bool is_marked = IsMarked(referent);
    if (!is_marked && ((++counter) & 1)) {
      // Referent is white and biased toward saving, mark it.
      MarkObject(referent);
      is_marked = true;
    }
    if (!is_marked) {
      // Referent is white, queue it for clearing.
      EnqueuePendingReference(ref, &clear);
    }
  }
  *list = clear;
  // Restart the mark with the newly black references added to the
  // root set.
  ProcessMarkStack();
}

// Unlinks the reference list, clearing reference objects with white
// referents. Cleared references registered to a reference queue are
// scheduled for appending by the heap worker thread.
void MarkSweep::ClearWhiteReferences(Object** list) {
  DCHECK(list != NULL);
  size_t offset = Heap::GetReferenceReferentOffset();
  while (*list != NULL) {
    Object* ref = DequeuePendingReference(list);
    Object* referent = ref->GetFieldObject(offset);
    if (referent != NULL && !IsMarked(referent)) {
      // Referent is white, clear it.
      ClearReference(ref);
      if (IsEnqueuable(ref)) {
        EnqueueReference(ref);
      }
    }
  }
  DCHECK(*list == NULL);
}

// Enqueues finalizer references with white referents. White
// referents are blackened, moved to the zombie field, and the
// referent field is cleared.
void MarkSweep::EnqueueFinalizerReferences(Object** list) {
  DCHECK(list != NULL);
  size_t referent_offset = Heap::GetReferenceReferentOffset();
  size_t zombie_offset = Heap::GetFinalizerReferenceZombieOffset();
  bool has_enqueued = false;
  while (*list != NULL) {
    Object* ref = DequeuePendingReference(list);
    Object* referent = ref->GetFieldObject(referent_offset);
    if (referent != NULL && !IsMarked(referent)) {
      MarkObject(referent);
      // If the referent is non-null the reference must be enqueuable.
      DCHECK(IsEnqueuable(ref));
      ref->SetFieldObject(zombie_offset, referent);
      ClearReference(ref);
      EnqueueReference(ref);
      has_enqueued = true;
    }
  }
  if (has_enqueued) {
    ProcessMarkStack();
  }
  DCHECK(*list == NULL);
}

// Process reference class instances and schedule finalizations.
void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
                                  Object** weak_references,
                                  Object** finalizer_references,
                                  Object** phantom_references) {
  DCHECK(soft_references != NULL);
  DCHECK(weak_references != NULL);
  DCHECK(finalizer_references != NULL);
  DCHECK(phantom_references != NULL);

  // Unless we are in the zygote or required to clear soft references
  // with white references, preserve some white referents.
  if (clear_soft) {
    PreserveSomeSoftReferences(soft_references);
  }

  // Clear all remaining soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Preserve all white objects with finalize methods and schedule
  // them for finalization.
  EnqueueFinalizerReferences(finalizer_references);

  // Clear all f-reachable soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Clear all phantom references with white referents.
  ClearWhiteReferences(phantom_references);

  // At this point all reference lists should be empty.
  DCHECK(*soft_references == NULL);
  DCHECK(*weak_references == NULL);
  DCHECK(*finalizer_references == NULL);
  DCHECK(*phantom_references == NULL);
}

// Pushes a list of cleared references out to the managed heap.
void MarkSweep::EnqueueClearedReferences(Object** cleared) {
  DCHECK(cleared != NULL);
  if (*cleared != NULL) {
    Thread* self = Thread::Current();
    DCHECK(self != NULL);
    // TODO: Method *meth = gDvm.methJavaLangRefReferenceQueueAdd;
    // DCHECK(meth != NULL);
    // JValue unused;
    // Object* reference = *cleared;
    // TODO: dvmCallMethod(self, meth, NULL, &unused, reference);
    UNIMPLEMENTED(FATAL);
    *cleared = NULL;
  }
}

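// Releases the mark stack and clears the mark bitmap.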
MarkSweep::~MarkSweep() {
  delete mark_stack_;
  mark_bitmap_->Clear();
}

}  // namespace art