/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_sweep.h"

#include <climits>
#include <vector>

#include "class_loader.h"
#include "dex_cache.h"
#include "heap.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "logging.h"
#include "macros.h"
#include "mark_stack.h"
#include "monitor.h"
#include "object.h"
#include "runtime.h"
#include "scoped_heap_lock.h"
#include "space.h"
#include "timing_logger.h"
#include "thread.h"

namespace art {

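// Mark-sweep collector for the runtime heap. A collection is driven from
// outside this file (by the Heap); the pieces implemented here are roughly:
//   Init()              - pick the initial mark bitmap and reset the mark stack
//   MarkRoots()         - mark everything reachable from the runtime roots
//   RecursiveMark()     - scan marked objects by walking the mark bitmaps
//   ReMarkRoots() / RecursiveMarkDirtyObjects() - re-scan roots and dirty cards
//   ProcessReferences() - handle soft/weak/finalizer/phantom references
//   Sweep()             - free objects that are live but not marked
// The exact sequencing and locking are the caller's responsibility.
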
MarkSweep::MarkSweep(MarkStack* mark_stack)
    : current_mark_bitmap_(NULL),
      mark_stack_(mark_stack),
      heap_(NULL),
      finger_(NULL),
      condemned_(NULL),
      soft_reference_list_(NULL),
      weak_reference_list_(NULL),
      finalizer_reference_list_(NULL),
      phantom_reference_list_(NULL),
      cleared_reference_list_(NULL),
      class_count_(0), array_count_(0), other_count_(0) {
  DCHECK(mark_stack_ != NULL);
}

void MarkSweep::Init() {
  heap_ = Runtime::Current()->GetHeap();
  mark_stack_->Reset();

  const Spaces& spaces = heap_->GetSpaces();
  // TODO: C++0x auto
  for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
    if ((*cur)->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT) {
      current_mark_bitmap_ = (*cur)->GetMarkBitmap();
      break;
    }
  }
  // TODO: if concurrent, enable card marking in compiler
  // TODO: check that the mark bitmap is entirely clear.
}

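// Marks an object by setting the corresponding bit in its space's mark
// bitmap. current_mark_bitmap_ is tried first as a locality optimization;
// otherwise the covering bitmap is looked up through the heap. When
// check_finger is true, newly marked objects below the current finger are
// also pushed on the mark stack so the bitmap scan does not miss them.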
void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
  DCHECK(obj != NULL);

  SpaceBitmap* space_bitmap = NULL;
  // Try to take advantage of locality of references within a space; failing
  // that, find the space the hard way.
  if (current_mark_bitmap_->HasAddress(obj)) {
    space_bitmap = current_mark_bitmap_;
  } else {
    space_bitmap = heap_->GetMarkBitmap()->GetSpaceBitmap(obj);
  }

  if (obj < condemned_) {
    DCHECK(IsMarked(obj));
    return;
  }
  bool is_marked = space_bitmap->Test(obj);
  // This object was not previously marked.
  if (!is_marked) {
    space_bitmap->Set(obj);
    if (check_finger && obj < finger_) {
      // The object must be pushed on to the mark stack.
      mark_stack_->Push(obj);
    }
  }
}

// Used to mark objects when recursing. Recursion is done by moving
// the finger across the bitmaps in address order and marking child
// objects. Any newly-marked objects whose addresses are lower than
// the finger won't be visited by the bitmap scan, so those objects
// need to be added to the mark stack.
void MarkSweep::MarkObject(const Object* obj) {
  if (obj != NULL) {
    MarkObject0(obj, true);
  }
}

void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  DCHECK(mark_sweep->finger_ == NULL);  // No point checking the finger while it is NULL.
  mark_sweep->MarkObject0(root, false);
}

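// Root visitor used while re-marking: unlike MarkObjectVisitor above, the
// finger check is enabled so that roots discovered behind the finger still
// end up on the mark stack.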
void MarkSweep::ReMarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->MarkObject0(root, true);
}

// Marks all objects in the root set.
void MarkSweep::MarkRoots() {
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}

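// Visitor that forwards every reference held by an object to
// MarkSweep::CheckReference; used by the verification helpers below.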
class CheckObjectVisitor {
 public:
  CheckObjectVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {
  }

  void operator ()(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) const {
    mark_sweep_->CheckReference(obj, ref, offset, is_static);
  }

 private:
  MarkSweep* const mark_sweep_;
};

void MarkSweep::CheckObject(const Object* obj) {
  DCHECK(obj != NULL);
  CheckObjectVisitor visitor(this);
  VisitObjectReferences(obj, visitor);
}

void MarkSweep::VerifyImageRootVisitor(Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  DCHECK(mark_sweep->heap_->GetMarkBitmap()->Test(root));
  mark_sweep->CheckObject(root);
}

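// Seeds the mark bitmap of every space that is only collected during a full
// GC (GCRP_FULL_COLLECT) by copying its live bitmap, so objects in those
// spaces are treated as marked for the remainder of this collection.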
void MarkSweep::CopyMarkBits() {
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT) {
      SpaceBitmap* live_bitmap = space->GetLiveBitmap();
      SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
      DCHECK_EQ(live_bitmap->Size(), mark_bitmap->Size());
      std::copy(live_bitmap->Begin(), live_bitmap->Begin() + live_bitmap->Size() / kWordSize, mark_bitmap->Begin());
    }
  }
}

class ScanImageRootVisitor {
 public:
  ScanImageRootVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {
  }

  void operator ()(const Object* root) const {
    DCHECK(root != NULL);
    mark_sweep_->ScanObject(root);
  }

 private:
  MarkSweep* const mark_sweep_;
};

// Marks all objects that are in images and have been touched by the mutator.
void MarkSweep::ScanDirtyImageRoots() {
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  CardTable* card_table = heap_->GetCardTable();
  ScanImageRootVisitor image_root_visitor(this);
  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (space->IsImageSpace()) {
      card_table->Scan(space->GetLiveBitmap(), space->Begin(), space->End(), image_root_visitor);
    }
  }
}

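// Callback passed to SpaceBitmap::ScanWalk: records the current finger
// position, then scans the marked object. ScanDirtyCardCallback is the
// analogous callback for card-table scans and does not move the finger.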
void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->ScanObject(obj);
}

void MarkSweep::ScanDirtyCardCallback(Object* obj, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->ScanObject(obj);
}

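// Scans every object that sits on a dirty card, using each space's mark
// bitmap to locate the objects covered by those cards.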
void MarkSweep::ScanGrayObjects() {
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  CardTable* card_table = heap_->GetCardTable();
  ScanImageRootVisitor image_root_visitor(this);
  for (size_t i = 0; i < spaces.size(); ++i) {
    byte* begin = spaces[i]->Begin();
    byte* end = spaces[i]->End();
    // Image spaces are handled properly since live == marked for them.
    card_table->Scan(spaces[i]->GetMarkBitmap(), begin, end, image_root_visitor);
  }
}

class CheckBitmapVisitor {
 public:
  CheckBitmapVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {
  }

  void operator ()(const Object* obj) const {
    DCHECK(obj != NULL);
    mark_sweep_->CheckObject(obj);
  }

 private:
  MarkSweep* mark_sweep_;
};

void MarkSweep::VerifyImageRoots() {
  // Verify that all references inside the image spaces point to objects that
  // are either in an image space or are marked objects in the alloc space.
#ifndef NDEBUG
  CheckBitmapVisitor visitor(this);
  const Spaces& spaces = heap_->GetSpaces();
  for (Spaces::const_iterator it = spaces.begin(); it != spaces.end(); ++it) {
    const Space* space = *it;
    if (space->IsImageSpace()) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(space->End());
      SpaceBitmap* live_bitmap = space->GetLiveBitmap();
      DCHECK(live_bitmap != NULL);
      live_bitmap->VisitMarkedRange(begin, end, visitor);
    }
  }
#endif
}

// Populates the mark stack based on the set of marked objects and
// recursively marks until the mark stack is emptied.
void MarkSweep::RecursiveMark(bool partial) {
  // RecursiveMark will build the lists of known instances of the Reference classes.
  // See DelayReferenceReferent for details.
  CHECK(soft_reference_list_ == NULL);
  CHECK(weak_reference_list_ == NULL);
  CHECK(finalizer_reference_list_ == NULL);
  CHECK(phantom_reference_list_ == NULL);
  CHECK(cleared_reference_list_ == NULL);

  void* arg = reinterpret_cast<void*>(this);
  const Spaces& spaces = heap_->GetSpaces();

  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT ||
        (!partial && space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT)) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(space->End());

      current_mark_bitmap_ = space->GetMarkBitmap();
      current_mark_bitmap_->ScanWalk(begin, end, &ScanBitmapCallback, arg);
    }
  }
  finger_ = reinterpret_cast<Object*>(~0);
  // TODO: tune the frequency of emptying the mark stack
  ProcessMarkStack();
}

void MarkSweep::RecursiveMarkDirtyObjects() {
  ScanGrayObjects();
  ProcessMarkStack();
}

void MarkSweep::ReMarkRoots() {
  Runtime::Current()->VisitRoots(ReMarkObjectVisitor, this);
}

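// Clears JNI weak global references whose referents do not test as live in
// the given heap bitmap; cleared entries are replaced with
// kClearedJniWeakGlobal.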
void MarkSweep::SweepJniWeakGlobals(HeapBitmap* bitmap) {
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  MutexLock mu(vm->weak_globals_lock);
  IndirectReferenceTable* table = &vm->weak_globals;
  typedef IndirectReferenceTable::iterator It;  // TODO: C++0x auto
  for (It it = table->begin(), end = table->end(); it != end; ++it) {
    const Object** entry = *it;
    if (!bitmap->Test(*entry)) {
      *entry = kClearedJniWeakGlobal;
    }
  }
}

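// Sweeps the runtime's internal weak references: interned strings, monitors,
// and JNI weak globals. When swap_bitmaps is true, liveness is queried
// through the live bitmaps rather than the mark bitmaps; Sweep() passes true
// because the bitmaps are pre-swapped before sweeping.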
void MarkSweep::SweepSystemWeaks(bool swap_bitmaps) {
  Runtime* runtime = Runtime::Current();
  runtime->GetInternTable()->SweepInternTableWeaks(swap_bitmaps ? IsLiveCallback : IsMarkedCallback,
                                                   this);
  runtime->GetMonitorList()->SweepMonitorList(swap_bitmaps ? IsLiveCallback : IsMarkedCallback,
                                              this);
  SweepJniWeakGlobals(swap_bitmaps ? GetHeap()->GetLiveBitmap() : GetHeap()->GetMarkBitmap());
}

struct SweepCallbackContext {
  Heap* heap;
  AllocSpace* space;
};

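// Bulk-free callback invoked by SpaceBitmap::SweepWalk for alloc spaces:
// tallies the bytes being released, frees the objects back to their space,
// and records the totals with the heap.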
void MarkSweep::SweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
  ScopedHeapLock lock;

  size_t freed_objects = num_ptrs;
  size_t freed_bytes = 0;
  SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
  Heap* heap = context->heap;
  AllocSpace* space = context->space;
  // Use a bulk free that merges consecutive objects before freeing, or free per object?
  // Documentation suggests better free performance with merging, but this may be at the expense
  // of allocation.
  // TODO: investigate performance
  static const bool kUseFreeList = true;
  if (kUseFreeList) {
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
    }
    // AllocSpace::FreeList clears the value in ptrs, so perform after clearing the live bit
    space->FreeList(num_ptrs, ptrs);
  } else {
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
      space->Free(obj);
    }
  }
  heap->RecordFreeLocked(freed_objects, freed_bytes);
}

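// Sweep callback for the zygote space: clears live bits and dirties cards
// instead of freeing, to avoid dirtying the shared zygote pages.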
void MarkSweep::ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
  ScopedHeapLock lock;
  SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
  Heap* heap = context->heap;
  // We don't free any actual memory to avoid dirtying the shared zygote pages.
  for (size_t i = 0; i < num_ptrs; ++i) {
    Object* obj = static_cast<Object*>(ptrs[i]);
    heap->GetLiveBitmap()->Clear(obj);
    heap->GetCardTable()->MarkCard(obj);
  }
}

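// Sweeps up unmarked objects. System weaks are swept first (with swapped
// bitmaps), then each collected space is walked: alloc spaces free garbage
// through SweepCallback, while spaces only collected in a full GC (the
// zygote space) go through ZygoteSweepCallback, which clears live bits and
// dirties cards without freeing memory.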
void MarkSweep::Sweep(bool partial) {
  // If we don't swap bitmaps then we cannot do this concurrently.
  SweepSystemWeaks(true);

  DCHECK(mark_stack_->IsEmpty());

  const Spaces& spaces = heap_->GetSpaces();
  SweepCallbackContext scc;
  scc.heap = heap_;
  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT ||
        (!partial && space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT)) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(space->End());
      scc.space = space->AsAllocSpace();
      SpaceBitmap* live_bitmap = space->GetLiveBitmap();
      SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
      if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT) {
        // Bitmaps are pre-swapped for optimization which enables sweeping with the heap unlocked.
        SpaceBitmap::SweepWalk(
            *mark_bitmap, *live_bitmap, begin, end, &SweepCallback, reinterpret_cast<void*>(&scc));
      } else {
        // Zygote sweep takes care of dirtying cards and clearing live bits, does not free actual memory.
        SpaceBitmap::SweepWalk(
            *live_bitmap, *mark_bitmap, begin, end, &ZygoteSweepCallback, reinterpret_cast<void*>(&scc));
      }
    }
  }
}

// Scans instance fields.
inline void MarkSweep::ScanInstanceFields(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  ScanFields(obj, klass->GetReferenceInstanceOffsets(), false);
}

// Scans static storage on a Class.
inline void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  ScanFields(klass, klass->GetReferenceStaticOffsets(), true);
}

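// Marks every object referenced by the fields described by ref_offsets.
// When the class has a reference-offset bitmap, each set bit names a field
// offset directly; otherwise the class (or, for instances, the whole
// superclass chain) is walked and each reference field is visited.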
inline void MarkSweep::ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject<const Object*>(byte_offset, false);
      MarkObject(ref);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        MemberOffset field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        MarkObject(ref);
      }
    }
  }
}

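// Verification helper: if ref points into an alloc space but is not marked,
// log the field that holds it and whether the holder's card is dirty, to
// help track down missed card marks. It only logs; no collector state is
// changed.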
void MarkSweep::CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) {
  const Spaces& spaces = heap_->GetSpaces();
  // TODO: C++0x auto
  for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
    if ((*cur)->IsAllocSpace() && (*cur)->Contains(ref)) {
      DCHECK(IsMarked(obj));

      bool is_marked = IsMarked(ref);
      if (!is_marked) {
        LOG(INFO) << **cur;
        LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref)
                     << "' (" << reinterpret_cast<const void*>(ref) << ") in '" << PrettyTypeOf(obj)
                     << "' (" << reinterpret_cast<const void*>(obj) << ") at offset "
                     << reinterpret_cast<void*>(offset.Int32Value()) << " wasn't marked";

        const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
        DCHECK(klass != NULL);
        const ObjectArray<Field>* fields = is_static ? klass->GetSFields() : klass->GetIFields();
        DCHECK(fields != NULL);
        bool found = false;
        for (int32_t i = 0; i < fields->GetLength(); ++i) {
          const Field* cur = fields->Get(i);
          if (cur->GetOffset().Int32Value() == offset.Int32Value()) {
            LOG(WARNING) << "Field referencing the alloc space was " << PrettyField(cur);
            found = true;
            break;
          }
        }
        if (!found) {
          LOG(WARNING) << "Could not find field in object alloc space with offset " << offset.Int32Value();
        }

        bool obj_marked = heap_->GetCardTable()->IsDirty(obj);
        if (!obj_marked) {
          LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' "
                       << "(" << reinterpret_cast<const void*>(obj) << ") contains references to "
                       << "the alloc space, but wasn't card marked";
        }
      }
    }
    break;
  }
}

// Scans the header, static field references, and interface pointers
// of a class object.
inline void MarkSweep::ScanClass(const Object* obj) {
#ifndef NDEBUG
  ++class_count_;
#endif
  ScanInstanceFields(obj);
  ScanStaticFields(obj->AsClass());
}

// Scans the header of all array objects. If the array object is
// specialized to a reference type, scans the array data as well.
inline void MarkSweep::ScanArray(const Object* obj) {
#ifndef NDEBUG
  ++array_count_;
#endif
  MarkObject(obj->GetClass());
  if (obj->IsObjectArray()) {
    const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
    for (int32_t i = 0; i < array->GetLength(); ++i) {
      const Object* element = array->GetWithoutChecks(i);
      MarkObject(element);
    }
  }
}

// Process the "referent" field in a java.lang.ref.Reference. If the
// referent has not yet been marked, put the reference on the appropriate
// reference list for later processing.
void MarkSweep::DelayReferenceReferent(Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  DCHECK(klass->IsReferenceClass());
  Object* pending = obj->GetFieldObject<Object*>(heap_->GetReferencePendingNextOffset(), false);
  Object* referent = heap_->GetReferenceReferent(obj);
  if (pending == NULL && referent != NULL && !IsMarked(referent)) {
    Object** list = NULL;
    if (klass->IsSoftReferenceClass()) {
      list = &soft_reference_list_;
    } else if (klass->IsWeakReferenceClass()) {
      list = &weak_reference_list_;
    } else if (klass->IsFinalizerReferenceClass()) {
      list = &finalizer_reference_list_;
    } else if (klass->IsPhantomReferenceClass()) {
      list = &phantom_reference_list_;
    }
    DCHECK(list != NULL) << PrettyClass(klass) << " " << std::hex << klass->GetAccessFlags();
    heap_->EnqueuePendingReference(obj, list);
  }
}

// Scans the header and field references of a data object. If the
// scanned object is a reference subclass, it is scheduled for later
// processing.
inline void MarkSweep::ScanOther(const Object* obj) {
#ifndef NDEBUG
  ++other_count_;
#endif
  ScanInstanceFields(obj);
  if (obj->GetClass()->IsReferenceClass()) {
    DelayReferenceReferent(const_cast<Object*>(obj));
  }
}

// Scans an object reference. Determines the type of the reference
// and dispatches to a specialized scanning routine.
void MarkSweep::ScanObject(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  DCHECK(heap_->GetMarkBitmap()->Test(obj));
  if (obj->IsClass()) {
    ScanClass(obj);
  } else if (obj->IsArrayInstance()) {
    ScanArray(obj);
  } else {
    ScanOther(obj);
  }
}

// Scan anything that's on the mark stack.
void MarkSweep::ProcessMarkStack() {
  while (!mark_stack_->IsEmpty()) {
    const Object* obj = mark_stack_->Pop();
    DCHECK(obj != NULL);
    ScanObject(obj);
  }
}

// Walks the reference list marking any references subject to the
// reference clearing policy. References with a black referent are
// removed from the list. References with white referents biased
// toward saving are blackened and also removed from the list.
void MarkSweep::PreserveSomeSoftReferences(Object** list) {
  DCHECK(list != NULL);
  Object* clear = NULL;
  size_t counter = 0;

  DCHECK(mark_stack_->IsEmpty());

  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent == NULL) {
      // Referent was cleared by the user during marking.
      continue;
    }
    bool is_marked = IsMarked(referent);
    if (!is_marked && ((++counter) & 1)) {
      // Referent is white and biased toward saving, mark it.
      MarkObject(referent);
      is_marked = true;
    }
    if (!is_marked) {
      // Referent is white, queue it for clearing.
      heap_->EnqueuePendingReference(ref, &clear);
    }
  }
  *list = clear;
  // Restart the mark with the newly black references added to the
  // root set.
  ProcessMarkStack();
}

// Unlinks the reference list, clearing reference objects with white
// referents. Cleared references registered to a reference queue are
// scheduled for appending by the heap worker thread.
void MarkSweep::ClearWhiteReferences(Object** list) {
  DCHECK(list != NULL);
  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      // Referent is white, clear it.
      heap_->ClearReferenceReferent(ref);
      if (heap_->IsEnqueuable(ref)) {
        heap_->EnqueueReference(ref, &cleared_reference_list_);
      }
    }
  }
  DCHECK(*list == NULL);
}

// Enqueues finalizer references with white referents. White
// referents are blackened, moved to the zombie field, and the
// referent field is cleared.
void MarkSweep::EnqueueFinalizerReferences(Object** list) {
  DCHECK(list != NULL);
  MemberOffset zombie_offset = heap_->GetFinalizerReferenceZombieOffset();
  bool has_enqueued = false;
  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      MarkObject(referent);
      // If the referent is non-null the reference must be enqueuable.
      DCHECK(heap_->IsEnqueuable(ref));
      ref->SetFieldObject(zombie_offset, referent, false);
      heap_->ClearReferenceReferent(ref);
      heap_->EnqueueReference(ref, &cleared_reference_list_);
      has_enqueued = true;
    }
  }
  if (has_enqueued) {
    ProcessMarkStack();
  }
  DCHECK(*list == NULL);
}

// Process reference class instances and schedule finalizations.
void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
                                  Object** weak_references,
                                  Object** finalizer_references,
                                  Object** phantom_references) {
  DCHECK(soft_references != NULL);
  DCHECK(weak_references != NULL);
  DCHECK(finalizer_references != NULL);
  DCHECK(phantom_references != NULL);

  // Unless we are in the zygote or required to clear soft references
  // with white references, preserve some white referents.
  if (!clear_soft && !Runtime::Current()->IsZygote()) {
    PreserveSomeSoftReferences(soft_references);
  }

  // Clear all remaining soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Preserve all white objects with finalize methods and schedule
  // them for finalization.
  EnqueueFinalizerReferences(finalizer_references);

  // Clear all f-reachable soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Clear all phantom references with white referents.
  ClearWhiteReferences(phantom_references);

  // At this point all reference lists should be empty.
  DCHECK(*soft_references == NULL);
  DCHECK(*weak_references == NULL);
  DCHECK(*finalizer_references == NULL);
  DCHECK(*phantom_references == NULL);
}

MarkSweep::~MarkSweep() {
#ifndef NDEBUG
  VLOG(heap) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_ << " other=" << other_count_;
#endif
  // Clear all of the alloc spaces' mark bitmaps.
  const Spaces& spaces = heap_->GetSpaces();
  // TODO: C++0x auto
  for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
    if ((*cur)->GetGcRetentionPolicy() != GCRP_NEVER_COLLECT) {
      (*cur)->GetMarkBitmap()->Clear();
    }
  }
  mark_stack_->Reset();
}

}  // namespace art