blob: 227614dd5e92ba5da23d238e08ea2a8bc4a258b4 [file] [log] [blame]
Elliott Hughes2faa5f12012-01-30 14:42:07 -08001/*
2 * Copyright (C) 2011 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
Carl Shapiro69759ea2011-07-21 18:13:35 -070016
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070017#include "mark_sweep.h"
Carl Shapiro69759ea2011-07-21 18:13:35 -070018
Carl Shapiro58551df2011-07-24 03:09:51 -070019#include <climits>
20#include <vector>
21
Elliott Hughes410c0c82011-09-01 17:58:25 -070022#include "class_loader.h"
Brian Carlstrom693267a2011-09-06 09:25:34 -070023#include "dex_cache.h"
Carl Shapiro58551df2011-07-24 03:09:51 -070024#include "heap.h"
Elliott Hughes410c0c82011-09-01 17:58:25 -070025#include "indirect_reference_table.h"
26#include "intern_table.h"
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070027#include "logging.h"
28#include "macros.h"
29#include "mark_stack.h"
Elliott Hughesc33a32b2011-10-11 18:18:07 -070030#include "monitor.h"
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070031#include "object.h"
Brian Carlstrom1f870082011-08-23 16:02:11 -070032#include "runtime.h"
Carl Shapiro58551df2011-07-24 03:09:51 -070033#include "space.h"
Elliott Hughes307f75d2011-10-12 18:04:40 -070034#include "timing_logger.h"
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070035#include "thread.h"
Carl Shapiro69759ea2011-07-21 18:13:35 -070036
Carl Shapiro69759ea2011-07-21 18:13:35 -070037namespace art {
38
// Constructs a collector around an externally-owned mark stack.  All other
// state (heap, finger, reference lists) is populated later by Init() and the
// marking phases; the counters are debug-only statistics (see ScanClass/
// ScanArray/ScanOther).
MarkSweep::MarkSweep(MarkStack* mark_stack)
    : current_mark_bitmap_(NULL),
      mark_stack_(mark_stack),
      heap_(NULL),
      finger_(NULL),
      condemned_(NULL),
      soft_reference_list_(NULL),
      weak_reference_list_(NULL),
      finalizer_reference_list_(NULL),
      phantom_reference_list_(NULL),
      cleared_reference_list_(NULL),
      class_count_(0), array_count_(0), other_count_(0) {
  DCHECK(mark_stack_ != NULL);
}
Elliott Hughesb3bd5f02012-03-08 21:05:27 -080053
Mathieu Chartier5301cd22012-05-31 12:11:36 -070054void MarkSweep::Init() {
Elliott Hughesb3bd5f02012-03-08 21:05:27 -080055 heap_ = Runtime::Current()->GetHeap();
Mathieu Chartier5301cd22012-05-31 12:11:36 -070056 mark_stack_->Reset();
Carl Shapiro58551df2011-07-24 03:09:51 -070057
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070058 const Spaces& spaces = heap_->GetSpaces();
59 // TODO: C++0x auto
60 for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
Mathieu Chartiercc236d72012-07-20 10:29:05 -070061 if ((*cur)->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070062 current_mark_bitmap_ = (*cur)->GetMarkBitmap();
63 break;
64 }
65 }
buzbee0d966cf2011-09-08 17:34:58 -070066 // TODO: if concurrent, enable card marking in compiler
Carl Shapiro58551df2011-07-24 03:09:51 -070067 // TODO: check that the mark bitmap is entirely clear.
Carl Shapiro58551df2011-07-24 03:09:51 -070068}
69
Mathieu Chartierb43b7d42012-06-19 13:15:09 -070070void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
Carl Shapiro69759ea2011-07-21 18:13:35 -070071 DCHECK(obj != NULL);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070072
73 SpaceBitmap* space_bitmap = NULL;
74 // Try to take advantage of locality of references within a space, failing this find the space
75 // the hard way.
76 if (current_mark_bitmap_->HasAddress(obj)) {
77 space_bitmap = current_mark_bitmap_;
78 } else {
79 space_bitmap = heap_->GetMarkBitmap()->GetSpaceBitmap(obj);
80 }
81
Carl Shapiro69759ea2011-07-21 18:13:35 -070082 if (obj < condemned_) {
83 DCHECK(IsMarked(obj));
84 return;
85 }
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070086 bool is_marked = space_bitmap->Test(obj);
Carl Shapiro69759ea2011-07-21 18:13:35 -070087 // This object was not previously marked.
88 if (!is_marked) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -070089 space_bitmap->Set(obj);
Carl Shapiro69759ea2011-07-21 18:13:35 -070090 if (check_finger && obj < finger_) {
91 // The object must be pushed on to the mark stack.
92 mark_stack_->Push(obj);
93 }
94 }
95}
96
97// Used to mark objects when recursing. Recursion is done by moving
98// the finger across the bitmaps in address order and marking child
99// objects. Any newly-marked objects whose addresses are lower than
100// the finger won't be visited by the bitmap scan, so those objects
101// need to be added to the mark stack.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700102void MarkSweep::MarkObject(const Object* obj) {
Carl Shapiro69759ea2011-07-21 18:13:35 -0700103 if (obj != NULL) {
104 MarkObject0(obj, true);
105 }
106}
107
Elliott Hughescf4c6c42011-09-01 15:16:42 -0700108void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) {
Brian Carlstrom1f870082011-08-23 16:02:11 -0700109 DCHECK(root != NULL);
110 DCHECK(arg != NULL);
111 MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
Ian Rogers5d76c432011-10-31 21:42:49 -0700112 DCHECK(mark_sweep->finger_ == NULL); // no point to check finger if it is NULL
113 mark_sweep->MarkObject0(root, false);
Brian Carlstrom1f870082011-08-23 16:02:11 -0700114}
115
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700116void MarkSweep::ReMarkObjectVisitor(const Object* root, void* arg) {
117 DCHECK(root != NULL);
118 DCHECK(arg != NULL);
119 MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
120 mark_sweep->MarkObject0(root, true);
121}
122
// Marks all objects in the root set.
// Delegates root enumeration to the runtime; each root is marked via
// MarkObjectVisitor (which runs with check_finger == false since the finger
// is still NULL at this stage).
void MarkSweep::MarkRoots() {
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}
127
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700128class CheckObjectVisitor {
129 public:
130 CheckObjectVisitor(MarkSweep* const mark_sweep)
131 : mark_sweep_(mark_sweep) {
132
133 }
134
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700135 void operator ()(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) const
136 SHARED_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_,
137 GlobalSynchronization::mutator_lock_) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700138 mark_sweep_->CheckReference(obj, ref, offset, is_static);
139 }
140
141 private:
142 MarkSweep* const mark_sweep_;
143};
144
145void MarkSweep::CheckObject(const Object* obj) {
146 DCHECK(obj != NULL);
147 CheckObjectVisitor visitor(this);
148 VisitObjectReferences(obj, visitor);
149}
150
151void MarkSweep::VerifyImageRootVisitor(Object* root, void* arg) {
152 DCHECK(root != NULL);
153 DCHECK(arg != NULL);
154 MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700155 DCHECK(mark_sweep->heap_->GetMarkBitmap()->Test(root));
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700156 mark_sweep->CheckObject(root);
157}
158
// For spaces that a partial collection does not re-trace (GCRP_FULL_COLLECT),
// seed the mark bitmap with a copy of the live bitmap so every previously
// live object counts as marked.
void MarkSweep::CopyMarkBits() {
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT) {
      SpaceBitmap* live_bitmap = space->GetLiveBitmap();
      SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
      DCHECK_EQ(live_bitmap->Size(), mark_bitmap->Size());
      // Size() is in bytes while Begin() presumably iterates in words, hence
      // the division by kWordSize — TODO(review): confirm Begin()'s element type.
      std::copy(live_bitmap->Begin(), live_bitmap->Begin() + live_bitmap->Size() / kWordSize, mark_bitmap->Begin());
    }
  }
}
171
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700172class ScanImageRootVisitor {
173 public:
174 ScanImageRootVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) {
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700175 }
176
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700177 void operator ()(const Object* root) const
178 EXCLUSIVE_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_)
179 SHARED_LOCKS_REQUIRED(GlobalSynchronization::mutator_lock_) {
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700180 DCHECK(root != NULL);
181 mark_sweep_->ScanObject(root);
182 }
183
184 private:
185 MarkSweep* const mark_sweep_;
186};
187
188// Marks all objects that are in images and have been touched by the mutator
189void MarkSweep::ScanDirtyImageRoots() {
190 const std::vector<Space*>& spaces = heap_->GetSpaces();
191 CardTable* card_table = heap_->GetCardTable();
192 ScanImageRootVisitor image_root_visitor(this);
193 for (size_t i = 0; i < spaces.size(); ++i) {
194 Space* space = spaces[i];
195 if (space->IsImageSpace()) {
196 card_table->Scan(space->GetLiveBitmap(), space->Begin(), space->End(), image_root_visitor);
197 }
198 }
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700199}
200
Carl Shapiro58551df2011-07-24 03:09:51 -0700201void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) {
202 MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
203 mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
204 mark_sweep->ScanObject(obj);
205}
206
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700207void MarkSweep::ScanDirtyCardCallback(Object* obj, void* arg) {
208 MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
209 mark_sweep->ScanObject(obj);
210}
211
212void MarkSweep::ScanGrayObjects() {
213 const std::vector<Space*>& spaces = heap_->GetSpaces();
214 CardTable* card_table = heap_->GetCardTable();
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700215 ScanImageRootVisitor image_root_visitor(this);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700216 for (size_t i = 0; i < spaces.size(); ++i) {
217 byte* begin = spaces[i]->Begin();
218 byte* end = spaces[i]->End();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700219 // Image spaces are handled properly since live == marked for them.
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700220 card_table->Scan(spaces[i]->GetMarkBitmap(), begin, end, image_root_visitor);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700221 }
222}
223
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700224class CheckBitmapVisitor {
225 public:
226 CheckBitmapVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {
227
228 }
229
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700230 void operator ()(const Object* obj) const
231 SHARED_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_,
232 GlobalSynchronization::mutator_lock_) {
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700233 DCHECK(obj != NULL);
234 mark_sweep_->CheckObject(obj);
235 }
236
237 private:
238 MarkSweep* mark_sweep_;
239};
240
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700241void MarkSweep::VerifyImageRoots() {
242 // Verify roots ensures that all the references inside the image space point
243 // objects which are either in the image space or marked objects in the alloc
244 // space
245#ifndef NDEBUG
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700246 CheckBitmapVisitor visitor(this);
247 const Spaces& spaces = heap_->GetSpaces();
248 for (Spaces::const_iterator it = spaces.begin(); it != spaces.end(); ++it) {
249 const Space* space = *it;
250 if (space->IsImageSpace()) {
251 uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin());
252 uintptr_t end = reinterpret_cast<uintptr_t>(space->End());
253 SpaceBitmap* live_bitmap = space->GetLiveBitmap();
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700254 DCHECK(live_bitmap != NULL);
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700255 live_bitmap->VisitMarkedRange(begin, end, visitor);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700256 }
257 }
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700258#endif
259}
260
// Populates the mark stack based on the set of marked objects and
// recursively marks until the mark stack is emptied.
void MarkSweep::RecursiveMark(bool partial) {
  // RecursiveMark will build the lists of known instances of the Reference classes.
  // See DelayReferenceReferent for details.
  CHECK(soft_reference_list_ == NULL);
  CHECK(weak_reference_list_ == NULL);
  CHECK(finalizer_reference_list_ == NULL);
  CHECK(phantom_reference_list_ == NULL);
  CHECK(cleared_reference_list_ == NULL);

  void* arg = reinterpret_cast<void*>(this);
  const Spaces& spaces = heap_->GetSpaces();

  // Walk each collected space's mark bitmap in address order.  A partial
  // collection skips GCRP_FULL_COLLECT spaces (their mark bits were copied
  // from the live bits by CopyMarkBits instead).
  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT ||
        (!partial && space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT)
        ) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(space->End());

      // Keep current_mark_bitmap_ in sync with the space being walked so
      // MarkObject0's locality fast path stays effective.
      current_mark_bitmap_ = space->GetMarkBitmap();
      current_mark_bitmap_->ScanWalk(begin, end, &ScanBitmapCallback, arg);
    }
  }
  // All bitmaps walked: park the finger at the top of the address space so
  // nothing further is pushed on the mark stack by the finger check.
  finger_ = reinterpret_cast<Object*>(~0);
  // TODO: tune the frequency of emptying the mark stack
  ProcessMarkStack();
}
291
// Scans objects on dirty cards (gray objects), then recursively marks
// everything newly reachable by draining the mark stack.
void MarkSweep::RecursiveMarkDirtyObjects() {
  ScanGrayObjects();
  ProcessMarkStack();
}
296
// Re-visits the root set; ReMarkObjectVisitor marks with check_finger ==
// true, so newly-discovered roots below the finger land on the mark stack.
void MarkSweep::ReMarkRoots() {
  Runtime::Current()->VisitRoots(ReMarkObjectVisitor, this);
}
300
// Clears every JNI weak global whose referent is not set in |bitmap|,
// replacing the entry with the kClearedJniWeakGlobal sentinel.  Holds the
// VM's weak-globals lock for the duration of the walk.
void MarkSweep::SweepJniWeakGlobals(HeapBitmap* bitmap) {
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  MutexLock mu(vm->weak_globals_lock);
  IndirectReferenceTable* table = &vm->weak_globals;
  typedef IndirectReferenceTable::iterator It;  // TODO: C++0x auto
  for (It it = table->begin(), end = table->end(); it != end; ++it) {
    const Object** entry = *it;
    if (!bitmap->Test(*entry)) {
      *entry = kClearedJniWeakGlobal;
    }
  }
}
313
Mathieu Chartier46a23632012-08-07 18:44:40 -0700314void MarkSweep::SweepSystemWeaks(bool swap_bitmaps) {
315 Runtime* runtime = Runtime::Current();
316 runtime->GetInternTable()->SweepInternTableWeaks(swap_bitmaps ? IsLiveCallback : IsMarkedCallback,
317 this);
318 runtime->GetMonitorList()->SweepMonitorList(swap_bitmaps ? IsLiveCallback : IsMarkedCallback,
319 this);
320 SweepJniWeakGlobals(swap_bitmaps ? GetHeap()->GetLiveBitmap() : GetHeap()->GetMarkBitmap());
Carl Shapiro58551df2011-07-24 03:09:51 -0700321}
322
// Bundles the state SweepCallback/ZygoteSweepCallback need, smuggled through
// the bitmap walk's void* argument.
struct SweepCallbackContext {
  Heap* heap;
  AllocSpace* space;
};
327
// Frees a batch of dead objects discovered by SweepWalk and records the
// freed object/byte counts with the heap.  Requires exclusive hold of the
// heap bitmap lock.
void MarkSweep::SweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
  GlobalSynchronization::heap_bitmap_lock_->AssertExclusiveHeld();

  size_t freed_objects = num_ptrs;
  size_t freed_bytes = 0;
  SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
  Heap* heap = context->heap;
  AllocSpace* space = context->space;
  // Use a bulk free, that merges consecutive objects before freeing or free per object?
  // Documentation suggests better free performance with merging, but this may be at the expensive
  // of allocation.
  // TODO: investigate performance
  static const bool kUseFreeList = true;
  if (kUseFreeList) {
    // Sum allocation sizes first: FreeList below overwrites ptrs entries.
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
    }
    // AllocSpace::FreeList clears the value in ptrs, so perform after clearing the live bit
    space->FreeList(num_ptrs, ptrs);
  } else {
    // Per-object path: size and free each pointer individually.
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
      space->Free(obj);
    }
  }
  heap->RecordFree(freed_objects, freed_bytes);
}
357
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700358void MarkSweep::ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700359 GlobalSynchronization::heap_bitmap_lock_->AssertExclusiveHeld();
360
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700361 SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
362 Heap* heap = context->heap;
363 // We don't free any actual memory to avoid dirtying the shared zygote pages.
364 for (size_t i = 0; i < num_ptrs; ++i) {
365 Object* obj = static_cast<Object*>(ptrs[i]);
366 heap->GetLiveBitmap()->Clear(obj);
367 heap->GetCardTable()->MarkCard(obj);
368 }
369}
370
// Frees everything live-but-not-marked in the collected spaces.  |partial|
// restricts the sweep to GCRP_ALWAYS_COLLECT spaces; a full collection also
// sweeps GCRP_FULL_COLLECT (zygote) spaces via the non-freeing callback.
void MarkSweep::Sweep(bool partial) {
  // If we don't swap bitmaps then we can not do this concurrently.
  // (true selects the IsLive-based callbacks since the bitmaps are
  // pre-swapped at this point — see the ALWAYS_COLLECT branch below.)
  SweepSystemWeaks(true);

  DCHECK(mark_stack_->IsEmpty());

  const Spaces& spaces = heap_->GetSpaces();
  SweepCallbackContext scc;
  scc.heap = heap_;
  for (size_t i = 0; i < spaces.size(); ++i) {
    Space* space = spaces[i];
    if (
        space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT ||
        (!partial && space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT)
        ) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(space->End());
      scc.space = space->AsAllocSpace();
      SpaceBitmap* live_bitmap = space->GetLiveBitmap();
      SpaceBitmap* mark_bitmap = space->GetMarkBitmap();
      if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT) {
        // Bitmaps are pre-swapped for optimization which enables sweeping with the heap unlocked.
        SpaceBitmap::SweepWalk(
            *mark_bitmap, *live_bitmap, begin, end, &SweepCallback, reinterpret_cast<void*>(&scc));
      } else {
        // Zygote sweep takes care of dirtying cards and clearing live bits, does not free actual memory.
        SpaceBitmap::SweepWalk(
            *live_bitmap, *mark_bitmap, begin, end, &ZygoteSweepCallback, reinterpret_cast<void*>(&scc));
      }
    }
  }
}
403
Carl Shapiro69759ea2011-07-21 18:13:35 -0700404// Scans instance fields.
Elliott Hughesb0663112011-10-19 18:16:37 -0700405inline void MarkSweep::ScanInstanceFields(const Object* obj) {
Carl Shapiro69759ea2011-07-21 18:13:35 -0700406 DCHECK(obj != NULL);
Brian Carlstrom4873d462011-08-21 15:23:39 -0700407 Class* klass = obj->GetClass();
408 DCHECK(klass != NULL);
Elliott Hughes2da50362011-10-10 16:57:08 -0700409 ScanFields(obj, klass->GetReferenceInstanceOffsets(), false);
Brian Carlstrom4873d462011-08-21 15:23:39 -0700410}
411
// Scans static storage on a Class.
// Marks every object referenced from the class's static reference fields,
// using the class's precomputed static reference-offset bitmap when present.
inline void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  ScanFields(klass, klass->GetReferenceStaticOffsets(), true);
}
417
// Marks every object referenced from |obj|'s reference fields.
// |ref_offsets| is the class's precomputed reference-offset bitmap; the
// CLASS_WALK_SUPER sentinel means no bitmap is available and the fields
// must be enumerated reflectively.  |is_static| selects static (class)
// versus instance fields.
inline void MarkSweep::ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
    while (ref_offsets != 0) {
      // Peel off the highest set bit and convert it to a field byte offset.
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject<const Object*>(byte_offset, false);
      MarkObject(ref);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        MemberOffset field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        MarkObject(ref);
      }
    }
  }
}
450
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700451void MarkSweep::CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700452 const Spaces& spaces = heap_->GetSpaces();
453 // TODO: C++0x auto
454 for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
455 if ((*cur)->IsAllocSpace() && (*cur)->Contains(ref)) {
456 DCHECK(IsMarked(obj));
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700457
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700458 bool is_marked = IsMarked(ref);
459 if (!is_marked) {
460 LOG(INFO) << **cur;
461 LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref)
462 << "' (" << reinterpret_cast<const void*>(ref) << ") in '" << PrettyTypeOf(obj)
463 << "' (" << reinterpret_cast<const void*>(obj) << ") at offset "
464 << reinterpret_cast<void*>(offset.Int32Value()) << " wasn't marked";
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700465
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700466 const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
467 DCHECK(klass != NULL);
468 const ObjectArray<Field>* fields = is_static ? klass->GetSFields() : klass->GetIFields();
469 DCHECK(fields != NULL);
470 bool found = false;
471 for (int32_t i = 0; i < fields->GetLength(); ++i) {
472 const Field* cur = fields->Get(i);
473 if (cur->GetOffset().Int32Value() == offset.Int32Value()) {
474 LOG(WARNING) << "Field referencing the alloc space was " << PrettyField(cur);
475 found = true;
476 break;
477 }
478 }
479 if (!found) {
480 LOG(WARNING) << "Could not find field in object alloc space with offset " << offset.Int32Value();
481 }
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700482
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700483 bool obj_marked = heap_->GetCardTable()->IsDirty(obj);
484 if (!obj_marked) {
485 LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' "
486 << "(" << reinterpret_cast<const void*>(obj) << ") contains references to "
487 << "the alloc space, but wasn't card marked";
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700488 }
489 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700490 }
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700491 break;
Ian Rogers5d76c432011-10-31 21:42:49 -0700492 }
493}
494
Carl Shapiro69759ea2011-07-21 18:13:35 -0700495// Scans the header, static field references, and interface pointers
496// of a class object.
Elliott Hughesb0663112011-10-19 18:16:37 -0700497inline void MarkSweep::ScanClass(const Object* obj) {
Elliott Hughes352a4242011-10-31 15:15:21 -0700498#ifndef NDEBUG
499 ++class_count_;
500#endif
Brian Carlstrom693267a2011-09-06 09:25:34 -0700501 ScanInstanceFields(obj);
Brian Carlstrom40381fb2011-10-19 14:13:40 -0700502 ScanStaticFields(obj->AsClass());
Carl Shapiro69759ea2011-07-21 18:13:35 -0700503}
504
505// Scans the header of all array objects. If the array object is
506// specialized to a reference type, scans the array data as well.
Elliott Hughesb0663112011-10-19 18:16:37 -0700507inline void MarkSweep::ScanArray(const Object* obj) {
Elliott Hughes352a4242011-10-31 15:15:21 -0700508#ifndef NDEBUG
509 ++array_count_;
510#endif
Carl Shapiro69759ea2011-07-21 18:13:35 -0700511 MarkObject(obj->GetClass());
512 if (obj->IsObjectArray()) {
Brian Carlstromdb4d5402011-08-09 12:18:28 -0700513 const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
Elliott Hughesd8ddfd52011-08-15 14:32:53 -0700514 for (int32_t i = 0; i < array->GetLength(); ++i) {
Ian Rogers5d76c432011-10-31 21:42:49 -0700515 const Object* element = array->GetWithoutChecks(i);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700516 MarkObject(element);
517 }
518 }
519}
520
// Process the "referent" field in a java.lang.ref.Reference. If the
// referent has not yet been marked, put it on the appropriate list in
// the gcHeap for later processing.
void MarkSweep::DelayReferenceReferent(Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  DCHECK(klass->IsReferenceClass());
  // A non-NULL pending field presumably means the reference is already
  // enqueued on some list — TODO(review): confirm against Heap's queue code.
  Object* pending = obj->GetFieldObject<Object*>(heap_->GetReferencePendingNextOffset(), false);
  Object* referent = heap_->GetReferenceReferent(obj);
  if (pending == NULL && referent != NULL && !IsMarked(referent)) {
    // Pick the list matching the reference's strength class.
    Object** list = NULL;
    if (klass->IsSoftReferenceClass()) {
      list = &soft_reference_list_;
    } else if (klass->IsWeakReferenceClass()) {
      list = &weak_reference_list_;
    } else if (klass->IsFinalizerReferenceClass()) {
      list = &finalizer_reference_list_;
    } else if (klass->IsPhantomReferenceClass()) {
      list = &phantom_reference_list_;
    }
    DCHECK(list != NULL) << PrettyClass(klass) << " " << std::hex << klass->GetAccessFlags();
    heap_->EnqueuePendingReference(obj, list);
  }
}
546
// Scans the header and field references of a data object. If the
// scanned object is a reference subclass, it is scheduled for later
// processing.
inline void MarkSweep::ScanOther(const Object* obj) {
#ifndef NDEBUG
  ++other_count_;
#endif
  ScanInstanceFields(obj);
  // java.lang.ref.Reference instances defer their referent to the
  // appropriate reference list instead of marking it here.
  if (obj->GetClass()->IsReferenceClass()) {
    DelayReferenceReferent(const_cast<Object*>(obj));
  }
}
559
560// Scans an object reference. Determines the type of the reference
561// and dispatches to a specialized scanning routine.
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700562void MarkSweep::ScanObject(const Object* obj) {
Carl Shapiro69759ea2011-07-21 18:13:35 -0700563 DCHECK(obj != NULL);
564 DCHECK(obj->GetClass() != NULL);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700565 DCHECK(heap_->GetMarkBitmap()->Test(obj));
Carl Shapiro69759ea2011-07-21 18:13:35 -0700566 if (obj->IsClass()) {
567 ScanClass(obj);
Brian Carlstromb63ec392011-08-27 17:38:27 -0700568 } else if (obj->IsArrayInstance()) {
Carl Shapiro69759ea2011-07-21 18:13:35 -0700569 ScanArray(obj);
570 } else {
Carl Shapiro58551df2011-07-24 03:09:51 -0700571 ScanOther(obj);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700572 }
573}
574
Ian Rogers5d76c432011-10-31 21:42:49 -0700575// Scan anything that's on the mark stack.
Carl Shapiro69759ea2011-07-21 18:13:35 -0700576void MarkSweep::ProcessMarkStack() {
577 while (!mark_stack_->IsEmpty()) {
Brian Carlstrom4873d462011-08-21 15:23:39 -0700578 const Object* obj = mark_stack_->Pop();
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700579 DCHECK(obj != NULL);
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700580 ScanObject(obj);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700581 }
582}
583
// Walks the reference list marking any references subject to the
// reference clearing policy.  References with a black referent are
// removed from the list.  References with white referents biased
// toward saving are blackened and also removed from the list.
void MarkSweep::PreserveSomeSoftReferences(Object** list) {
  DCHECK(list != NULL);
  Object* clear = NULL;
  size_t counter = 0;

  DCHECK(mark_stack_->IsEmpty());

  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent == NULL) {
      // Referent was cleared by the user during marking.
      continue;
    }
    bool is_marked = IsMarked(referent);
    // Preserve every other white referent (the "& 1" alternation biases
    // roughly half of them toward saving).
    if (!is_marked && ((++counter) & 1)) {
      // Referent is white and biased toward saving, mark it.
      MarkObject(referent);
      is_marked = true;
    }
    if (!is_marked) {
      // Referent is white, queue it for clearing.
      heap_->EnqueuePendingReference(ref, &clear);
    }
  }
  // Hand back only the references whose referents stayed white.
  *list = clear;
  // Restart the mark with the newly black references added to the
  // root set.
  ProcessMarkStack();
}
618
// Unlink the reference list clearing references objects with white
// referents.  Cleared references registered to a reference queue are
// scheduled for appending by the heap worker thread.
void MarkSweep::ClearWhiteReferences(Object** list) {
  DCHECK(list != NULL);
  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      // Referent is white, clear it.
      heap_->ClearReferenceReferent(ref);
      // Only references registered with a queue get delivered to it.
      if (heap_->IsEnqueuable(ref)) {
        heap_->EnqueueReference(ref, &cleared_reference_list_);
      }
    }
  }
  // The loop drains the list completely.
  DCHECK(*list == NULL);
}
637
638// Enqueues finalizer references with white referents. White
639// referents are blackened, moved to the zombie field, and the
640// referent field is cleared.
641void MarkSweep::EnqueueFinalizerReferences(Object** list) {
642 DCHECK(list != NULL);
Elliott Hughesb3bd5f02012-03-08 21:05:27 -0800643 MemberOffset zombie_offset = heap_->GetFinalizerReferenceZombieOffset();
Carl Shapiro69759ea2011-07-21 18:13:35 -0700644 bool has_enqueued = false;
645 while (*list != NULL) {
Elliott Hughesb3bd5f02012-03-08 21:05:27 -0800646 Object* ref = heap_->DequeuePendingReference(list);
647 Object* referent = heap_->GetReferenceReferent(ref);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700648 if (referent != NULL && !IsMarked(referent)) {
649 MarkObject(referent);
650 // If the referent is non-null the reference must queuable.
Elliott Hughesb3bd5f02012-03-08 21:05:27 -0800651 DCHECK(heap_->IsEnqueuable(ref));
Ian Rogers0cfe1fb2011-08-26 03:29:44 -0700652 ref->SetFieldObject(zombie_offset, referent, false);
Elliott Hughesb3bd5f02012-03-08 21:05:27 -0800653 heap_->ClearReferenceReferent(ref);
654 heap_->EnqueueReference(ref, &cleared_reference_list_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700655 has_enqueued = true;
656 }
657 }
658 if (has_enqueued) {
659 ProcessMarkStack();
660 }
661 DCHECK(*list == NULL);
662}
663
Carl Shapiro58551df2011-07-24 03:09:51 -0700664// Process reference class instances and schedule finalizations.
Carl Shapiro69759ea2011-07-21 18:13:35 -0700665void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
666 Object** weak_references,
667 Object** finalizer_references,
668 Object** phantom_references) {
669 DCHECK(soft_references != NULL);
670 DCHECK(weak_references != NULL);
671 DCHECK(finalizer_references != NULL);
672 DCHECK(phantom_references != NULL);
673
674 // Unless we are in the zygote or required to clear soft references
675 // with white references, preserve some white referents.
Ian Rogers2945e242012-06-03 14:45:16 -0700676 if (!clear_soft && !Runtime::Current()->IsZygote()) {
Carl Shapiro69759ea2011-07-21 18:13:35 -0700677 PreserveSomeSoftReferences(soft_references);
678 }
679
680 // Clear all remaining soft and weak references with white
681 // referents.
682 ClearWhiteReferences(soft_references);
683 ClearWhiteReferences(weak_references);
684
685 // Preserve all white objects with finalize methods and schedule
686 // them for finalization.
687 EnqueueFinalizerReferences(finalizer_references);
688
689 // Clear all f-reachable soft and weak references with white
690 // referents.
691 ClearWhiteReferences(soft_references);
692 ClearWhiteReferences(weak_references);
693
694 // Clear all phantom references with white referents.
695 ClearWhiteReferences(phantom_references);
696
697 // At this point all reference lists should be empty.
698 DCHECK(*soft_references == NULL);
699 DCHECK(*weak_references == NULL);
700 DCHECK(*finalizer_references == NULL);
701 DCHECK(*phantom_references == NULL);
702}
703
Carl Shapiro69759ea2011-07-21 18:13:35 -0700704MarkSweep::~MarkSweep() {
Elliott Hughes352a4242011-10-31 15:15:21 -0700705#ifndef NDEBUG
Elliott Hughes4dd9b4d2011-12-12 18:29:24 -0800706 VLOG(heap) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_ << " other=" << other_count_;
Elliott Hughes352a4242011-10-31 15:15:21 -0700707#endif
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700708 // Clear all of the alloc spaces' mark bitmaps.
709 const Spaces& spaces = heap_->GetSpaces();
710 // TODO: C++0x auto
711 for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700712 if ((*cur)->GetGcRetentionPolicy() != GCRP_NEVER_COLLECT) {
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700713 (*cur)->GetMarkBitmap()->Clear();
714 }
715 }
Mathieu Chartier5301cd22012-05-31 12:11:36 -0700716 mark_stack_->Reset();
Carl Shapiro69759ea2011-07-21 18:13:35 -0700717}
718
719} // namespace art