/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "mark_sweep.h"

#include <climits>
#include <vector>

#include "class_loader.h"
#include "dex_cache.h"
#include "heap.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "logging.h"
#include "macros.h"
#include "mark_stack.h"
#include "monitor.h"
#include "object.h"
#include "runtime.h"
#include "scoped_heap_lock.h"
#include "space.h"
#include "timing_logger.h"
#include "thread.h"

namespace art {

MarkSweep::MarkSweep(MarkStack* mark_stack)
    : current_mark_bitmap_(NULL),
      mark_stack_(mark_stack),
      heap_(NULL),
      finger_(NULL),
      condemned_(NULL),
      soft_reference_list_(NULL),
      weak_reference_list_(NULL),
      finalizer_reference_list_(NULL),
      phantom_reference_list_(NULL),
      cleared_reference_list_(NULL),
      class_count_(0), array_count_(0), other_count_(0) {
  DCHECK(mark_stack_ != NULL);
}

void MarkSweep::Init() {
  heap_ = Runtime::Current()->GetHeap();
  mark_stack_->Reset();

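  // Cache the mark bitmap of the first alloc space; MarkObject0 tests this bitmap first to
  // exploit locality of references before falling back to a per-address bitmap lookup.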
  const Spaces& spaces = heap_->GetSpaces();
  // TODO: C++0x auto
  for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
    if ((*cur)->IsAllocSpace()) {
      current_mark_bitmap_ = (*cur)->GetMarkBitmap();
      break;
    }
  }
  // TODO: if concurrent, enable card marking in compiler
  // TODO: check that the mark bitmap is entirely clear.
}

void MarkSweep::MarkObject0(const Object* obj, bool check_finger) {
  DCHECK(obj != NULL);

  SpaceBitmap* space_bitmap = NULL;
  // Try to take advantage of locality of references within a space; failing that, find the
  // space the hard way.
  if (current_mark_bitmap_->HasAddress(obj)) {
    space_bitmap = current_mark_bitmap_;
  } else {
    space_bitmap = heap_->GetMarkBitmap()->GetSpaceBitmap(obj);
  }

  if (obj < condemned_) {
    DCHECK(IsMarked(obj));
    return;
  }
  bool is_marked = space_bitmap->Test(obj);
  // This object was not previously marked.
  if (!is_marked) {
    space_bitmap->Set(obj);
    if (check_finger && obj < finger_) {
      // The object must be pushed on to the mark stack.
      mark_stack_->Push(obj);
    }
  }
}

// Used to mark objects when recursing. Recursion is done by moving
// the finger across the bitmaps in address order and marking child
// objects. Any newly-marked objects whose addresses are lower than
// the finger won't be visited by the bitmap scan, so those objects
// need to be added to the mark stack.
void MarkSweep::MarkObject(const Object* obj) {
  if (obj != NULL) {
    MarkObject0(obj, true);
  }
}

void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  DCHECK(mark_sweep->finger_ == NULL);  // no point checking the finger if it is NULL
  mark_sweep->MarkObject0(root, false);
}

void MarkSweep::ReMarkObjectVisitor(const Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->MarkObject0(root, true);
}

// Marks all objects in the root set.
void MarkSweep::MarkRoots() {
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}

void MarkSweep::ScanImageRootVisitor(Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  // We do not need to mark since live == marked for image spaces.
  mark_sweep->ScanObject(root);
}

class CheckObjectVisitor {
 public:
  CheckObjectVisitor(MarkSweep* const mark_sweep)
      : mark_sweep_(mark_sweep) {}

  void operator ()(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) const {
    mark_sweep_->CheckReference(obj, ref, offset, is_static);
  }

 private:
  MarkSweep* const mark_sweep_;
};

void MarkSweep::CheckObject(const Object* obj) {
  DCHECK(obj != NULL);
  CheckObjectVisitor visitor(this);
  VisitObjectReferences(obj, visitor);
}

void MarkSweep::VerifyImageRootVisitor(Object* root, void* arg) {
  DCHECK(root != NULL);
  DCHECK(arg != NULL);
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  DCHECK(mark_sweep->heap_->GetMarkBitmap()->Test(root));
  mark_sweep->CheckObject(root);
}

// Marks all objects that are in images and have been touched by the mutator.
void MarkSweep::ScanDirtyImageRoots() {
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  CardTable* card_table = heap_->GetCardTable();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsImageSpace()) {
      byte* begin = spaces[i]->Begin();
      byte* end = spaces[i]->End();
      card_table->Scan(spaces[i]->GetLiveBitmap(), begin, end, ScanImageRootVisitor, this);
    }
  }
}

void MarkSweep::CheckBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->CheckObject(obj);
}

void MarkSweep::CheckBitmapNoFingerCallback(Object* obj, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->CheckObject(obj);
}

void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->finger_ = reinterpret_cast<Object*>(finger);
  mark_sweep->ScanObject(obj);
}

void MarkSweep::ScanDirtyCardCallback(Object* obj, void* arg) {
  MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg);
  mark_sweep->ScanObject(obj);
}

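// Scans objects sitting on dirty cards in every space, i.e. marked objects the mutator has
// touched and which may therefore now reference unmarked objects.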
void MarkSweep::ScanGrayObjects() {
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  CardTable* card_table = heap_->GetCardTable();
  for (size_t i = 0; i < spaces.size(); ++i) {
    byte* begin = spaces[i]->Begin();
    byte* end = spaces[i]->End();
    // Image spaces are handled properly since live == marked for them.
    card_table->Scan(spaces[i]->GetMarkBitmap(), begin, end, ScanImageRootVisitor, this);
  }
}

void MarkSweep::VerifyImageRoots() {
  // Verifying image roots ensures that all references inside the image space point to objects
  // which are either in the image space or are marked objects in the alloc space.
#ifndef NDEBUG
  void* arg = reinterpret_cast<void*>(this);
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsImageSpace()) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
      SpaceBitmap* live_bitmap = spaces[i]->GetLiveBitmap();
      DCHECK(live_bitmap != NULL);
      live_bitmap->ScanWalk(begin, end, CheckBitmapCallback, arg);
    }
  }
  finger_ = reinterpret_cast<Object*>(~0);
#endif
}

// Populates the mark stack based on the set of marked objects and
// recursively marks until the mark stack is emptied.
void MarkSweep::RecursiveMark() {
  // RecursiveMark will build the lists of known instances of the Reference classes.
  // See DelayReferenceReferent for details.
  CHECK(soft_reference_list_ == NULL);
  CHECK(weak_reference_list_ == NULL);
  CHECK(finalizer_reference_list_ == NULL);
  CHECK(phantom_reference_list_ == NULL);
  CHECK(cleared_reference_list_ == NULL);

  void* arg = reinterpret_cast<void*>(this);
  const std::vector<Space*>& spaces = heap_->GetSpaces();
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (spaces[i]->IsAllocSpace()) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
      current_mark_bitmap_ = spaces[i]->GetMarkBitmap();
      current_mark_bitmap_->ScanWalk(begin, end, &ScanBitmapCallback, arg);
    }
  }
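  // Move the finger to the highest possible address: any object marked from now on lies below
  // the finger and is therefore pushed onto the mark stack (see MarkObject0).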
  finger_ = reinterpret_cast<Object*>(~0);
  // TODO: tune the frequency of emptying the mark stack
  ProcessMarkStack();
}

void MarkSweep::RecursiveMarkDirtyObjects() {
  ScanGrayObjects();
  ProcessMarkStack();
}

void MarkSweep::ReMarkRoots() {
  Runtime::Current()->VisitRoots(ReMarkObjectVisitor, this);
}

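// Clears JNI weak globals whose referents were not marked, replacing each dead entry with the
// cleared weak global sentinel.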
void MarkSweep::SweepJniWeakGlobals() {
  JavaVMExt* vm = Runtime::Current()->GetJavaVM();
  MutexLock mu(vm->weak_globals_lock);
  IndirectReferenceTable* table = &vm->weak_globals;
  typedef IndirectReferenceTable::iterator It;  // TODO: C++0x auto
  for (It it = table->begin(), end = table->end(); it != end; ++it) {
    const Object** entry = *it;
    if (!heap_->GetMarkBitmap()->Test(*entry)) {
      *entry = kClearedJniWeakGlobal;
    }
  }
}

void MarkSweep::SweepSystemWeaks() {
  Runtime::Current()->GetInternTable()->SweepInternTableWeaks(IsMarked, this);
  Runtime::Current()->GetMonitorList()->SweepMonitorList(IsMarked, this);
  SweepJniWeakGlobals();
}

struct SweepCallbackContext {
  Heap* heap;
  AllocSpace* space;
};

void MarkSweep::SweepCallback(size_t num_ptrs, Object** ptrs, void* arg) {
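  // Sweeping itself can run with the heap unlocked (the bitmaps were swapped beforehand, see
  // Sweep), so take the heap lock here before touching the live bitmap and allocation records.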
  ScopedHeapLock lock;

  size_t freed_objects = num_ptrs;
  size_t freed_bytes = 0;
  SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg);
  Heap* heap = context->heap;
  AllocSpace* space = context->space;
  // Use a bulk free that merges consecutive objects before freeing, or free per object?
  // Documentation suggests better free performance with merging, but this may be at the
  // expense of allocation.
  // TODO: investigate performance
  static const bool kUseFreeList = true;
  if (kUseFreeList) {
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
      heap->GetLiveBitmap()->Clear(obj);
    }
    // AllocSpace::FreeList clears the values in ptrs, so perform this after clearing the live bits.
    space->FreeList(num_ptrs, ptrs);
  } else {
    for (size_t i = 0; i < num_ptrs; ++i) {
      Object* obj = static_cast<Object*>(ptrs[i]);
      freed_bytes += space->AllocationSize(obj);
      heap->GetLiveBitmap()->Clear(obj);
      space->Free(obj);
    }
  }
  heap->RecordFreeLocked(freed_objects, freed_bytes);
}

void MarkSweep::Sweep() {
  SweepSystemWeaks();

  DCHECK(mark_stack_->IsEmpty());

  const std::vector<Space*>& spaces = heap_->GetSpaces();
  SweepCallbackContext scc;
  scc.heap = heap_;
  for (size_t i = 0; i < spaces.size(); ++i) {
    if (!spaces[i]->IsImageSpace()) {
      uintptr_t begin = reinterpret_cast<uintptr_t>(spaces[i]->Begin());
      uintptr_t end = reinterpret_cast<uintptr_t>(spaces[i]->End());
      scc.space = spaces[i]->AsAllocSpace();
      // Bitmaps are pre-swapped for optimization which enables sweeping with the heap unlocked.
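      // Because of the swap, GetMarkBitmap() currently returns the pre-GC live set and
      // GetLiveBitmap() the set built during this GC's marking, so SweepWalk visits objects
      // that were live before the collection but were not marked during it.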
      SpaceBitmap* live_bitmap = scc.space->GetMarkBitmap();
      SpaceBitmap* mark_bitmap = scc.space->GetLiveBitmap();
      SpaceBitmap::SweepWalk(*live_bitmap, *mark_bitmap, begin, end,
                             &MarkSweep::SweepCallback, reinterpret_cast<void*>(&scc));
    }
  }
}

// Scans instance fields.
inline void MarkSweep::ScanInstanceFields(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  ScanFields(obj, klass->GetReferenceInstanceOffsets(), false);
}

// Scans static storage on a Class.
inline void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  ScanFields(klass, klass->GetReferenceStaticOffsets(), true);
}

inline void MarkSweep::ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
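    // Each set bit in ref_offsets encodes the offset of one reference field: CLZ locates the
    // highest set bit, the field at that offset is marked, and the bit is cleared before the
    // next iteration.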
    while (ref_offsets != 0) {
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject<const Object*>(byte_offset, false);
      MarkObject(ref);
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        MemberOffset field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        MarkObject(ref);
      }
    }
  }
}

void MarkSweep::CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) {
  const Spaces& spaces = heap_->GetSpaces();
  // TODO: C++0x auto
  for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
    if ((*cur)->IsAllocSpace() && (*cur)->Contains(ref)) {
      DCHECK(IsMarked(obj));

      bool is_marked = IsMarked(ref);
      if (!is_marked) {
        LOG(INFO) << **cur;
        LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref)
                     << "' (" << reinterpret_cast<const void*>(ref) << ") in '" << PrettyTypeOf(obj)
                     << "' (" << reinterpret_cast<const void*>(obj) << ") at offset "
                     << reinterpret_cast<void*>(offset.Int32Value()) << " wasn't marked";

        const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
        DCHECK(klass != NULL);
        const ObjectArray<Field>* fields = is_static ? klass->GetSFields() : klass->GetIFields();
        DCHECK(fields != NULL);
        bool found = false;
        for (int32_t i = 0; i < fields->GetLength(); ++i) {
          const Field* cur = fields->Get(i);
          if (cur->GetOffset().Int32Value() == offset.Int32Value()) {
            LOG(WARNING) << "Field referencing the alloc space was " << PrettyField(cur);
            found = true;
            break;
          }
        }
        if (!found) {
          LOG(WARNING) << "Could not find field in object alloc space with offset " << offset.Int32Value();
        }

        bool obj_marked = heap_->GetCardTable()->IsDirty(obj);
        if (!obj_marked) {
          LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' "
                       << "(" << reinterpret_cast<const void*>(obj) << ") contains references to "
                       << "the alloc space, but wasn't card marked";
        }
      }
      break;
    }
  }
}

// Scans the header, static field references, and interface pointers
// of a class object.
inline void MarkSweep::ScanClass(const Object* obj) {
#ifndef NDEBUG
  ++class_count_;
#endif
  ScanInstanceFields(obj);
  ScanStaticFields(obj->AsClass());
}

// Scans the header of all array objects. If the array object is
// specialized to a reference type, scans the array data as well.
inline void MarkSweep::ScanArray(const Object* obj) {
#ifndef NDEBUG
  ++array_count_;
#endif
  MarkObject(obj->GetClass());
  if (obj->IsObjectArray()) {
    const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
    for (int32_t i = 0; i < array->GetLength(); ++i) {
      const Object* element = array->GetWithoutChecks(i);
      MarkObject(element);
    }
  }
}

// Process the "referent" field in a java.lang.ref.Reference. If the
// referent has not yet been marked, put it on the appropriate list in
// the heap for later processing.
void MarkSweep::DelayReferenceReferent(Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  DCHECK(klass->IsReferenceClass());
  Object* pending = obj->GetFieldObject<Object*>(heap_->GetReferencePendingNextOffset(), false);
  Object* referent = heap_->GetReferenceReferent(obj);
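  // A non-null pending-next field means the reference is already enqueued on some list, so only
  // unqueued references with unmarked referents are delayed here.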
  if (pending == NULL && referent != NULL && !IsMarked(referent)) {
    Object** list = NULL;
    if (klass->IsSoftReferenceClass()) {
      list = &soft_reference_list_;
    } else if (klass->IsWeakReferenceClass()) {
      list = &weak_reference_list_;
    } else if (klass->IsFinalizerReferenceClass()) {
      list = &finalizer_reference_list_;
    } else if (klass->IsPhantomReferenceClass()) {
      list = &phantom_reference_list_;
    }
    DCHECK(list != NULL) << PrettyClass(klass) << " " << std::hex << klass->GetAccessFlags();
    heap_->EnqueuePendingReference(obj, list);
  }
}

// Scans the header and field references of a data object. If the
// scanned object is a reference subclass, it is scheduled for later
// processing.
inline void MarkSweep::ScanOther(const Object* obj) {
#ifndef NDEBUG
  ++other_count_;
#endif
  ScanInstanceFields(obj);
  if (obj->GetClass()->IsReferenceClass()) {
    DelayReferenceReferent(const_cast<Object*>(obj));
  }
}

// Scans an object reference. Determines the type of the reference
// and dispatches to a specialized scanning routine.
inline void MarkSweep::ScanObject(const Object* obj) {
  DCHECK(obj != NULL);
  DCHECK(obj->GetClass() != NULL);
  DCHECK(heap_->GetMarkBitmap()->Test(obj));
  if (obj->IsClass()) {
    ScanClass(obj);
  } else if (obj->IsArrayInstance()) {
    ScanArray(obj);
  } else {
    ScanOther(obj);
  }
}

// Scan anything that's on the mark stack.
void MarkSweep::ProcessMarkStack() {
  while (!mark_stack_->IsEmpty()) {
    const Object* obj = mark_stack_->Pop();
    ScanObject(obj);
  }
}

// Walks the reference list marking any references subject to the
// reference clearing policy. References with a black referent are
// removed from the list. References with white referents biased
// toward saving are blackened and also removed from the list.
void MarkSweep::PreserveSomeSoftReferences(Object** list) {
  DCHECK(list != NULL);
  Object* clear = NULL;
  size_t counter = 0;

  DCHECK(mark_stack_->IsEmpty());

  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent == NULL) {
      // Referent was cleared by the user during marking.
      continue;
    }
    bool is_marked = IsMarked(referent);
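    // The counter's low bit alternates on each white referent seen, so roughly every other
    // white soft referent is preserved (blackened) rather than queued for clearing.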
    if (!is_marked && ((++counter) & 1)) {
      // Referent is white and biased toward saving, mark it.
      MarkObject(referent);
      is_marked = true;
    }
    if (!is_marked) {
      // Referent is white, queue it for clearing.
      heap_->EnqueuePendingReference(ref, &clear);
    }
  }
  *list = clear;
  // Restart the mark with the newly black references added to the
  // root set.
  ProcessMarkStack();
}

// Unlinks the reference list, clearing reference objects with white
// referents. Cleared references registered to a reference queue are
// scheduled for appending by the heap worker thread.
void MarkSweep::ClearWhiteReferences(Object** list) {
  DCHECK(list != NULL);
  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      // Referent is white, clear it.
      heap_->ClearReferenceReferent(ref);
      if (heap_->IsEnqueuable(ref)) {
        heap_->EnqueueReference(ref, &cleared_reference_list_);
      }
    }
  }
  DCHECK(*list == NULL);
}

// Enqueues finalizer references with white referents. White
// referents are blackened, moved to the zombie field, and the
// referent field is cleared.
void MarkSweep::EnqueueFinalizerReferences(Object** list) {
  DCHECK(list != NULL);
  MemberOffset zombie_offset = heap_->GetFinalizerReferenceZombieOffset();
  bool has_enqueued = false;
  while (*list != NULL) {
    Object* ref = heap_->DequeuePendingReference(list);
    Object* referent = heap_->GetReferenceReferent(ref);
    if (referent != NULL && !IsMarked(referent)) {
      MarkObject(referent);
      // If the referent is non-null the reference must be enqueuable.
      DCHECK(heap_->IsEnqueuable(ref));
      ref->SetFieldObject(zombie_offset, referent, false);
      heap_->ClearReferenceReferent(ref);
      heap_->EnqueueReference(ref, &cleared_reference_list_);
      has_enqueued = true;
    }
  }
  if (has_enqueued) {
    ProcessMarkStack();
  }
  DCHECK(*list == NULL);
}

// Process reference class instances and schedule finalizations.
void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
                                  Object** weak_references,
                                  Object** finalizer_references,
                                  Object** phantom_references) {
  DCHECK(soft_references != NULL);
  DCHECK(weak_references != NULL);
  DCHECK(finalizer_references != NULL);
  DCHECK(phantom_references != NULL);

  // Unless we are in the zygote or required to clear soft references
  // with white referents, preserve some white referents.
  if (!clear_soft && !Runtime::Current()->IsZygote()) {
    PreserveSomeSoftReferences(soft_references);
  }

  // Clear all remaining soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Preserve all white objects with finalize methods and schedule
  // them for finalization.
  EnqueueFinalizerReferences(finalizer_references);

  // Clear all f-reachable soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Clear all phantom references with white referents.
  ClearWhiteReferences(phantom_references);

  // At this point all reference lists should be empty.
  DCHECK(*soft_references == NULL);
  DCHECK(*weak_references == NULL);
  DCHECK(*finalizer_references == NULL);
  DCHECK(*phantom_references == NULL);
}

MarkSweep::~MarkSweep() {
#ifndef NDEBUG
  VLOG(heap) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_ << " other=" << other_count_;
#endif
  // Clear all of the alloc spaces' mark bitmaps.
  const Spaces& spaces = heap_->GetSpaces();
  // TODO: C++0x auto
  for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) {
    if ((*cur)->IsAllocSpace()) {
      (*cur)->GetMarkBitmap()->Clear();
    }
  }
  mark_stack_->Reset();
}

}  // namespace art