/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MARK_SWEEP_H_
#define ART_SRC_MARK_SWEEP_H_

#include "atomic_stack.h"
#include "heap_bitmap.h"
#include "macros.h"
#include "object.h"
#include "offsets.h"

namespace art {

class Barrier;
class CheckObjectVisitor;
class Class;
class Heap;
class MarkIfReachesAllocspaceVisitor;
class ModUnionClearCardVisitor;
class ModUnionVisitor;
class ModUnionTableBitmap;
class Object;
class TimingLogger;

class MarkSweep {
 public:
  explicit MarkSweep(ObjectStack* mark_stack);

  ~MarkSweep();

  // Initializes internal structures.
  void Init();

  // Finds the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRootsCheckpoint()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verify that image roots point only to marked objects within the alloc space.
  void VerifyImageRoots() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark(bool partial, TimingLogger& timings)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Copies mark bits from the live bitmap of the ZygoteSpace to the mark bitmap for partial GCs.
  void CopyMarkBits(ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void BindLiveToMarkBitmap(ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks
  // until it empties.
  void RecursiveMarkDirtyObjects(bool update_finger)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively marks objects on the specified cards. Updates the finger.
  void RecursiveMarkCards(CardTable* card_table, const std::vector<byte*>& cards,
                          TimingLogger& timings)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Heap* GetHeap() {
    return heap_;
  }

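  // Processes the collector's soft, weak, finalizer, and phantom reference lists by forwarding
  // them to the private ProcessReferences overload below.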
  void ProcessReferences(bool clear_soft_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ProcessReferences(&soft_reference_list_, clear_soft_references,
                      &weak_reference_list_,
                      &finalizer_reference_list_,
                      &phantom_reference_list_);
  }

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool partial, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only pointers within an array. WARNING: Trashes objects.
  void SweepArray(TimingLogger& logger, ObjectStack* allocation_stack, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Proxy for external access to ScanObject.
  void ScanRoot(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SetFinger(Object* new_finger) {
    finger_ = new_finger;
  }

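  // Moves the finger to the highest possible address so that every object compares below it,
  // effectively disabling finger checks.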
  void DisableFinger() {
    SetFinger(reinterpret_cast<Object*>(~static_cast<uintptr_t>(0)));
  }

  size_t GetFreedBytes() const {
    return freed_bytes_;
  }

  size_t GetFreedObjects() const {
    return freed_objects_;
  }

  // Everything inside the immune range is marked.
  void SetImmuneRange(Object* begin, Object* end) {
    immune_begin_ = begin;
    immune_end_ = end;
  }

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Only sweep the weaks that are inside the allocation stack.
  void SweepSystemWeaksArray(ObjectStack* allocations)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool VerifyIsLiveCallback(const Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

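  // Visits every reference held by obj, dispatching on whether obj is a class, an array
  // instance, or an ordinary object. The visitor is invoked as
  // visitor(obj, ref, field_offset, is_static) for each reference; a (hypothetical) functor
  // could look like:
  //   struct CountReferencesVisitor {
  //     void operator()(const Object* obj, const Object* ref, MemberOffset offset,
  //                     bool is_static) const;
  //   };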
  template <typename Visitor>
  static void VisitObjectReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_) {
    DCHECK(obj != NULL);
    DCHECK(obj->GetClass() != NULL);
    if (obj->IsClass()) {
      VisitClassReferences(obj, visitor);
    } else if (obj->IsArrayInstance()) {
      VisitArrayReferences(obj, visitor);
    } else {
      VisitOtherReferences(obj, visitor);
    }
  }

  static void MarkObjectVisitor(const Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier();

 private:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const Object* object) const;

  static bool IsMarkedCallback(const Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool IsMarkedArrayCallback(const Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ReMarkObjectVisitor(const Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static void ScanDirtyCardCallback(Object* obj, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Yuck.
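  // Marks obj; when check_finger is true, the newly marked object's address is also compared
  // against the current finger (see the definition in the corresponding .cc file).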
  void MarkObject0(const Object* obj, bool check_finger)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ScanBitmapCallback(Object* obj, void* finger, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Grays references in instance fields.
  void ScanInstanceFields(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  static void VerifyRootCallback(const Object* root, void* arg, size_t vreg,
                                 const AbstractMethod* method);

  void VerifyRoot(const Object* root, size_t vreg, const AbstractMethod* method)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    DCHECK(obj != NULL);
    Class* klass = obj->GetClass();
    DCHECK(klass != NULL);
    VisitFieldsReferences(obj, klass->GetReferenceInstanceOffsets(), false, visitor);
  }

  // Blackens a class object.
  void ScanClass(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitClassReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    VisitInstanceFieldsReferences(obj, visitor);
    VisitStaticFieldsReferences(obj->AsClass(), visitor);
  }

  // Grays references in static fields.
  void ScanStaticFields(const Class* klass)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(const Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    DCHECK(klass != NULL);
    VisitFieldsReferences(klass, klass->GetReferenceStaticOffsets(), true, visitor);
  }

  // Used by ScanInstanceFields and ScanStaticFields.
  void ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

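  // Visits the reference fields described by ref_offsets: when a reference offset bitmap is
  // present, each set bit is translated into a field offset; otherwise the class (and, for
  // instance fields, its superclasses) is walked field by field.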
  template <typename Visitor>
  static void VisitFieldsReferences(const Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_) {
    if (ref_offsets != CLASS_WALK_SUPER) {
      // Found a reference offset bitmap. Mark the specified offsets.
      while (ref_offsets != 0) {
        size_t right_shift = CLZ(ref_offsets);
        MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        visitor(obj, ref, field_offset, is_static);
        ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
      }
    } else {
      // There is no reference offset bitmap. In the non-static case,
      // walk up the class inheritance hierarchy and find reference
      // offsets the hard way. In the static case, just consider this
      // class.
      for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
           klass != NULL;
           klass = is_static ? NULL : klass->GetSuperClass()) {
        size_t num_reference_fields = (is_static
                                       ? klass->NumReferenceStaticFields()
                                       : klass->NumReferenceInstanceFields());
        for (size_t i = 0; i < num_reference_fields; ++i) {
          Field* field = (is_static
                          ? klass->GetStaticField(i)
                          : klass->GetInstanceField(i));
          MemberOffset field_offset = field->GetOffset();
          const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
          visitor(obj, ref, field_offset, is_static);
        }
      }
    }
  }

  // Grays references in an array.
  void ScanArray(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

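  // Visits the array's class pointer and, for object arrays, each element at its data offset.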
  template <typename Visitor>
  static void VisitArrayReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    visitor(obj, obj->GetClass(), Object::ClassOffset(), false);
    if (obj->IsObjectArray()) {
      const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
      for (int32_t i = 0; i < array->GetLength(); ++i) {
        const Object* element = array->GetWithoutChecks(i);
        size_t width = sizeof(Object*);
        visitor(obj, element, MemberOffset(i * width + Array::DataOffset(width).Int32Value()), false);
      }
    }
  }

  void ScanOther(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitOtherReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool update_finger)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ProcessReferences(Object** soft_references, bool clear_soft_references,
                         Object** weak_references,
                         Object** finalizer_references,
                         Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SweepJniWeakGlobals(Heap::IsMarkedTester is_marked, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Current mark bitmap; checked first to avoid searching for the right bitmap for an object.
  SpaceBitmap* current_mark_bitmap_;

  ObjectStack* mark_stack_;

  Heap* heap_;

  Object* finger_;

  // Immune range; every object inside it is assumed to be marked.
  Object* immune_begin_;
  Object* immune_end_;

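  // Reference lists passed to the private ProcessReferences overload by the public
  // ProcessReferences(bool) above.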
  Object* soft_reference_list_;

  Object* weak_reference_list_;

  Object* finalizer_reference_list_;

  Object* phantom_reference_list_;

  Object* cleared_reference_list_;

  size_t freed_bytes_;
  size_t freed_objects_;

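  // Counters for class objects, arrays, and other objects; most likely updated by ScanClass,
  // ScanArray, and ScanOther.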
  size_t class_count_;
  size_t array_count_;
  size_t other_count_;

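  // Synchronization barrier exposed through GetBarrier(); likely used for the checkpoint in
  // MarkRootsCheckpoint.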
  UniquePtr<Barrier> gc_barrier_;

  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CheckBitmapVisitor;
  friend class CheckObjectVisitor;
  friend class CheckReferenceVisitor;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace art

#endif  // ART_SRC_MARK_SWEEP_H_