/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MARK_SWEEP_H_
#define ART_SRC_MARK_SWEEP_H_

#include "atomic_stack.h"
#include "macros.h"
#include "heap_bitmap.h"
#include "object.h"
#include "offsets.h"

namespace art {

class CheckObjectVisitor;
class Class;
class Heap;
class MarkIfReachesAllocspaceVisitor;
class ModUnionClearCardVisitor;
class ModUnionVisitor;
class ModUnionTableBitmap;
class Object;
class TimingLogger;

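// Mark-sweep garbage collector used by the ART heap. A collection is driven externally by the
// Heap; the following is only a rough, illustrative sketch of the expected phase ordering based
// on the public interface below (the real driver lives in the Heap implementation and may
// interleave additional steps such as card scanning, bitmap binding, and system weak sweeping):
//
//   MarkSweep mark_sweep(mark_stack);            // mark stack supplied by the heap
//   mark_sweep.Init();
//   mark_sweep.MarkRoots();                      // with mutators suspended
//   mark_sweep.RecursiveMark(partial, timings);  // trace from the roots
//   mark_sweep.ReMarkRoots();                    // concurrent collections only
//   mark_sweep.ProcessReferences(clear_soft_references);
//   mark_sweep.Sweep(partial, swap_bitmaps);     // reclaim unmarked objects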
class MarkSweep {
 public:
  explicit MarkSweep(ObjectStack* mark_stack);

  ~MarkSweep();

  // Initializes internal structures.
  void Init();

  // Finds the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verifies that image roots point only to marked objects within the alloc space.
  void VerifyImageRoots() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark(bool partial, TimingLogger& timings)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Copies mark bits from the live bitmap of the ZygoteSpace to the mark bitmap for partial GCs.
  void CopyMarkBits(ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

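  // Binds the space's live bitmap to its mark bitmap so that every live object in the space is
  // treated as marked; intended for spaces that are not collected during this GC (typically the
  // image and zygote spaces).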
  void BindLiveToMarkBitmap(ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

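  // Restores the original mark bitmaps of any spaces that were bound above.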
  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks
  // until it empties.
  void RecursiveMarkDirtyObjects(bool update_finger)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively marks objects on the specified cards. Updates the finger.
  void RecursiveMarkCards(CardTable* card_table, const std::vector<byte*>& cards,
                          TimingLogger& timings)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  Heap* GetHeap() {
    return heap_;
  }

  void ProcessReferences(bool clear_soft_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
    ProcessReferences(&soft_reference_list_, clear_soft_references,
                      &weak_reference_list_,
                      &finalizer_reference_list_,
                      &phantom_reference_list_);
  }

  // Sweeps unmarked objects to complete the garbage collection.
  void Sweep(bool partial, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only pointers within an array. WARNING: Trashes objects.
  void SweepArray(TimingLogger& logger, ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Proxy for external access to ScanObject.
  void ScanRoot(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

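  // The finger records how far the recursive bitmap scan has progressed. Objects that become
  // marked at addresses the scan has already passed are pushed onto the mark stack so they are
  // not missed; DisableFinger() moves the finger to the highest possible address so that every
  // newly marked object is pushed.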
  void SetFinger(Object* new_finger) {
    finger_ = new_finger;
  }

  void DisableFinger() {
    SetFinger(reinterpret_cast<Object*>(~static_cast<uintptr_t>(0)));
  }

  size_t GetFreedBytes() const {
    return freed_bytes_;
  }

  size_t GetFreedObjects() const {
    return freed_objects_;
  }

  // Everything inside the immune range is marked.
  void SetImmuneRange(Object* begin, Object* end) {
    immune_begin_ = begin;
    immune_end_ = end;
  }

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Only sweeps the system weaks that are inside the allocation stack.
  void SweepSystemWeaksArray(ObjectStack* allocations)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool VerifyIsLiveCallback(const Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verifies that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

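  // Visits every reference held by obj, dispatching on whether obj is a class, an array, or an
  // ordinary instance. The visitor is invoked as visitor(obj, ref, field_offset, is_static).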
  template <typename Visitor>
  static void VisitObjectReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_) {
    DCHECK(obj != NULL);
    DCHECK(obj->GetClass() != NULL);
    if (obj->IsClass()) {
      VisitClassReferences(obj, visitor);
    } else if (obj->IsArrayInstance()) {
      VisitArrayReferences(obj, visitor);
    } else {
      VisitOtherReferences(obj, visitor);
    }
  }

 private:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const Object* object) const;

  static bool IsMarkedCallback(const Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool IsMarkedArrayCallback(const Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkObjectVisitor(const Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ReMarkObjectVisitor(const Object* root, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static void ScanDirtyCardCallback(Object* obj, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Yuck.
  void MarkObject0(const Object* obj, bool check_finger)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ScanBitmapCallback(Object* obj, void* finger, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  static void SweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for the zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Grays references in instance fields.
  void ScanInstanceFields(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Verifies the roots of the heap and prints out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  static void VerifyRootCallback(const Object* root, void* arg, size_t vreg,
                                 const AbstractMethod* method);

  void VerifyRoot(const Object* root, size_t vreg, const AbstractMethod* method)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    DCHECK(obj != NULL);
    Class* klass = obj->GetClass();
    DCHECK(klass != NULL);
    VisitFieldsReferences(obj, klass->GetReferenceInstanceOffsets(), false, visitor);
  }

  // Blackens a class object.
  void ScanClass(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitClassReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    VisitInstanceFieldsReferences(obj, visitor);
    VisitStaticFieldsReferences(obj->AsClass(), visitor);
  }

  // Grays references in static fields.
  void ScanStaticFields(const Class* klass)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(const Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    DCHECK(klass != NULL);
    VisitFieldsReferences(klass, klass->GetReferenceStaticOffsets(), true, visitor);
  }

  // Used by ScanInstanceFields and ScanStaticFields.
  void ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(const Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_) {
    if (ref_offsets != CLASS_WALK_SUPER) {
      // Found a reference offset bitmap. Mark the specified offsets.
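      // Each set bit in ref_offsets, counted from the most significant end, identifies one
      // reference field: CLZ finds the next set bit, CLASS_OFFSET_FROM_CLZ converts that bit
      // position into the field's byte offset, and the bit is cleared before continuing.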
      while (ref_offsets != 0) {
        size_t right_shift = CLZ(ref_offsets);
        MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        visitor(obj, ref, field_offset, is_static);
        ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
      }
    } else {
      // There is no reference offset bitmap. In the non-static case,
      // walk up the class inheritance hierarchy and find reference
      // offsets the hard way. In the static case, just consider this
      // class.
      for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
           klass != NULL;
           klass = is_static ? NULL : klass->GetSuperClass()) {
        size_t num_reference_fields = (is_static
                                       ? klass->NumReferenceStaticFields()
                                       : klass->NumReferenceInstanceFields());
        for (size_t i = 0; i < num_reference_fields; ++i) {
          Field* field = (is_static
                          ? klass->GetStaticField(i)
                          : klass->GetInstanceField(i));
          MemberOffset field_offset = field->GetOffset();
          const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
          visitor(obj, ref, field_offset, is_static);
        }
      }
    }
  }

  // Grays references in an array.
  void ScanArray(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitArrayReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    visitor(obj, obj->GetClass(), Object::ClassOffset(), false);
    if (obj->IsObjectArray()) {
      const ObjectArray<Object>* array = obj->AsObjectArray<Object>();
      for (int32_t i = 0; i < array->GetLength(); ++i) {
        const Object* element = array->GetWithoutChecks(i);
        size_t width = sizeof(Object*);
        visitor(obj, element, MemberOffset(i * width + Array::DataOffset(width).Int32Value()), false);
      }
    }
  }

  void ScanOther(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitOtherReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool update_finger)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ProcessReferences(Object** soft_references, bool clear_soft_references,
                         Object** weak_references,
                         Object** finalizer_references,
                         Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SweepJniWeakGlobals(Heap::IsMarkedTester is_marked, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Current mark bitmap; checked first to avoid searching for the appropriate bitmap for an object.
  SpaceBitmap* current_mark_bitmap_;

  ObjectStack* mark_stack_;

  Heap* heap_;

  Object* finger_;

  // Immune range: every object inside it is assumed to be marked.
  Object* immune_begin_;
  Object* immune_end_;

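  // Reference objects discovered during marking, queued onto per-type lists and later handled by
  // ProcessReferences().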
  Object* soft_reference_list_;

  Object* weak_reference_list_;

  Object* finalizer_reference_list_;

  Object* phantom_reference_list_;

  Object* cleared_reference_list_;

  size_t freed_bytes_;
  size_t freed_objects_;

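  // Counts of objects scanned, broken down by kind, kept for debugging statistics.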
  size_t class_count_;
  size_t array_count_;
  size_t other_count_;

  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CheckBitmapVisitor;
  friend class CheckObjectVisitor;
  friend class CheckReferenceVisitor;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace art

#endif  // ART_SRC_MARK_SWEEP_H_