blob: 98445d4376b767dfd916c1b13218f255d0a003a6 [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Carl Shapiro69759ea2011-07-21 18:13:35 -070016
17#ifndef ART_SRC_MARK_SWEEP_H_
18#define ART_SRC_MARK_SWEEP_H_
19
Mathieu Chartierd8195f12012-10-05 12:21:28 -070020#include "atomic_stack.h"
Brian Carlstrom578bbdc2011-07-21 14:07:47 -070021#include "macros.h"
Elliott Hughes5e71b522011-10-20 13:12:32 -070022#include "heap_bitmap.h"
Mathieu Chartierb43b7d42012-06-19 13:15:09 -070023#include "object.h"
Ian Rogers0cfe1fb2011-08-26 03:29:44 -070024#include "offsets.h"
Carl Shapiro69759ea2011-07-21 18:13:35 -070025
26namespace art {
27
Mathieu Chartier858f1c52012-10-17 17:45:55 -070028class Barrier;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -070029class CheckObjectVisitor;
Carl Shapiro69759ea2011-07-21 18:13:35 -070030class Class;
Elliott Hughesb3bd5f02012-03-08 21:05:27 -080031class Heap;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -070032class MarkIfReachesAllocspaceVisitor;
33class ModUnionClearCardVisitor;
34class ModUnionVisitor;
35class ModUnionTableBitmap;
Carl Shapiro69759ea2011-07-21 18:13:35 -070036class Object;
Mathieu Chartier357e9be2012-08-01 11:00:14 -070037class TimingLogger;
Mathieu Chartier02b6a782012-10-26 13:51:26 -070038class MarkStackChunk;
Carl Shapiro69759ea2011-07-21 18:13:35 -070039
40class MarkSweep {
41 public:
Mathieu Chartierd8195f12012-10-05 12:21:28 -070042 explicit MarkSweep(ObjectStack* mark_stack);
Carl Shapiro58551df2011-07-24 03:09:51 -070043
Carl Shapiro69759ea2011-07-21 18:13:35 -070044 ~MarkSweep();
45
Carl Shapiro58551df2011-07-24 03:09:51 -070046 // Initializes internal structures.
Jesse Wilson078f9b02011-11-18 17:51:47 -050047 void Init();
Carl Shapiro58551df2011-07-24 03:09:51 -070048
Mathieu Chartier7469ebf2012-09-24 16:28:36 -070049 // Find the default mark bitmap.
50 void FindDefaultMarkBitmap();
51
Carl Shapiro69759ea2011-07-21 18:13:35 -070052 // Marks the root set at the start of a garbage collection.
Ian Rogers00f7d0e2012-07-19 15:28:27 -070053 void MarkRoots()
Ian Rogersb726dcb2012-09-05 08:57:23 -070054 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
55 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -070056
Mathieu Chartier858f1c52012-10-17 17:45:55 -070057 void MarkNonThreadRoots()
58 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
59
Mathieu Chartier9ebae1f2012-10-15 17:38:16 -070060 void MarkConcurrentRoots();
61 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
62
Mathieu Chartier858f1c52012-10-17 17:45:55 -070063 void MarkRootsCheckpoint();
64 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
65
Mathieu Chartier262e5ff2012-06-01 17:35:38 -070066 // Verify that image roots point to only marked objects within the alloc space.
Ian Rogersb726dcb2012-09-05 08:57:23 -070067 void VerifyImageRoots() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -070068
Carl Shapiro58551df2011-07-24 03:09:51 -070069 // Builds a mark stack and recursively mark until it empties.
Mathieu Chartier357e9be2012-08-01 11:00:14 -070070 void RecursiveMark(bool partial, TimingLogger& timings)
Ian Rogersb726dcb2012-09-05 08:57:23 -070071 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
72 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartiercc236d72012-07-20 10:29:05 -070073
Mathieu Chartier357e9be2012-08-01 11:00:14 -070074 // Copies mark bits from live bitmap of ZygoteSpace to mark bitmap for partial GCs.
Mathieu Chartier2fde5332012-09-14 14:51:54 -070075 void CopyMarkBits(ContinuousSpace* space);
Mathieu Chartier7469ebf2012-09-24 16:28:36 -070076 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
77
78 void BindLiveToMarkBitmap(ContinuousSpace* space)
79 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
80
81 void UnBindBitmaps()
82 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -070083
Mathieu Chartier02b6a782012-10-26 13:51:26 -070084 // Builds a mark stack with objects on dirty cards and recursively mark until it empties.
Mathieu Chartierd22d5482012-11-06 17:14:12 -080085 void RecursiveMarkDirtyObjects(byte minimum_age = CardTable::kCardDirty)
Ian Rogersb726dcb2012-09-05 08:57:23 -070086 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
87 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -070088
Mathieu Chartier357e9be2012-08-01 11:00:14 -070089 // Recursive mark objects on specified cards. Updates finger.
90 void RecursiveMarkCards(CardTable* card_table, const std::vector<byte*>& cards,
91 TimingLogger& timings)
Ian Rogersb726dcb2012-09-05 08:57:23 -070092 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
93 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);;
Mathieu Chartier357e9be2012-08-01 11:00:14 -070094
Carl Shapiro69759ea2011-07-21 18:13:35 -070095 // Remarks the root set after completing the concurrent mark.
Ian Rogers00f7d0e2012-07-19 15:28:27 -070096 void ReMarkRoots()
Ian Rogersb726dcb2012-09-05 08:57:23 -070097 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
98 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -070099
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700100 Heap* GetHeap() {
101 return heap_;
102 }
103
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700104 void ProcessReferences(bool clear_soft_references)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700105 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
106 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
Carl Shapiro58551df2011-07-24 03:09:51 -0700107 ProcessReferences(&soft_reference_list_, clear_soft_references,
108 &weak_reference_list_,
109 &finalizer_reference_list_,
110 &phantom_reference_list_);
111 }
112
Carl Shapiro69759ea2011-07-21 18:13:35 -0700113 // Sweeps unmarked objects to complete the garbage collection.
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700114 void Sweep(TimingLogger& timings, bool partial, bool swap_bitmaps)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700115 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700116
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700117 // Sweeps unmarked objects to complete the garbage collection.
118 void SweepLargeObjects(bool swap_bitmaps)
119 EXCLUSIVE_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_);
120
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700121 // Sweep only pointers within an array. WARNING: Trashes objects.
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700122 void SweepArray(TimingLogger& logger, ObjectStack* allocation_stack_, bool swap_bitmaps)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700123 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700124
Elliott Hughesadb460d2011-10-05 17:02:34 -0700125 Object* GetClearedReferences() {
126 return cleared_reference_list_;
127 }
128
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700129 // Proxy for external access to ScanObject.
130 void ScanRoot(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700131 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
132 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700133
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700134 // Blackens an object.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700135 void ScanObject(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700136 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
137 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700138
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700139 template <typename MarkVisitor>
140 void ScanObjectVisit(const Object* obj, const MarkVisitor& visitor)
141 NO_THREAD_SAFETY_ANALYSIS {
142 DCHECK(obj != NULL);
143 if (kIsDebugBuild && !IsMarked(obj)) {
144 heap_->DumpSpaces();
145 LOG(FATAL) << "Scanning unmarked object " << obj;
146 }
147 Class* klass = obj->GetClass();
148 DCHECK(klass != NULL);
149 if (klass == java_lang_Class_) {
150 DCHECK_EQ(klass->GetClass(), java_lang_Class_);
151 if (kCountScannedTypes) {
152 ++class_count_;
153 }
154 VisitClassReferences(klass, obj, visitor);
155 } else if (klass->IsArrayClass()) {
156 if (kCountScannedTypes) {
157 ++array_count_;
158 }
159 visitor(obj, klass, Object::ClassOffset(), false);
160 if (klass->IsObjectArrayClass()) {
161 VisitObjectArrayReferences(obj->AsObjectArray<Object>(), visitor);
162 }
163 } else {
164 if (kCountScannedTypes) {
165 ++other_count_;
166 }
167 VisitOtherReferences(klass, obj, visitor);
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800168 if (UNLIKELY(klass->IsReferenceClass())) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700169 DelayReferenceReferent(const_cast<Object*>(obj));
170 }
171 }
172 }
173
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700174 void SetFinger(Object* new_finger) {
175 finger_ = new_finger;
176 }
177
178 void DisableFinger() {
179 SetFinger(reinterpret_cast<Object*>(~static_cast<uintptr_t>(0)));
180 }
181
182 size_t GetFreedBytes() const {
183 return freed_bytes_;
184 }
185
186 size_t GetFreedObjects() const {
187 return freed_objects_;
188 }
189
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700190 // Everything inside the immune range is marked.
191 void SetImmuneRange(Object* begin, Object* end) {
192 immune_begin_ = begin;
193 immune_end_ = end;
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700194 }
195
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700196 void SweepSystemWeaks()
197 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
198
199 // Only sweep the weaks which are inside of an allocation stack.
200 void SweepSystemWeaksArray(ObjectStack* allocations)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700201 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700202
Mathieu Chartierc7b83a02012-09-11 18:07:39 -0700203 static bool VerifyIsLiveCallback(const Object* obj, void* arg)
204 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
205
206 void VerifySystemWeaks()
207 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
208
209 // Verify that an object is live, either in a live bitmap or in the allocation stack.
210 void VerifyIsLive(const Object* obj)
211 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
212
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700213 template <typename Visitor>
214 static void VisitObjectReferences(const Object* obj, const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700215 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
216 Locks::mutator_lock_) {
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700217 DCHECK(obj != NULL);
218 DCHECK(obj->GetClass() != NULL);
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700219
220 Class* klass = obj->GetClass();
221 DCHECK(klass != NULL);
222 if (klass == Class::GetJavaLangClass()) {
223 DCHECK_EQ(klass->GetClass(), Class::GetJavaLangClass());
224 VisitClassReferences(klass, obj, visitor);
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700225 } else {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700226 if (klass->IsArrayClass()) {
227 visitor(obj, klass, Object::ClassOffset(), false);
228 if (klass->IsObjectArrayClass()) {
229 VisitObjectArrayReferences(obj->AsObjectArray<Object>(), visitor);
230 }
231 } else {
232 VisitOtherReferences(klass, obj, visitor);
233 }
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700234 }
235 }
236
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700237 static void MarkObjectCallback(const Object* root, void* arg)
238 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
239
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800240 static void MarkRootParallelCallback(const Object* root, void* arg);
241
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700242 // Marks an object.
243 void MarkObject(const Object* obj)
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700244 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
245
246 Barrier& GetBarrier();
247
Carl Shapiro69759ea2011-07-21 18:13:35 -0700248 private:
249 // Returns true if the object has its bit set in the mark bitmap.
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700250 bool IsMarked(const Object* object) const;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700251
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700252 static bool IsMarkedCallback(const Object* object, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700253 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Elliott Hughesc33a32b2011-10-11 18:18:07 -0700254
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700255 static bool IsMarkedArrayCallback(const Object* object, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700256 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier46a23632012-08-07 18:44:40 -0700257
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700258 static void ReMarkObjectVisitor(const Object* root, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700259 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700260
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700261 static void VerifyImageRootVisitor(Object* root, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700262 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
263 Locks::mutator_lock_);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700264
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700265 void MarkObjectNonNull(const Object* obj, bool check_finger)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700266 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700267
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800268 void MarkObjectNonNullParallel(const Object* obj, bool check_finger);
269
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700270 bool MarkLargeObject(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700271 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700272
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700273 // Returns true if we need to add obj to a mark stack.
274 bool MarkObjectParallel(const Object* obj) NO_THREAD_SAFETY_ANALYSIS;
275
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700276 static void SweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700277 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -0700278
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700279 // Special sweep for zygote that just marks objects / dirties cards.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700280 static void ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700281 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Ian Rogers5d76c432011-10-31 21:42:49 -0700282
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700283 void CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700284 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700285
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700286 void CheckObject(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700287 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Ian Rogers5d76c432011-10-31 21:42:49 -0700288
Mathieu Chartier6f1c9492012-10-15 12:08:41 -0700289 // Verify the roots of the heap and print out information related to any invalid roots.
290 // Called in MarkObject, so may we may not hold the mutator lock.
291 void VerifyRoots()
292 NO_THREAD_SAFETY_ANALYSIS;
293
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800294 // Expand mark stack to 2x its current size. Thread safe.
295 void ExpandMarkStack();
296
Mathieu Chartier6f1c9492012-10-15 12:08:41 -0700297 static void VerifyRootCallback(const Object* root, void* arg, size_t vreg,
298 const AbstractMethod* method);
299
300 void VerifyRoot(const Object* root, size_t vreg, const AbstractMethod* method)
301 NO_THREAD_SAFETY_ANALYSIS;
302
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700303 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700304 static void VisitInstanceFieldsReferences(const Class* klass, const Object* obj,
305 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700306 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700307 DCHECK(obj != NULL);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700308 DCHECK(klass != NULL);
309 VisitFieldsReferences(obj, klass->GetReferenceInstanceOffsets(), false, visitor);
310 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700311
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700312 // Visit the header, static field references, and interface pointers of a class object.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700313 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700314 static void VisitClassReferences(const Class* klass, const Object* obj,
315 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700316 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700317 VisitInstanceFieldsReferences(klass, obj, visitor);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700318 VisitStaticFieldsReferences(obj->AsClass(), visitor);
319 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700320
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700321 template <typename Visitor>
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700322 static void VisitStaticFieldsReferences(const Class* klass, const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700323 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700324 DCHECK(klass != NULL);
325 VisitFieldsReferences(klass, klass->GetReferenceStaticOffsets(), true, visitor);
326 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700327
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700328 template <typename Visitor>
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700329 static void VisitFieldsReferences(const Object* obj, uint32_t ref_offsets, bool is_static,
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700330 const Visitor& visitor)
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800331 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
332 if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700333 // Found a reference offset bitmap. Mark the specified offsets.
334 while (ref_offsets != 0) {
335 size_t right_shift = CLZ(ref_offsets);
336 MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
337 const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
338 visitor(obj, ref, field_offset, is_static);
339 ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
340 }
341 } else {
342 // There is no reference offset bitmap. In the non-static case,
343 // walk up the class inheritance hierarchy and find reference
344 // offsets the hard way. In the static case, just consider this
345 // class.
346 for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
347 klass != NULL;
348 klass = is_static ? NULL : klass->GetSuperClass()) {
349 size_t num_reference_fields = (is_static
350 ? klass->NumReferenceStaticFields()
351 : klass->NumReferenceInstanceFields());
352 for (size_t i = 0; i < num_reference_fields; ++i) {
353 Field* field = (is_static
354 ? klass->GetStaticField(i)
355 : klass->GetInstanceField(i));
356 MemberOffset field_offset = field->GetOffset();
357 const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
358 visitor(obj, ref, field_offset, is_static);
359 }
360 }
361 }
362 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700363
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700364 // Visit all of the references in an object array.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700365 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700366 static void VisitObjectArrayReferences(const ObjectArray<Object>* array,
367 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700368 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700369 const int32_t length = array->GetLength();
370 for (int32_t i = 0; i < length; ++i) {
371 const Object* element = array->GetWithoutChecks(i);
372 const size_t width = sizeof(Object*);
373 MemberOffset offset = MemberOffset(i * width + Array::DataOffset(width).Int32Value());
374 visitor(array, element, offset, false);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700375 }
376 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700377
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700378 // Visits the header and field references of a data object.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700379 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700380 static void VisitOtherReferences(const Class* klass, const Object* obj,
381 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700382 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700383 return VisitInstanceFieldsReferences(klass, obj, visitor);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700384 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700385
Carl Shapiro69759ea2011-07-21 18:13:35 -0700386 // Blackens objects grayed during a garbage collection.
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800387 void ScanGrayObjects(byte minimum_age)
388 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
389 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700390
391 // Schedules an unmarked object for reference processing.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700392 void DelayReferenceReferent(Object* reference)
Ian Rogers23435d02012-09-24 11:23:12 -0700393 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700394
395 // Recursively blackens objects on the mark stack.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700396 void ProcessMarkStack()
Ian Rogersb726dcb2012-09-05 08:57:23 -0700397 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
398 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700399
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700400 void ProcessMarkStackParallel()
401 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
402 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
403
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700404 void EnqueueFinalizerReferences(Object** ref)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700405 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
406 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700407
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700408 void PreserveSomeSoftReferences(Object** ref)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700409 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
410 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700411
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700412 void ClearWhiteReferences(Object** list)
Ian Rogers23435d02012-09-24 11:23:12 -0700413 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700414
Carl Shapiro58551df2011-07-24 03:09:51 -0700415 void ProcessReferences(Object** soft_references, bool clear_soft_references,
Carl Shapiro69759ea2011-07-21 18:13:35 -0700416 Object** weak_references,
417 Object** finalizer_references,
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700418 Object** phantom_references)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700419 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
420 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700421
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700422 void SweepJniWeakGlobals(Heap::IsMarkedTester is_marked, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700423 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -0700424
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700425 // Whether or not we count how many of each type of object were scanned.
426 static const bool kCountScannedTypes = false;
427
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700428 // Current space, we check this space first to avoid searching for the appropriate space for an object.
429 SpaceBitmap* current_mark_bitmap_;
430
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700431 // Cache java.lang.Class for optimization.
432 Class* java_lang_Class_;
433
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700434 ObjectStack* mark_stack_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700435
Elliott Hughesb3bd5f02012-03-08 21:05:27 -0800436 Heap* heap_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700437
438 Object* finger_;
439
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700440 // Immune range, every object inside the immune range is assumed to be marked.
441 Object* immune_begin_;
442 Object* immune_end_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700443
444 Object* soft_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700445 Object* weak_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700446 Object* finalizer_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700447 Object* phantom_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700448 Object* cleared_reference_list_;
449
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700450 AtomicInteger freed_bytes_;
451 AtomicInteger freed_objects_;
452 AtomicInteger class_count_;
453 AtomicInteger array_count_;
454 AtomicInteger other_count_;
455 AtomicInteger large_object_test_;
456 AtomicInteger large_object_mark_;
457 AtomicInteger classes_marked_;
458 AtomicInteger overhead_time_;
459 AtomicInteger work_chunks_created_;
460 AtomicInteger work_chunks_deleted_;
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800461 AtomicInteger reference_count_;
Elliott Hughes352a4242011-10-31 15:15:21 -0700462
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700463 UniquePtr<Barrier> gc_barrier_;
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800464 Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
465 Mutex mark_stack_expand_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700466
Mathieu Chartiere6e06512012-06-26 15:00:26 -0700467 friend class AddIfReachesAllocSpaceVisitor; // Used by mod-union table.
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700468 friend class CheckBitmapVisitor;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700469 friend class CheckObjectVisitor;
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700470 friend class CheckReferenceVisitor;
Elliott Hughes410c0c82011-09-01 17:58:25 -0700471 friend class InternTableEntryIsUnmarked;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700472 friend class MarkIfReachesAllocspaceVisitor;
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700473 friend class ModUnionCheckReferences;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700474 friend class ModUnionClearCardVisitor;
Mathieu Chartiere6e06512012-06-26 15:00:26 -0700475 friend class ModUnionReferenceVisitor;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700476 friend class ModUnionVisitor;
477 friend class ModUnionTableBitmap;
Mathieu Chartiere6e06512012-06-26 15:00:26 -0700478 friend class ModUnionTableReferenceCache;
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700479 friend class ModUnionScanImageRootVisitor;
480 friend class ScanBitmapVisitor;
481 friend class ScanImageRootVisitor;
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700482 friend class MarkStackChunk;
483 friend class FifoMarkStackChunk;
Elliott Hughes410c0c82011-09-01 17:58:25 -0700484
Carl Shapiro69759ea2011-07-21 18:13:35 -0700485 DISALLOW_COPY_AND_ASSIGN(MarkSweep);
486};
487
488} // namespace art
489
490#endif // ART_SRC_MARK_SWEEP_H_