/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic_integer.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase();
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }
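
  // How these phases fit together is easiest to see as pseudocode. A minimal sketch,
  // assuming the driver lives in the GarbageCollector base class; the loop condition and
  // ordering below are illustrative, not a copy of the real driver:
  //
  //   InitializePhase();                  // Reset counters, find the mark bitmap.
  //   MarkingPhase();                     // Mark roots and trace, mutators mostly running.
  //   while (!HandleDirtyObjectsPhase())  // Paused: re-scan objects dirtied concurrently.
  //     ;
  //   ReclaimPhase();                     // Sweep unmarked objects, process references.
  //   FinishPhase();                      // Publish statistics, reset state.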

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Verify that image roots point to only marked objects within the alloc space.
  void VerifyImageRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Make a space immune; immune spaces have all live objects marked - that is, the mark and
  // live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void BindLiveToMarkBitmap(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
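
  // A sketch of the assumed binding semantics (the DCHECK below is hypothetical, not an
  // invariant this header promises): after binding, the space's mark bitmap aliases its
  // live bitmap, so every object that was live at the start of the GC is trivially marked
  // and the space can be skipped during tracing.
  //
  //   BindLiveToMarkBitmap(space);
  //   DCHECK(space->GetLiveBitmap() == space->GetMarkBitmap());
  //   // ... collect, treating the space as immune ...
  //   UnBindBitmaps();  // Restore distinct bitmaps afterwards.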

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  mirror::Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Proxy for external access to ScanObject.
  void ScanRoot(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(const mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(const mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;
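
  // A minimal sketch of a MarkVisitor, assuming the visitor is invoked once per reference
  // field as visitor(obj, ref, offset, is_static); the functor below is illustrative and
  // not part of this header:
  //
  //   struct CountReferencesVisitor {
  //     CountReferencesVisitor() : count(0) {}
  //     void operator()(const mirror::Object* /* obj */, const mirror::Object* ref,
  //                     const MemberOffset& /* offset */, bool /* is_static */) const {
  //       if (ref != NULL) {
  //         ++count;
  //       }
  //     }
  //     mutable size_t count;
  //   };
  //
  //   CountReferencesVisitor visitor;
  //   mark_sweep->ScanObjectVisit(obj, visitor);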

  size_t GetFreedBytes() const {
    return freed_bytes_;
  }

  size_t GetFreedLargeObjectBytes() const {
    return freed_large_object_bytes_;
  }

  size_t GetFreedObjects() const {
    return freed_objects_;
  }

  size_t GetFreedLargeObjects() const {
    return freed_large_objects_;
  }

  uint64_t GetTotalTimeNs() const {
    return total_time_ns_;
  }

  uint64_t GetTotalPausedTimeNs() const {
    return total_paused_time_ns_;
  }

  uint64_t GetTotalFreedObjects() const {
    return total_freed_objects_;
  }

  uint64_t GetTotalFreedBytes() const {
    return total_freed_bytes_;
  }

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Only sweep the weaks that are inside the given allocation stack.
  void SweepSystemWeaksArray(accounting::ObjectStack* allocations)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(const mirror::Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* SystemWeakIsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* SystemWeakIsMarkedArrayCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit in the corresponding bitmap or, if the object is in a
  // space set, by removing it from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Mark the VM thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks the given large object if set is true; otherwise unmarks it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;
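
  // Parallel marking presumably reduces to an atomic test-and-set of the object's bit in
  // the mark bitmap; a hedged sketch of the idea (OffsetOf, MaskOf and CompareAndSwap are
  // illustrative helpers, not the real SpaceBitmap API):
  //
  //   uintptr_t* word = &bitmap[OffsetOf(obj)];
  //   uintptr_t mask = MaskOf(obj);
  //   uintptr_t old_word;
  //   do {
  //     old_word = *word;
  //     if ((old_word & mask) != 0) {
  //       return false;  // Some other thread already marked obj.
  //     }
  //   } while (!CompareAndSwap(word, old_word, old_word | mask));
  //   return true;  // We won the race; the caller pushes obj onto its mark stack.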

  static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const mirror::Object* obj, const mirror::Object* ref, MemberOffset offset,
                      bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether or not we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside of the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const mirror::Class* klass, const mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(const mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(const mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(const mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(const mirror::Class* klass, const mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
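
  // The heart of tracing is a plain worklist loop. A sketch of the serial case, assuming
  // ObjectStack exposes IsEmpty()/PopBack(); the parallel version instead carves the stack
  // into chunks handled by MarkStackTask workers:
  //
  //   while (!mark_stack_->IsEmpty()) {
  //     const mirror::Object* obj = mark_stack_->PopBack();
  //     ScanObject(obj);  // Blackens obj; marking its unvisited references pushes them.
  //   }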

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ProcessReferences(mirror::Object** soft_references, bool clear_soft_references,
                         mirror::Object** weak_references,
                         mirror::Object** finalizer_references,
                         mirror::Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
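
  // A hedged sketch of the assumed ordering, following the usual Java reference-strength
  // semantics (soft, then weak, then finalizable, then phantom); the real sequencing lives
  // in mark_sweep.cc:
  //
  //   if (!clear_soft_references) {
  //     PreserveSomeSoftReferences(soft_references);    // Keep some softly-reachable data.
  //   }
  //   ClearWhiteReferences(weak_references);            // Clear weak refs to dead objects.
  //   EnqueueFinalizerReferences(finalizer_references); // Resurrect objects to finalize...
  //   ProcessMarkStack(true);                           // ...and trace what they revive.
  //   ClearWhiteReferences(phantom_references);         // Phantoms are cleared last.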

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::SpaceBitmap* current_mark_bitmap_;

  // Cache java.lang.Class for optimization.
  mirror::Class* java_lang_Class_;

  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  mirror::Object* soft_reference_list_;
  mirror::Object* weak_reference_list_;
  mirror::Object* finalizer_reference_list_;
  mirror::Object* phantom_reference_list_;
  mirror::Object* cleared_reference_list_;

  // Parallel finger.
  AtomicInteger atomic_finger_;

  // Number of non-large-object bytes freed in this collection.
  AtomicInteger freed_bytes_;
  // Number of large object bytes freed.
  AtomicInteger freed_large_object_bytes_;
  // Number of objects freed in this collection.
  AtomicInteger freed_objects_;
  // Number of freed large objects.
  AtomicInteger freed_large_objects_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

  bool clear_soft_references_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_