/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

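  // Collection phases, overridden from GarbageCollector; these are expected to be run in
  // sequence over the course of a single GC cycle.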
  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

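  // Marks roots other than the per-thread roots.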
  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

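  // Marks roots that can be scanned concurrently, without pausing the mutator threads.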
  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

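  // Marks thread roots via a checkpoint: each running thread marks its own roots and then
  // synchronizes with the collector (see gc_barrier_).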
  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks objects until the stack empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune; immune spaces have all live objects marked, i.e. their mark and
  // live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image space. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

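  // Processes the reference queues (soft, weak, finalizer, and phantom) discovered during marking.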
  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

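  // Clears system weaks (such as interned strings and JNI weak globals) whose referents are no
  // longer marked.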
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

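  // Static marking callbacks handed to the generic visiting interfaces; 'arg' is expected to be
  // the MarkSweep instance.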
  static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

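  // Marks an object that is used as a root.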
  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

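  // Returns the barrier used to synchronize with threads during checkpoint root marking.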
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit inside of the corresponding bitmap, or if it is in a
  // space set, removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks or unmarks a large object: marks if set is true, otherwise unmarks.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase based on whether we are
  // paused and whether we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside of the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Mark bitmap of the current space; we check this space first to avoid searching for the
  // appropriate space for an object.
  accounting::SpaceBitmap* current_mark_bitmap_;

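  // Stack of grey objects that still need to be scanned.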
  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
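  // Profiling and debugging counters.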
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

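  // Synchronization used for the root-marking checkpoint and for parallel marking.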
  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_