/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic_integer.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

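  // Reading aid (a sketch, not part of the API surface of this header): the phase methods
  // above are driven by the GarbageCollector base class. For a concurrent collection the
  // approximate order is assumed to be:
  //
  //   InitializePhase();
  //   MarkingPhase();             // Roots and reachable objects marked, mutators mostly running.
  //   HandleDirtyObjectsPhase();  // Paused: re-scan objects dirtied during concurrent marking.
  //   ReclaimPhase();             // Sweep unmarked objects and swap bitmaps.
  //   FinishPhase();
  //
  // Exact sequencing and pause handling live in the base class, not here.
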
  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Verify that image roots point only to marked objects within the alloc space.
  void VerifyImageRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks objects until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all live objects marked; that is, the mark and
  // live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void BindLiveToMarkBitmap(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

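  // Sketch of the card-table mechanism behind RecursiveMarkDirtyObjects (assumed detail: the
  // card constants and scanning loop live in accounting::CardTable, not in this class). The
  // write barrier dirties one card per small block of heap, so during remark only objects on
  // cards at least minimum_age old need rescanning, roughly:
  //
  //   card_table->Scan(space->GetMarkBitmap(), space->Begin(), space->End(),
  //                    scan_visitor, minimum_age);  // hypothetical call shape
  //
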
  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  mirror::Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;
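
  // A minimal sketch of a MarkVisitor usable with ScanObjectVisit. The parameter list below is
  // an assumption modeled on this class's internal visitors; the template itself only requires
  // whatever call shape the implementation applies:
  //
  //   struct ExampleVisitor {
  //     void operator()(mirror::Object* obj, mirror::Object* ref,
  //                     const MemberOffset& offset, bool is_static) const {
  //       // Inspect or record `ref`, found at `offset` inside `obj`.
  //     }
  //   };
  //   mark_sweep->ScanObjectVisit(obj, ExampleVisitor());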

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

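  // How gc_barrier_ is used (a sketch under assumptions; the actual checkpoint plumbing lives
  // in mark_sweep.cc and the thread list): MarkRootsCheckpoint asks every running thread to
  // mark its own stack roots at a checkpoint, then blocks on the barrier until all have done so:
  //
  //   CheckpointMarkThreadRoots check_point(this);               // hypothetical closure name
  //   size_t barrier_count = thread_list->RunCheckpoint(&check_point);
  //   gc_barrier_->Increment(self, barrier_count);               // wait for all checkpoints
  //
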
 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit inside of the corresponding bitmap, or if it is in a
  // space set, removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks a large object when `set` is true, otherwise unmarks it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

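  // The contract above can be read as a compare-and-swap on the mark bitmap. A sketch, assuming
  // SpaceBitmap exposes an atomic test-and-set (the real implementation also handles the immune
  // range, the allocation stack, and large objects):
  //
  //   if (!current_mark_bitmap_->AtomicTestAndSet(obj)) {
  //     return true;   // Newly marked: the caller pushes obj onto its mark stack.
  //   }
  //   return false;    // Another worker marked it first.
  //
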
  static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const mirror::Object* obj, const mirror::Object* ref, MemberOffset offset,
                      bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and whether we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside of the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

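  // The serial drain is, in outline (a sketch; the real loop in mark_sweep.cc also prefetches
  // and hands work to ProcessMarkStackParallel when enough worker threads are available):
  //
  //   while (!mark_stack_->IsEmpty()) {
  //     mirror::Object* obj = mark_stack_->PopBack();
  //     ScanObject(obj);  // Blackens obj, pushing any newly marked references.
  //   }
  //
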
  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for
  // an object.
  accounting::SpaceBitmap* current_mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside it is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  mirror::Object* soft_reference_list_;
  mirror::Object* weak_reference_list_;
  mirror::Object* finalizer_reference_list_;
  mirror::Object* phantom_reference_list_;
  mirror::Object* cleared_reference_list_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_