/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase() OVERRIDE;
  virtual void MarkingPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual bool HandleDirtyObjectsPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
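
  // A rough sketch of how these phases are sequenced, assuming the driver loop in
  // GarbageCollector::Run() (illustrative only; the pause and thread-state transitions live in
  // the base class):
  //
  //   InitializePhase();
  //   MarkingPhase();                     // Concurrent with mutators when is_concurrent_.
  //   while (!HandleDirtyObjectsPhase()) {
  //   }                                   // Runs in a pause; repeats until it reports done.
  //   ReclaimPhase();
  //   FinishPhase();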
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  virtual bool IsConcurrent() const OVERRIDE;

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
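
  // Conceptually, the checkpoint asks each mutator thread to mark its own stack roots, then the
  // GC thread blocks on gc_barrier_ until every thread has run it. A minimal sketch, assuming a
  // checkpoint closure (hypothetical name) and the usual ThreadList checkpoint API:
  //
  //   CheckpointMarkThreadRoots check_point(this);
  //   size_t barrier_count = Runtime::Current()->GetThreadList()->RunCheckpoint(&check_point);
  //   gc_barrier_->Increment(self, barrier_count);  // Wait for all checkpoints to complete.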

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all of their live objects marked, i.e. their mark
  // and live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Updates and marks references from immune spaces. Virtual because it is overridden by
  // StickyMarkSweep.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pre-cleans cards to reduce the amount of work needed during the pause.
  void PreCleanCards()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual because by default it
  // sweeps all allocation spaces; partial and sticky GCs only sweep a subset of the heap.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
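  // Assumed semantics of swap_bitmaps (worth checking against the definition in mark_sweep.cc):
  // it compensates for the heap having already swapped each space's live and mark bitmaps, so
  // the sweep still frees exactly the objects that were live before the GC but never marked.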

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only the pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;
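
  // A minimal sketch of a compatible MarkVisitor. The exact call signature is an assumption
  // here; the authoritative one is at the call sites in mark_sweep-inl.h:
  //
  //   class ExampleMarkVisitor {
  //    public:
  //     explicit ExampleMarkVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) {}
  //     void operator()(mirror::Object* obj, mirror::Object* ref, const MemberOffset& offset,
  //                     bool is_static) const {
  //       mark_sweep_->MarkObject(ref);  // Mark each reference found while scanning obj.
  //     }
  //    private:
  //     MarkSweep* const mark_sweep_;
  //   };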

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                               RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ProcessMarkStackPausedCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootParallelCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                                       RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing its bit in the corresponding bitmap, or, if the object lives
  // in a space set, by removing it from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically; safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks a large object if set is true, otherwise unmarks it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verifies the roots of the heap and prints out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expands the mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
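
  // ExpandMarkStack() is conceptually ResizeMarkStack(2 * current capacity). Resizing may
  // reallocate the stack's backing storage, so callers must not hold raw pointers into the mark
  // stack across either call (assumed behavior; see the definitions for the details).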

  // Returns how many threads we should use for the current GC phase, based on whether we are
  // paused and on whether we care about pauses.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
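
  // Assumed shape of the reference-processing pipeline these helpers implement (driven from
  // ProcessReferences; the list names here are illustrative):
  //
  //   PreserveSomeSoftReferences(&soft_list);       // Maybe keep softly reachable objects alive.
  //   ClearWhiteReferences(&weak_list);             // Clear refs whose referents are unmarked.
  //   EnqueueFinalizerReferences(&finalizer_list);  // Revive unmarked finalizable objects once.
  //   ClearWhiteReferences(&phantom_list);          // Finally, clear dead phantom references.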

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded
  // by IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for
  // an object.
  accounting::SpaceBitmap* current_mark_bitmap_;

  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside it is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_