/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "immune_region.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
  class Reference;
}  // namespace mirror

class StackVisitor;
class Thread;
enum VisitRootFlags : uint8_t;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

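  // A collection is driven by the base GarbageCollector, which is expected to run the phases
  // below roughly in this order (see GarbageCollector::Run() for the exact pause/lock handling):
  //   InitializePhase();
  //   MarkingPhase();             // Concurrent collections mark with mutators running.
  //   HandleDirtyObjectsPhase();  // During a pause; re-scans objects dirtied by the mutators.
  //   ReclaimPhase();
  //   FinishPhase();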
  virtual void InitializePhase() OVERRIDE;
  virtual void MarkingPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void HandleDirtyObjectsPhase() OVERRIDE EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() OVERRIDE SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  bool IsConcurrent() const {
    return is_concurrent_;
  }

  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypeFull;
  }

  virtual CollectorType GetCollectorType() const OVERRIDE {
    return is_concurrent_ ? kCollectorTypeCMS : kCollectorTypeMS;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultSpaceBitmap();

  // Marks all objects in the root set at the start of a garbage collection.
  void MarkRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

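  // Marks roots not owned by mutator threads (runtime-global roots).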
  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

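  // Marks roots that can be visited while mutators are running; which roots are visited
  // depends on |flags|.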
  void MarkConcurrentRoots(VisitRootFlags flags)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

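  // Marks thread roots via a checkpoint: each thread is expected to mark its own stack roots,
  // with the GC waiting on gc_barrier_ for all checkpoints to complete.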
  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Marks that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

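  // Processes the soft/weak/finalizer/phantom references discovered during marking.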
  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreProcessReferences()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Updates and marks references from immune spaces. Virtual as overridden by StickyMarkSweep.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Pre-cleans cards to reduce how much work is needed in the pause.
  void PreCleanCards()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection. Virtual as by default it sweeps
  // all allocation spaces; partial and sticky GCs want to sweep only a subset of the heap.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // No thread safety analysis due to lambdas.
  template<typename MarkVisitor, typename ReferenceVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor,
                       const ReferenceVisitor& ref_visitor)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

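  // Sweeps system weaks (e.g. intern table and JNI weak globals), clearing entries whose
  // referents are no longer marked.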
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

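  // Static callbacks passed to the runtime's root- and reference-visiting routines; |arg| is
  // expected to be the MarkSweep instance.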
  static mirror::Object* MarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkHeapReferenceCallback(mirror::HeapReference<mirror::Object>* ref, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                               RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyRootMarked(mirror::Object** root, void* arg, uint32_t /*thread_id*/,
                               RootType /*root_type*/)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ProcessMarkStackPausedCallback(void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static void MarkRootParallelCallback(mirror::Object** root, void* arg, uint32_t thread_id,
                                       RootType root_type)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object.
  void MarkObject(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

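  // Marks an object that is known to be non-null.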
  void MarkObjectNonNull(mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit in the corresponding bitmap, or if it is in a
  // space set, removing the object from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Marks the VM thread roots.
  void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(mirror::Object* obj);

  // Marks a large object if |set| is true, otherwise unmarks it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_) LOCKS_EXCLUDED(large_object_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  // Verifies the roots of the heap and prints out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expands the mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase based on whether we are
  // paused and whether we care about pauses.
  size_t GetThreadCount(bool paused) const;

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Pushes a single reference on a mark stack.
  void PushOnMarkStack(mirror::Object* obj);

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

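  // Recursively blackens objects on the mark stack using a thread pool with |thread_count|
  // workers.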
  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Used to get around thread safety annotations. The call is from MarkingPhase and is guarded
  // by IsExclusiveHeld.
  void RevokeAllThreadLocalAllocationStacks(Thread* self) NO_THREAD_SAFETY_ANALYSIS;

  // Revokes all the thread-local buffers.
  void RevokeAllThreadLocalBuffers();

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space bitmap, checked first to avoid searching for the appropriate space for an
  // object.
  accounting::SpaceBitmap* current_space_bitmap_;
  // Cache the heap's mark bitmap to prevent having to do 2 loads during slow path marking.
  accounting::HeapBitmap* mark_bitmap_;

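  // Mark stack holding gray objects that still need to be scanned.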
  accounting::ObjectStack* mark_stack_;

  // Immune region: every object inside it is assumed to be marked.
  ImmuneRegion immune_region_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;
  AtomicInteger mark_null_count_;
  AtomicInteger mark_immune_count_;
  AtomicInteger mark_fastpath_count_;
  AtomicInteger mark_slowpath_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class MarkObjectVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_