blob: 3581d98772e79c234b7333d796181996212ee021 [file] [log] [blame]
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
Carl Shapiro69759ea2011-07-21 18:13:35 -070016
17#ifndef ART_SRC_MARK_SWEEP_H_
18#define ART_SRC_MARK_SWEEP_H_
19
Mathieu Chartierd8195f12012-10-05 12:21:28 -070020#include "atomic_stack.h"
Elliott Hughes76160052012-12-12 16:31:20 -080021#include "base/macros.h"
Mathieu Chartier2b82db42012-11-14 17:29:05 -080022#include "garbage_collector.h"
Elliott Hughes5e71b522011-10-20 13:12:32 -070023#include "heap_bitmap.h"
Mathieu Chartierb43b7d42012-06-19 13:15:09 -070024#include "object.h"
Ian Rogers0cfe1fb2011-08-26 03:29:44 -070025#include "offsets.h"
Carl Shapiro69759ea2011-07-21 18:13:35 -070026
27namespace art {
28
// Forward declarations, kept in alphabetical order, for types that are only
// used by pointer/reference in this header.
class Barrier;
class CheckObjectVisitor;
class Class;
class Heap;
class MarkIfReachesAllocspaceVisitor;
class MarkStackChunk;
class ModUnionClearCardVisitor;
class ModUnionTableBitmap;
class ModUnionVisitor;
class Object;
class TimingLogger;
Carl Shapiro69759ea2011-07-21 18:13:35 -070040
Mathieu Chartier2b82db42012-11-14 17:29:05 -080041class MarkSweep : public GarbageCollector {
Carl Shapiro69759ea2011-07-21 18:13:35 -070042 public:
Mathieu Chartier2b82db42012-11-14 17:29:05 -080043 explicit MarkSweep(Heap* heap, bool is_concurrent);
Carl Shapiro58551df2011-07-24 03:09:51 -070044
Carl Shapiro69759ea2011-07-21 18:13:35 -070045 ~MarkSweep();
46
Mathieu Chartier2b82db42012-11-14 17:29:05 -080047 virtual std::string GetName() const;
48 virtual void InitializePhase();
49 virtual bool IsConcurrent() const;
50 virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
51 virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
52 virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
53 virtual void FinishPhase();
54 virtual void MarkReachableObjects()
55 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
56 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
57 virtual GcType GetGcType() const {
58 return kGcTypeFull;
59 }
60
Carl Shapiro58551df2011-07-24 03:09:51 -070061 // Initializes internal structures.
Jesse Wilson078f9b02011-11-18 17:51:47 -050062 void Init();
Carl Shapiro58551df2011-07-24 03:09:51 -070063
Mathieu Chartier7469ebf2012-09-24 16:28:36 -070064 // Find the default mark bitmap.
65 void FindDefaultMarkBitmap();
66
Carl Shapiro69759ea2011-07-21 18:13:35 -070067 // Marks the root set at the start of a garbage collection.
Ian Rogers00f7d0e2012-07-19 15:28:27 -070068 void MarkRoots()
Ian Rogersb726dcb2012-09-05 08:57:23 -070069 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
70 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -070071
Mathieu Chartier858f1c52012-10-17 17:45:55 -070072 void MarkNonThreadRoots()
73 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
74
Mathieu Chartier9ebae1f2012-10-15 17:38:16 -070075 void MarkConcurrentRoots();
76 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
77
Mathieu Chartier858f1c52012-10-17 17:45:55 -070078 void MarkRootsCheckpoint();
Mathieu Chartier2b82db42012-11-14 17:29:05 -080079 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier858f1c52012-10-17 17:45:55 -070080
Mathieu Chartier262e5ff2012-06-01 17:35:38 -070081 // Verify that image roots point to only marked objects within the alloc space.
Ian Rogersb726dcb2012-09-05 08:57:23 -070082 void VerifyImageRoots() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -070083
Carl Shapiro58551df2011-07-24 03:09:51 -070084 // Builds a mark stack and recursively mark until it empties.
Mathieu Chartier2b82db42012-11-14 17:29:05 -080085 void RecursiveMark()
Ian Rogersb726dcb2012-09-05 08:57:23 -070086 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
87 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartiercc236d72012-07-20 10:29:05 -070088
Mathieu Chartier2b82db42012-11-14 17:29:05 -080089 // Make a space immune, immune spaces are assumed to have all live objects marked.
90 void ImmuneSpace(ContinuousSpace* space)
Mathieu Chartier7469ebf2012-09-24 16:28:36 -070091 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier2b82db42012-11-14 17:29:05 -080092 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);;
93
94 // Bind the live bits to the mark bits of bitmaps based on the gc type.
95 virtual void BindBitmaps()
96 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartier7469ebf2012-09-24 16:28:36 -070097
98 void BindLiveToMarkBitmap(ContinuousSpace* space)
99 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
100
101 void UnBindBitmaps()
102 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -0700103
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700104 // Builds a mark stack with objects on dirty cards and recursively mark until it empties.
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800105 void RecursiveMarkDirtyObjects(byte minimum_age = CardTable::kCardDirty)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700106 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
107 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700108
Carl Shapiro69759ea2011-07-21 18:13:35 -0700109 // Remarks the root set after completing the concurrent mark.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700110 void ReMarkRoots()
Ian Rogersb726dcb2012-09-05 08:57:23 -0700111 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
112 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700113
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800114 void ProcessReferences(Thread* self)
115 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -0700116
Carl Shapiro69759ea2011-07-21 18:13:35 -0700117 // Sweeps unmarked objects to complete the garbage collection.
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800118 virtual void Sweep(TimingLogger& timings, bool swap_bitmaps)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700119 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700120
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700121 // Sweeps unmarked objects to complete the garbage collection.
122 void SweepLargeObjects(bool swap_bitmaps)
123 EXCLUSIVE_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_);
124
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700125 // Sweep only pointers within an array. WARNING: Trashes objects.
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700126 void SweepArray(TimingLogger& logger, ObjectStack* allocation_stack_, bool swap_bitmaps)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700127 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700128
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800129 // Swap bitmaps (if we are a full Gc then we swap the zygote bitmap too).
130 virtual void SwapBitmaps() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
131 void SwapLargeObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
132
Elliott Hughesadb460d2011-10-05 17:02:34 -0700133 Object* GetClearedReferences() {
134 return cleared_reference_list_;
135 }
136
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700137 // Proxy for external access to ScanObject.
138 void ScanRoot(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700139 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
140 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700141
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700142 // Blackens an object.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700143 void ScanObject(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700144 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
145 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700146
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700147 template <typename MarkVisitor>
148 void ScanObjectVisit(const Object* obj, const MarkVisitor& visitor)
149 NO_THREAD_SAFETY_ANALYSIS {
150 DCHECK(obj != NULL);
151 if (kIsDebugBuild && !IsMarked(obj)) {
152 heap_->DumpSpaces();
153 LOG(FATAL) << "Scanning unmarked object " << obj;
154 }
155 Class* klass = obj->GetClass();
156 DCHECK(klass != NULL);
157 if (klass == java_lang_Class_) {
158 DCHECK_EQ(klass->GetClass(), java_lang_Class_);
159 if (kCountScannedTypes) {
160 ++class_count_;
161 }
162 VisitClassReferences(klass, obj, visitor);
163 } else if (klass->IsArrayClass()) {
164 if (kCountScannedTypes) {
165 ++array_count_;
166 }
167 visitor(obj, klass, Object::ClassOffset(), false);
168 if (klass->IsObjectArrayClass()) {
169 VisitObjectArrayReferences(obj->AsObjectArray<Object>(), visitor);
170 }
171 } else {
172 if (kCountScannedTypes) {
173 ++other_count_;
174 }
175 VisitOtherReferences(klass, obj, visitor);
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800176 if (UNLIKELY(klass->IsReferenceClass())) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700177 DelayReferenceReferent(const_cast<Object*>(obj));
178 }
179 }
180 }
181
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700182 void SetFinger(Object* new_finger) {
183 finger_ = new_finger;
184 }
185
186 void DisableFinger() {
187 SetFinger(reinterpret_cast<Object*>(~static_cast<uintptr_t>(0)));
188 }
189
190 size_t GetFreedBytes() const {
191 return freed_bytes_;
192 }
193
194 size_t GetFreedObjects() const {
195 return freed_objects_;
196 }
197
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800198 uint64_t GetTotalTime() const {
199 return total_time_;
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700200 }
201
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800202 uint64_t GetTotalPausedTime() const {
203 return total_paused_time_;
204 }
205
206 uint64_t GetTotalFreedObjects() const {
207 return total_freed_objects_;
208 }
209
210 uint64_t GetTotalFreedBytes() const {
211 return total_freed_bytes_;
212 }
213
214 // Everything inside the immune range is assumed to be marked.
215 void SetImmuneRange(Object* begin, Object* end);
216
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700217 void SweepSystemWeaks()
218 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
219
220 // Only sweep the weaks which are inside of an allocation stack.
221 void SweepSystemWeaksArray(ObjectStack* allocations)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700222 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier357e9be2012-08-01 11:00:14 -0700223
Mathieu Chartierc7b83a02012-09-11 18:07:39 -0700224 static bool VerifyIsLiveCallback(const Object* obj, void* arg)
225 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
226
227 void VerifySystemWeaks()
228 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
229
230 // Verify that an object is live, either in a live bitmap or in the allocation stack.
231 void VerifyIsLive(const Object* obj)
232 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
233
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700234 template <typename Visitor>
235 static void VisitObjectReferences(const Object* obj, const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700236 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
237 Locks::mutator_lock_) {
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700238 DCHECK(obj != NULL);
239 DCHECK(obj->GetClass() != NULL);
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700240
241 Class* klass = obj->GetClass();
242 DCHECK(klass != NULL);
243 if (klass == Class::GetJavaLangClass()) {
244 DCHECK_EQ(klass->GetClass(), Class::GetJavaLangClass());
245 VisitClassReferences(klass, obj, visitor);
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700246 } else {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700247 if (klass->IsArrayClass()) {
248 visitor(obj, klass, Object::ClassOffset(), false);
249 if (klass->IsObjectArrayClass()) {
250 VisitObjectArrayReferences(obj->AsObjectArray<Object>(), visitor);
251 }
252 } else {
253 VisitOtherReferences(klass, obj, visitor);
254 }
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700255 }
256 }
257
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700258 static void MarkObjectCallback(const Object* root, void* arg)
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800259 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700260 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
261
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800262 static void MarkRootParallelCallback(const Object* root, void* arg);
263
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700264 // Marks an object.
265 void MarkObject(const Object* obj)
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800266 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
267 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
268
269 void MarkRoot(const Object* obj)
270 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700271 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
272
273 Barrier& GetBarrier();
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800274 const TimingLogger& GetTimings() const;
275 const CumulativeLogger& GetCumulativeTimings() const;
276 void ResetCumulativeStatistics();
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700277
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800278 protected:
Carl Shapiro69759ea2011-07-21 18:13:35 -0700279 // Returns true if the object has its bit set in the mark bitmap.
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700280 bool IsMarked(const Object* object) const;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700281
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700282 static bool IsMarkedCallback(const Object* object, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700283 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Elliott Hughesc33a32b2011-10-11 18:18:07 -0700284
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700285 static bool IsMarkedArrayCallback(const Object* object, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700286 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier46a23632012-08-07 18:44:40 -0700287
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700288 static void ReMarkObjectVisitor(const Object* root, void* arg)
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800289 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700290 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Mathieu Chartier262e5ff2012-06-01 17:35:38 -0700291
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700292 static void VerifyImageRootVisitor(Object* root, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700293 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
294 Locks::mutator_lock_);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700295
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700296 void MarkObjectNonNull(const Object* obj, bool check_finger)
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800297 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700298 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700299
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800300 void MarkObjectNonNullParallel(const Object* obj, bool check_finger);
301
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700302 bool MarkLargeObject(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700303 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700304
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700305 // Returns true if we need to add obj to a mark stack.
306 bool MarkObjectParallel(const Object* obj) NO_THREAD_SAFETY_ANALYSIS;
307
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700308 static void SweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700309 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -0700310
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700311 // Special sweep for zygote that just marks objects / dirties cards.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700312 static void ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700313 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Ian Rogers5d76c432011-10-31 21:42:49 -0700314
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700315 void CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700316 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700317
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700318 void CheckObject(const Object* obj)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700319 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Ian Rogers5d76c432011-10-31 21:42:49 -0700320
Mathieu Chartier6f1c9492012-10-15 12:08:41 -0700321 // Verify the roots of the heap and print out information related to any invalid roots.
322 // Called in MarkObject, so may we may not hold the mutator lock.
323 void VerifyRoots()
324 NO_THREAD_SAFETY_ANALYSIS;
325
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800326 // Expand mark stack to 2x its current size. Thread safe.
327 void ExpandMarkStack();
328
Mathieu Chartier6f1c9492012-10-15 12:08:41 -0700329 static void VerifyRootCallback(const Object* root, void* arg, size_t vreg,
Ian Rogers40e3bac2012-11-20 00:09:14 -0800330 const StackVisitor *visitor);
Mathieu Chartier6f1c9492012-10-15 12:08:41 -0700331
Ian Rogers40e3bac2012-11-20 00:09:14 -0800332 void VerifyRoot(const Object* root, size_t vreg, const StackVisitor* visitor)
Mathieu Chartier6f1c9492012-10-15 12:08:41 -0700333 NO_THREAD_SAFETY_ANALYSIS;
334
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700335 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700336 static void VisitInstanceFieldsReferences(const Class* klass, const Object* obj,
337 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700338 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700339 DCHECK(obj != NULL);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700340 DCHECK(klass != NULL);
341 VisitFieldsReferences(obj, klass->GetReferenceInstanceOffsets(), false, visitor);
342 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700343
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700344 // Visit the header, static field references, and interface pointers of a class object.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700345 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700346 static void VisitClassReferences(const Class* klass, const Object* obj,
347 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700348 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700349 VisitInstanceFieldsReferences(klass, obj, visitor);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700350 VisitStaticFieldsReferences(obj->AsClass(), visitor);
351 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700352
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700353 template <typename Visitor>
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700354 static void VisitStaticFieldsReferences(const Class* klass, const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700355 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700356 DCHECK(klass != NULL);
357 VisitFieldsReferences(klass, klass->GetReferenceStaticOffsets(), true, visitor);
358 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700359
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700360 template <typename Visitor>
Mathieu Chartierfd678be2012-08-30 14:50:54 -0700361 static void VisitFieldsReferences(const Object* obj, uint32_t ref_offsets, bool is_static,
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700362 const Visitor& visitor)
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800363 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
364 if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700365 // Found a reference offset bitmap. Mark the specified offsets.
366 while (ref_offsets != 0) {
367 size_t right_shift = CLZ(ref_offsets);
368 MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
369 const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
370 visitor(obj, ref, field_offset, is_static);
371 ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
372 }
373 } else {
374 // There is no reference offset bitmap. In the non-static case,
375 // walk up the class inheritance hierarchy and find reference
376 // offsets the hard way. In the static case, just consider this
377 // class.
378 for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
379 klass != NULL;
380 klass = is_static ? NULL : klass->GetSuperClass()) {
381 size_t num_reference_fields = (is_static
382 ? klass->NumReferenceStaticFields()
383 : klass->NumReferenceInstanceFields());
384 for (size_t i = 0; i < num_reference_fields; ++i) {
385 Field* field = (is_static
386 ? klass->GetStaticField(i)
387 : klass->GetInstanceField(i));
388 MemberOffset field_offset = field->GetOffset();
389 const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
390 visitor(obj, ref, field_offset, is_static);
391 }
392 }
393 }
394 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700395
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700396 // Visit all of the references in an object array.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700397 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700398 static void VisitObjectArrayReferences(const ObjectArray<Object>* array,
399 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700400 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700401 const int32_t length = array->GetLength();
402 for (int32_t i = 0; i < length; ++i) {
403 const Object* element = array->GetWithoutChecks(i);
404 const size_t width = sizeof(Object*);
405 MemberOffset offset = MemberOffset(i * width + Array::DataOffset(width).Int32Value());
406 visitor(array, element, offset, false);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700407 }
408 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700409
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700410 // Visits the header and field references of a data object.
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700411 template <typename Visitor>
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700412 static void VisitOtherReferences(const Class* klass, const Object* obj,
413 const Visitor& visitor)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700414 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700415 return VisitInstanceFieldsReferences(klass, obj, visitor);
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700416 }
Ian Rogers5d76c432011-10-31 21:42:49 -0700417
Carl Shapiro69759ea2011-07-21 18:13:35 -0700418 // Blackens objects grayed during a garbage collection.
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800419 void ScanGrayObjects(byte minimum_age)
420 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
421 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700422
423 // Schedules an unmarked object for reference processing.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700424 void DelayReferenceReferent(Object* reference)
Ian Rogers23435d02012-09-24 11:23:12 -0700425 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700426
427 // Recursively blackens objects on the mark stack.
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700428 void ProcessMarkStack()
Ian Rogersb726dcb2012-09-05 08:57:23 -0700429 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
430 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700431
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700432 void ProcessMarkStackParallel()
433 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
434 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
435
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700436 void EnqueueFinalizerReferences(Object** ref)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700437 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
438 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700439
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700440 void PreserveSomeSoftReferences(Object** ref)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700441 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
442 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700443
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700444 void ClearWhiteReferences(Object** list)
Ian Rogers23435d02012-09-24 11:23:12 -0700445 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700446
Carl Shapiro58551df2011-07-24 03:09:51 -0700447 void ProcessReferences(Object** soft_references, bool clear_soft_references,
Carl Shapiro69759ea2011-07-21 18:13:35 -0700448 Object** weak_references,
449 Object** finalizer_references,
Ian Rogers00f7d0e2012-07-19 15:28:27 -0700450 Object** phantom_references)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700451 EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
452 SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
Carl Shapiro69759ea2011-07-21 18:13:35 -0700453
Mathieu Chartier7469ebf2012-09-24 16:28:36 -0700454 void SweepJniWeakGlobals(Heap::IsMarkedTester is_marked, void* arg)
Ian Rogersb726dcb2012-09-05 08:57:23 -0700455 SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
Carl Shapiro58551df2011-07-24 03:09:51 -0700456
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700457 // Whether or not we count how many of each type of object were scanned.
458 static const bool kCountScannedTypes = false;
459
Mathieu Chartierb062fdd2012-07-03 09:51:48 -0700460 // Current space, we check this space first to avoid searching for the appropriate space for an object.
461 SpaceBitmap* current_mark_bitmap_;
462
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700463 // Cache java.lang.Class for optimization.
464 Class* java_lang_Class_;
465
Mathieu Chartierd8195f12012-10-05 12:21:28 -0700466 ObjectStack* mark_stack_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700467
Carl Shapiro69759ea2011-07-21 18:13:35 -0700468 Object* finger_;
469
Mathieu Chartiere0f0cb32012-08-28 11:26:00 -0700470 // Immune range, every object inside the immune range is assumed to be marked.
471 Object* immune_begin_;
472 Object* immune_end_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700473
474 Object* soft_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700475 Object* weak_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700476 Object* finalizer_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700477 Object* phantom_reference_list_;
Carl Shapiro69759ea2011-07-21 18:13:35 -0700478 Object* cleared_reference_list_;
479
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700480 AtomicInteger freed_bytes_;
481 AtomicInteger freed_objects_;
482 AtomicInteger class_count_;
483 AtomicInteger array_count_;
484 AtomicInteger other_count_;
485 AtomicInteger large_object_test_;
486 AtomicInteger large_object_mark_;
487 AtomicInteger classes_marked_;
488 AtomicInteger overhead_time_;
489 AtomicInteger work_chunks_created_;
490 AtomicInteger work_chunks_deleted_;
Mathieu Chartierd22d5482012-11-06 17:14:12 -0800491 AtomicInteger reference_count_;
Elliott Hughes352a4242011-10-31 15:15:21 -0700492
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800493 // Cumulative statistics.
494 uint64_t total_time_;
495 uint64_t total_paused_time_;
496 uint64_t total_freed_objects_;
497 uint64_t total_freed_bytes_;
498
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700499 UniquePtr<Barrier> gc_barrier_;
Mathieu Chartierac86a7c2012-11-12 15:03:16 -0800500 Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
501 Mutex mark_stack_expand_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800502 TimingLogger timings_;
503 CumulativeLogger cumulative_timings_;
504
505 bool is_concurrent_;
506 bool clear_soft_references_;
Mathieu Chartier858f1c52012-10-17 17:45:55 -0700507
Mathieu Chartiere6e06512012-06-26 15:00:26 -0700508 friend class AddIfReachesAllocSpaceVisitor; // Used by mod-union table.
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700509 friend class CheckBitmapVisitor;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700510 friend class CheckObjectVisitor;
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700511 friend class CheckReferenceVisitor;
Mathieu Chartier2b82db42012-11-14 17:29:05 -0800512 friend class Heap;
Elliott Hughes410c0c82011-09-01 17:58:25 -0700513 friend class InternTableEntryIsUnmarked;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700514 friend class MarkIfReachesAllocspaceVisitor;
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700515 friend class ModUnionCheckReferences;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700516 friend class ModUnionClearCardVisitor;
Mathieu Chartiere6e06512012-06-26 15:00:26 -0700517 friend class ModUnionReferenceVisitor;
Mathieu Chartierb43b7d42012-06-19 13:15:09 -0700518 friend class ModUnionVisitor;
519 friend class ModUnionTableBitmap;
Mathieu Chartiere6e06512012-06-26 15:00:26 -0700520 friend class ModUnionTableReferenceCache;
Mathieu Chartiercc236d72012-07-20 10:29:05 -0700521 friend class ModUnionScanImageRootVisitor;
522 friend class ScanBitmapVisitor;
523 friend class ScanImageRootVisitor;
Mathieu Chartier02b6a782012-10-26 13:51:26 -0700524 friend class MarkStackChunk;
525 friend class FifoMarkStackChunk;
Elliott Hughes410c0c82011-09-01 17:58:25 -0700526
Carl Shapiro69759ea2011-07-21 18:13:35 -0700527 DISALLOW_COPY_AND_ASSIGN(MarkSweep);
528};
529
530} // namespace art
531
532#endif // ART_SRC_MARK_SWEEP_H_