/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_SRC_MARK_SWEEP_H_
#define ART_SRC_MARK_SWEEP_H_

#include "atomic_stack.h"
#include "base/macros.h"
#include "garbage_collector.h"
#include "heap_bitmap.h"
#include "object.h"
#include "offsets.h"

namespace art {

class Barrier;
class CheckObjectVisitor;
class Class;
class Heap;
class MarkIfReachesAllocspaceVisitor;
class ModUnionClearCardVisitor;
class ModUnionVisitor;
class ModUnionTableBitmap;
class Object;
class TimingLogger;
class MarkStackChunk;

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent);

  ~MarkSweep();

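  // GarbageCollector interface. A collection is expected to run the phases in
  // roughly this order: InitializePhase, MarkingPhase, HandleDirtyObjectsPhase
  // (paused; concurrent collections only), ReclaimPhase, and FinishPhase.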
  virtual std::string GetName() const;
  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase();
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

  // Initializes internal structures.
  void Init();

  // Finds the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRootsCheckpoint()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verifies that image roots point only to marked objects within the alloc space.
  void VerifyImageRoots() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack and recursively marks objects until the stack empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune; immune spaces are assumed to have all of their live objects marked.
  void ImmuneSpace(ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Binds the live bits to the mark bits of the bitmaps, based on the GC type.
  virtual void BindBitmaps()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void BindLiveToMarkBitmap(ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks them until the stack empties.
  void RecursiveMarkDirtyObjects(byte minimum_age = CardTable::kCardDirty)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(TimingLogger& timings, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked large objects to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only pointers within an array. WARNING: Trashes objects.
  void SweepArray(TimingLogger& logger, ObjectStack* allocation_stack, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Swaps bitmaps (if this is a full GC, the zygote bitmap is swapped as well).
  virtual void SwapBitmaps() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  void SwapLargeObjects() EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

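  // Returns the list of references that were cleared during reference processing.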
  Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Proxy for external access to ScanObject.
  void ScanRoot(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Blackens an object.
  void ScanObject(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  template <typename MarkVisitor>
  void ScanObjectVisit(const Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS {
    DCHECK(obj != NULL);
    if (kIsDebugBuild && !IsMarked(obj)) {
      heap_->DumpSpaces();
      LOG(FATAL) << "Scanning unmarked object " << obj;
    }
    Class* klass = obj->GetClass();
    DCHECK(klass != NULL);
    if (klass == java_lang_Class_) {
      DCHECK_EQ(klass->GetClass(), java_lang_Class_);
      if (kCountScannedTypes) {
        ++class_count_;
      }
      VisitClassReferences(klass, obj, visitor);
    } else if (klass->IsArrayClass()) {
      if (kCountScannedTypes) {
        ++array_count_;
      }
      visitor(obj, klass, Object::ClassOffset(), false);
      if (klass->IsObjectArrayClass()) {
        VisitObjectArrayReferences(obj->AsObjectArray<Object>(), visitor);
      }
    } else {
      if (kCountScannedTypes) {
        ++other_count_;
      }
      VisitOtherReferences(klass, obj, visitor);
      if (UNLIKELY(klass->IsReferenceClass())) {
        DelayReferenceReferent(const_cast<Object*>(obj));
      }
    }
  }

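  // The visitors passed to ScanObjectVisit and the Visit*References helpers below
  // may be any callable invoked as visitor(obj, ref, offset, is_static), where obj
  // holds the reference ref at the given member offset. A minimal sketch (a
  // hypothetical example, not part of this file):
  //
  //   struct CountReferencesVisitor {
  //     void operator()(const Object* obj, const Object* ref,
  //                     MemberOffset offset, bool is_static) const {
  //       // Inspect or mark ref here; the obj->ref edge is described by
  //       // offset/is_static.
  //     }
  //   };

  // The scanning finger (see MarkObjectNonNull's check_finger parameter): objects
  // marked at addresses the bitmap scan has already passed are pushed on the mark
  // stack so they still get scanned; disabling the finger (setting it to the
  // maximum address) routes every newly marked object to the mark stack.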
  void SetFinger(Object* new_finger) {
    finger_ = new_finger;
  }

  void DisableFinger() {
    SetFinger(reinterpret_cast<Object*>(~static_cast<uintptr_t>(0)));
  }

  size_t GetFreedBytes() const {
    return freed_bytes_;
  }

  size_t GetFreedObjects() const {
    return freed_objects_;
  }

  uint64_t GetTotalTime() const {
    return total_time_;
  }

  uint64_t GetTotalPausedTime() const {
    return total_paused_time_;
  }

  uint64_t GetTotalFreedObjects() const {
    return total_freed_objects_;
  }

  uint64_t GetTotalFreedBytes() const {
    return total_freed_bytes_;
  }

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(Object* begin, Object* end);

  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps only the weak references that are inside the given allocation stack.
  void SweepSystemWeaksArray(ObjectStack* allocations)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool VerifyIsLiveCallback(const Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Verifies that an object is live, i.e. in a live bitmap or on the allocation stack.
  void VerifyIsLive(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(const Object* obj, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_) {
    DCHECK(obj != NULL);
    Class* klass = obj->GetClass();
    DCHECK(klass != NULL);
    if (klass == Class::GetJavaLangClass()) {
      DCHECK_EQ(klass->GetClass(), Class::GetJavaLangClass());
      VisitClassReferences(klass, obj, visitor);
    } else if (klass->IsArrayClass()) {
      visitor(obj, klass, Object::ClassOffset(), false);
      if (klass->IsObjectArrayClass()) {
        VisitObjectArrayReferences(obj->AsObjectArray<Object>(), visitor);
      }
    } else {
      VisitOtherReferences(klass, obj, visitor);
    }
  }

  static void MarkObjectCallback(const Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void MarkRootParallelCallback(const Object* root, void* arg);

  // Marks an object.
  void MarkObject(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  Barrier& GetBarrier();
  const TimingLogger& GetTimings() const;
  const CumulativeLogger& GetCumulativeTimings() const;
  void ResetCumulativeStatistics();

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const Object* object) const;

  static bool IsMarkedCallback(const Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static bool IsMarkedArrayCallback(const Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void ReMarkObjectVisitor(const Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_,
                            Locks::mutator_lock_);

  void MarkObjectNonNull(const Object* obj, bool check_finger)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkObjectNonNullParallel(const Object* obj, bool check_finger);

  bool MarkLargeObject(const Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  static void SweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for the zygote that just marks objects and dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Verifies the roots of the heap and prints information about any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expands the mark stack to twice its current size. Thread safe.
  void ExpandMarkStack();

  static void VerifyRootCallback(const Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(const Class* klass, const Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    DCHECK(obj != NULL);
    DCHECK(klass != NULL);
    VisitFieldsReferences(obj, klass->GetReferenceInstanceOffsets(), false, visitor);
  }

  // Visits the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(const Class* klass, const Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    VisitInstanceFieldsReferences(klass, obj, visitor);
    VisitStaticFieldsReferences(obj->AsClass(), visitor);
  }

  template <typename Visitor>
  static void VisitStaticFieldsReferences(const Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    DCHECK(klass != NULL);
    VisitFieldsReferences(klass, klass->GetReferenceStaticOffsets(), true, visitor);
  }

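  // Visits each reference field of obj. ref_offsets is a bitmap in which each set
  // bit encodes the offset of one reference field: each loop iteration takes the
  // highest set bit (via CLZ), visits the field at the offset that bit position
  // encodes (CLASS_OFFSET_FROM_CLZ), then clears the bit. The sentinel value
  // CLASS_WALK_SUPER means no bitmap is available, so the fields are found by
  // walking the class (and, for instance fields, its superclasses) instead.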
  template <typename Visitor>
  static void VisitFieldsReferences(const Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    if (LIKELY(ref_offsets != CLASS_WALK_SUPER)) {
      // Found a reference offset bitmap. Mark the specified offsets.
      while (ref_offsets != 0) {
        size_t right_shift = CLZ(ref_offsets);
        MemberOffset field_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        visitor(obj, ref, field_offset, is_static);
        ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
      }
    } else {
      // There is no reference offset bitmap. In the non-static case,
      // walk up the class inheritance hierarchy and find reference
      // offsets the hard way. In the static case, just consider this
      // class.
      for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
           klass != NULL;
           klass = is_static ? NULL : klass->GetSuperClass()) {
        size_t num_reference_fields = (is_static
                                       ? klass->NumReferenceStaticFields()
                                       : klass->NumReferenceInstanceFields());
        for (size_t i = 0; i < num_reference_fields; ++i) {
          Field* field = (is_static
                          ? klass->GetStaticField(i)
                          : klass->GetInstanceField(i));
          MemberOffset field_offset = field->GetOffset();
          const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
          visitor(obj, ref, field_offset, is_static);
        }
      }
    }
  }

  // Visits all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(const ObjectArray<Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    const int32_t length = array->GetLength();
    for (int32_t i = 0; i < length; ++i) {
      const Object* element = array->GetWithoutChecks(i);
      const size_t width = sizeof(Object*);
      MemberOffset offset = MemberOffset(i * width + Array::DataOffset(width).Int32Value());
      visitor(array, element, offset, false);
    }
  }

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(const Class* klass, const Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

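  // Parallel version of ProcessMarkStack; judging by the MarkStackChunk friend
  // class and the work_chunks_* counters below, work appears to be handed to
  // worker threads in chunks.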
  void ProcessMarkStackParallel()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void ProcessReferences(Object** soft_references, bool clear_soft_references,
                         Object** weak_references,
                         Object** finalizer_references,
                         Object** phantom_references)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void SweepJniWeakGlobals(Heap::IsMarkedTester is_marked, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an object.
  SpaceBitmap* current_mark_bitmap_;

  // Cached java.lang.Class for optimization.
  Class* java_lang_Class_;

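  // Stack of gray objects: marked, but not yet scanned.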
  ObjectStack* mark_stack_;

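  // Current position of the scanning finger (see SetFinger/DisableFinger above).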
  Object* finger_;

  // Immune range; every object inside the immune range is assumed to be marked.
  Object* immune_begin_;
  Object* immune_end_;

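  // Lists of java.lang.ref.Reference objects discovered during marking and consumed
  // by ProcessReferences; cleared references end up on cleared_reference_list_.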
  Object* soft_reference_list_;
  Object* weak_reference_list_;
  Object* finalizer_reference_list_;
  Object* phantom_reference_list_;
  Object* cleared_reference_list_;

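  // Per-collection statistics. Most of the counters below are debug/profiling aids;
  // class_count_, array_count_, and other_count_ are only updated when
  // kCountScannedTypes is true, while freed_bytes_ and freed_objects_ back
  // GetFreedBytes()/GetFreedObjects().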
  AtomicInteger freed_bytes_;
  AtomicInteger freed_objects_;
  AtomicInteger class_count_;
  AtomicInteger array_count_;
  AtomicInteger other_count_;
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Cumulative statistics.
  uint64_t total_time_;
  uint64_t total_paused_time_;
  uint64_t total_freed_objects_;
  uint64_t total_freed_bytes_;

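  // Barrier presumably used by MarkRootsCheckpoint to wait for mutator threads to
  // run the checkpoint (exposed via GetBarrier()).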
  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_expand_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  TimingLogger timings_;
  CumulativeLogger cumulative_timings_;

  bool is_concurrent_;
  bool clear_soft_references_;

  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CheckBitmapVisitor;
  friend class CheckObjectVisitor;
  friend class CheckReferenceVisitor;
  friend class Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  friend class MarkStackChunk;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace art

#endif  // ART_SRC_MARK_SWEEP_H_