/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_
#define ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_

#include "atomic_integer.h"
#include "barrier.h"
#include "base/macros.h"
#include "base/mutex.h"
#include "garbage_collector.h"
#include "offsets.h"
#include "root_visitor.h"
#include "UniquePtr.h"

namespace art {

namespace mirror {
  class Class;
  class Object;
  template<class T> class ObjectArray;
}  // namespace mirror

class StackVisitor;
class Thread;

namespace gc {

namespace accounting {
  template <typename T> class AtomicStack;
  class MarkIfReachesAllocspaceVisitor;
  class ModUnionClearCardVisitor;
  class ModUnionVisitor;
  class ModUnionTableBitmap;
  class MarkStackChunk;
  typedef AtomicStack<mirror::Object*> ObjectStack;
  class SpaceBitmap;
}  // namespace accounting

namespace space {
  class ContinuousSpace;
}  // namespace space

class Heap;

namespace collector {

class MarkSweep : public GarbageCollector {
 public:
  explicit MarkSweep(Heap* heap, bool is_concurrent, const std::string& name_prefix = "");

  ~MarkSweep() {}

  virtual void InitializePhase();
  virtual bool IsConcurrent() const;
  virtual bool HandleDirtyObjectsPhase() EXCLUSIVE_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkingPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void ReclaimPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void FinishPhase() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);
  virtual void MarkReachableObjects()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const {
    return kGcTypeFull;
  }

  // Initializes internal structures.
  void Init();

  // Find the default mark bitmap.
  void FindDefaultMarkBitmap();

  // Marks the root set at the start of a garbage collection.
  void MarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkNonThreadRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkConcurrentRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void MarkRootsCheckpoint(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Verify that image roots point to only marked objects within the alloc space.
  void VerifyImageRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Builds a mark stack and recursively marks until it empties.
  void RecursiveMark()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Makes a space immune. Immune spaces have all of their live objects marked - that is, the
  // mark and live bitmaps are bound together.
  void ImmuneSpace(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  bool IsImmuneSpace(const space::ContinuousSpace* space) const
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, i.e.
  // the image. Mark that portion of the heap as immune.
  virtual void BindBitmaps() SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void BindLiveToMarkBitmap(space::ContinuousSpace* space)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void UnBindBitmaps()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Builds a mark stack with objects on dirty cards and recursively marks until it empties.
  void RecursiveMarkDirtyObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Remarks the root set after completing the concurrent mark.
  void ReMarkRoots()
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

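  // Processes the soft, weak, finalizer, and phantom reference lists discovered during marking.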
  void ProcessReferences(Thread* self)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Update and mark references from immune spaces.
  virtual void UpdateAndMarkModUnion()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Sweeps unmarked objects to complete the garbage collection.
  virtual void Sweep(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweeps unmarked objects in the large object space to complete the garbage collection.
  void SweepLargeObjects(bool swap_bitmaps) EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Sweep only pointers within an array. WARNING: Trashes objects.
  void SweepArray(accounting::ObjectStack* allocation_stack_, bool swap_bitmaps)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

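  // Returns the head of the list of references cleared by this collection.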
  mirror::Object* GetClearedReferences() {
    return cleared_reference_list_;
  }

  // Blackens an object.
  void ScanObject(mirror::Object* obj)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // TODO: enable thread safety analysis when in use by multiple worker threads.
  template <typename MarkVisitor>
  void ScanObjectVisit(mirror::Object* obj, const MarkVisitor& visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  // Everything inside the immune range is assumed to be marked.
  void SetImmuneRange(mirror::Object* begin, mirror::Object* end);

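  // Clears unmarked entries in the runtime's system weaks (e.g. the intern table and JNI weak
  // globals).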
  void SweepSystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  static mirror::Object* VerifySystemWeakIsLiveCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void VerifySystemWeaks()
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_, Locks::heap_bitmap_lock_);

  // Verify that an object is live, either in a live bitmap or in the allocation stack.
  void VerifyIsLive(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  template <typename Visitor>
  static void VisitObjectReferences(mirror::Object* obj, const Visitor& visitor, bool visit_class)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  static mirror::Object* RecursiveMarkObjectCallback(mirror::Object* obj, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootCallback(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static mirror::Object* MarkRootParallelCallback(mirror::Object* root, void* arg);

  // Marks an object.
  void MarkObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void MarkRoot(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

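  // Returns the barrier used when running thread checkpoints (see MarkRootsCheckpoint).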
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }

 protected:
  // Returns true if the object has its bit set in the mark bitmap.
  bool IsMarked(const mirror::Object* object) const;

  static mirror::Object* IsMarkedCallback(mirror::Object* object, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  static void VerifyImageRootVisitor(mirror::Object* root, void* arg)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void MarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Unmarks an object by clearing the bit in the corresponding bitmap, or, if the object is in a
  // space set, removing it from the set.
  void UnMarkObjectNonNull(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Mark the vm thread roots.
  virtual void MarkThreadRoots(Thread* self)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Marks an object atomically, safe to use from multiple threads.
  void MarkObjectNonNullParallel(const mirror::Object* obj);

  // Marks or unmarks a large object: if set is true we mark the object, otherwise we unmark it.
  bool MarkLargeObject(const mirror::Object* obj, bool set)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Returns true if we need to add obj to a mark stack.
  bool MarkObjectParallel(const mirror::Object* obj) NO_THREAD_SAFETY_ANALYSIS;

  static void SweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  // Special sweep for zygote that just marks objects / dirties cards.
  static void ZygoteSweepCallback(size_t num_ptrs, mirror::Object** ptrs, void* arg)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_);

  void CheckReference(const mirror::Object* obj, const mirror::Object* ref, MemberOffset offset,
                      bool is_static)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  void CheckObject(const mirror::Object* obj)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Verify the roots of the heap and print out information related to any invalid roots.
  // Called in MarkObject, so we may not hold the mutator lock.
  void VerifyRoots()
      NO_THREAD_SAFETY_ANALYSIS;

  // Expand mark stack to 2x its current size.
  void ExpandMarkStack() EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);
  void ResizeMarkStack(size_t new_size) EXCLUSIVE_LOCKS_REQUIRED(mark_stack_lock_);

  // Returns how many threads we should use for the current GC phase, based on whether or not we
  // are paused and therefore whether or not we care about pause times.
  size_t GetThreadCount(bool paused) const;

  // Returns true if an object is inside of the immune region (assumed to be marked).
  bool IsImmune(const mirror::Object* obj) const ALWAYS_INLINE {
    return obj >= immune_begin_ && obj < immune_end_;
  }

  static void VerifyRootCallback(const mirror::Object* root, void* arg, size_t vreg,
                                 const StackVisitor* visitor);

  void VerifyRoot(const mirror::Object* root, size_t vreg, const StackVisitor* visitor)
      NO_THREAD_SAFETY_ANALYSIS;

  template <typename Visitor>
  static void VisitInstanceFieldsReferences(mirror::Class* klass, mirror::Object* obj,
                                            const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit the header, static field references, and interface pointers of a class object.
  template <typename Visitor>
  static void VisitClassReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitStaticFieldsReferences(mirror::Class* klass, const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  template <typename Visitor>
  static void VisitFieldsReferences(mirror::Object* obj, uint32_t ref_offsets, bool is_static,
                                    const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visit all of the references in an object array.
  template <typename Visitor>
  static void VisitObjectArrayReferences(mirror::ObjectArray<mirror::Object>* array,
                                         const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Visits the header and field references of a data object.
  template <typename Visitor>
  static void VisitOtherReferences(mirror::Class* klass, mirror::Object* obj,
                                   const Visitor& visitor)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_) {
    return VisitInstanceFieldsReferences(klass, obj, visitor);
  }

  // Blackens objects grayed during a garbage collection.
  void ScanGrayObjects(bool paused, byte minimum_age)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  // Schedules an unmarked object for reference processing.
  void DelayReferenceReferent(mirror::Class* klass, mirror::Object* reference)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Recursively blackens objects on the mark stack.
  void ProcessMarkStack(bool paused)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ProcessMarkStackParallel(size_t thread_count)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void EnqueueFinalizerReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void PreserveSomeSoftReferences(mirror::Object** ref)
      EXCLUSIVE_LOCKS_REQUIRED(Locks::heap_bitmap_lock_)
      SHARED_LOCKS_REQUIRED(Locks::mutator_lock_);

  void ClearWhiteReferences(mirror::Object** list)
      SHARED_LOCKS_REQUIRED(Locks::heap_bitmap_lock_, Locks::mutator_lock_);

  // Whether or not we count how many of each type of object were scanned.
  static const bool kCountScannedTypes = false;

  // Current space; we check this space first to avoid searching for the appropriate space for an
  // object.
  accounting::SpaceBitmap* current_mark_bitmap_;

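  // Stack of gray objects that still need to be scanned.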
  accounting::ObjectStack* mark_stack_;

  // Immune range; every object inside the immune range is assumed to be marked.
  mirror::Object* immune_begin_;
  mirror::Object* immune_end_;

  mirror::Object* soft_reference_list_;
  mirror::Object* weak_reference_list_;
  mirror::Object* finalizer_reference_list_;
  mirror::Object* phantom_reference_list_;
  mirror::Object* cleared_reference_list_;

  // Parallel finger.
  AtomicInteger atomic_finger_;
  // Number of classes scanned, if kCountScannedTypes.
  AtomicInteger class_count_;
  // Number of arrays scanned, if kCountScannedTypes.
  AtomicInteger array_count_;
  // Number of non-class/arrays scanned, if kCountScannedTypes.
  AtomicInteger other_count_;
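  // Profiling and debugging counters.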
  AtomicInteger large_object_test_;
  AtomicInteger large_object_mark_;
  AtomicInteger classes_marked_;
  AtomicInteger overhead_time_;
  AtomicInteger work_chunks_created_;
  AtomicInteger work_chunks_deleted_;
  AtomicInteger reference_count_;

  // Verification.
  size_t live_stack_freeze_size_;

  UniquePtr<Barrier> gc_barrier_;
  Mutex large_object_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Mutex mark_stack_lock_ ACQUIRED_AFTER(Locks::classlinker_classes_lock_);

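  // Whether or not we are running a concurrent GC (see IsConcurrent()).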
  const bool is_concurrent_;

 private:
  friend class AddIfReachesAllocSpaceVisitor;  // Used by mod-union table.
  friend class CardScanTask;
  friend class CheckBitmapVisitor;
  friend class CheckReferenceVisitor;
  friend class art::gc::Heap;
  friend class InternTableEntryIsUnmarked;
  friend class MarkIfReachesAllocspaceVisitor;
  friend class ModUnionCheckReferences;
  friend class ModUnionClearCardVisitor;
  friend class ModUnionReferenceVisitor;
  friend class ModUnionVisitor;
  friend class ModUnionTableBitmap;
  friend class ModUnionTableReferenceCache;
  friend class ModUnionScanImageRootVisitor;
  friend class ScanBitmapVisitor;
  friend class ScanImageRootVisitor;
  template<bool kUseFinger> friend class MarkStackTask;
  friend class FifoMarkStackChunk;

  DISALLOW_COPY_AND_ASSIGN(MarkSweep);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_MARK_SWEEP_H_