/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
| 16 | |
| 17 | #ifndef ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_ |
| 18 | #define ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_ |
| 19 | |
| 20 | #include <deque> |
| 21 | #include <memory> // For unique_ptr. |
| 22 | |
| 23 | #include "atomic.h" |
| 24 | #include "base/macros.h" |
| 25 | #include "base/mutex.h" |
| 26 | #include "garbage_collector.h" |
Mathieu Chartier | e34fa1d | 2015-01-14 14:55:47 -0800 | [diff] [blame] | 27 | #include "gc_root.h" |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 28 | #include "gc/accounting/heap_bitmap.h" |
Mathieu Chartier | 763a31e | 2015-11-16 16:05:55 -0800 | [diff] [blame] | 29 | #include "immune_spaces.h" |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 30 | #include "lock_word.h" |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 31 | #include "offsets.h" |
| 32 | |
| 33 | namespace art { |
| 34 | |
| 35 | class Thread; |
| 36 | |
| 37 | namespace mirror { |
| 38 | class Class; |
| 39 | class Object; |
| 40 | } // namespace mirror |
| 41 | |
| 42 | namespace gc { |
| 43 | |
| 44 | class Heap; |
| 45 | |
| 46 | namespace accounting { |
| 47 | template <typename T> class AtomicStack; |
Mathieu Chartier | cb535da | 2015-01-23 13:50:03 -0800 | [diff] [blame] | 48 | typedef AtomicStack<mirror::Object> ObjectStack; |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 49 | } // namespace accounting |
| 50 | |
| 51 | namespace space { |
Ian Rogers | e63db27 | 2014-07-15 15:36:11 -0700 | [diff] [blame] | 52 | class BumpPointerSpace; |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 53 | class ContinuousMemMapAllocSpace; |
| 54 | class ContinuousSpace; |
| 55 | } // namespace space |
| 56 | |
| 57 | namespace collector { |
| 58 | |
| 59 | class MarkCompact : public GarbageCollector { |
| 60 | public: |
| 61 | explicit MarkCompact(Heap* heap, const std::string& name_prefix = ""); |
| 62 | ~MarkCompact() {} |
| 63 | |
| 64 | virtual void RunPhases() OVERRIDE NO_THREAD_SAFETY_ANALYSIS; |
| 65 | void InitializePhase(); |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 66 | void MarkingPhase() REQUIRES(Locks::mutator_lock_) |
| 67 | REQUIRES(!Locks::heap_bitmap_lock_); |
| 68 | void ReclaimPhase() REQUIRES(Locks::mutator_lock_) |
| 69 | REQUIRES(!Locks::heap_bitmap_lock_); |
| 70 | void FinishPhase() REQUIRES(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 71 | void MarkReachableObjects() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 72 | REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 73 | virtual GcType GetGcType() const OVERRIDE { |
| 74 | return kGcTypePartial; |
| 75 | } |
| 76 | virtual CollectorType GetCollectorType() const OVERRIDE { |
| 77 | return kCollectorTypeMC; |
| 78 | } |
| 79 | |
| 80 | // Sets which space we will be copying objects in. |
| 81 | void SetSpace(space::BumpPointerSpace* space); |
| 82 | |
| 83 | // Initializes internal structures. |
| 84 | void Init(); |
| 85 | |
| 86 | // Find the default mark bitmap. |
| 87 | void FindDefaultMarkBitmap(); |
| 88 | |
| 89 | void ScanObject(mirror::Object* obj) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 90 | REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 91 | |
| 92 | // Marks the root set at the start of a garbage collection. |
| 93 | void MarkRoots() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 94 | REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 95 | |
| 96 | // Bind the live bits to the mark bits of bitmaps for spaces that are never collected, ie |
| 97 | // the image. Mark that portion of the heap as immune. |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 98 | void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 99 | REQUIRES(!Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 100 | |
| 101 | void UnBindBitmaps() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 102 | REQUIRES(Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 103 | |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 104 | void ProcessReferences(Thread* self) REQUIRES(Locks::mutator_lock_) |
| 105 | REQUIRES(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 106 | |
| 107 | // Sweeps unmarked objects to complete the garbage collection. |
Mathieu Chartier | a9d82fe | 2016-01-25 20:06:11 -0800 | [diff] [blame] | 108 | void Sweep(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 109 | |
| 110 | // Sweeps unmarked objects to complete the garbage collection. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 111 | void SweepLargeObjects(bool swap_bitmaps) REQUIRES(Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 112 | |
| 113 | void SweepSystemWeaks() |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 114 | REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 115 | |
Mathieu Chartier | bb87e0f | 2015-04-03 11:21:55 -0700 | [diff] [blame] | 116 | virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 117 | OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | bb87e0f | 2015-04-03 11:21:55 -0700 | [diff] [blame] | 118 | |
| 119 | virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count, |
| 120 | const RootInfo& info) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 121 | OVERRIDE REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 122 | |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 123 | // Schedules an unmarked object for reference processing. |
Mathieu Chartier | 31e8822 | 2016-10-14 18:43:19 -0700 | [diff] [blame] | 124 | void DelayReferenceReferent(ObjPtr<mirror::Class> klass, ObjPtr<mirror::Reference> reference) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 125 | REQUIRES_SHARED(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 126 | |
| 127 | protected: |
| 128 | // Returns null if the object is not marked, otherwise returns the forwarding address (same as |
| 129 | // object for non movable things). |
Mathieu Chartier | 9750995 | 2015-07-13 14:35:43 -0700 | [diff] [blame] | 130 | mirror::Object* GetMarkedForwardAddress(mirror::Object* object) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 131 | REQUIRES(Locks::mutator_lock_) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 132 | REQUIRES_SHARED(Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 133 | |
| 134 | // Marks or unmarks a large object based on whether or not set is true. If set is true, then we |
| 135 | // mark, otherwise we unmark. |
| 136 | bool MarkLargeObject(const mirror::Object* obj) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 137 | REQUIRES(Locks::heap_bitmap_lock_) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 138 | REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 139 | |
| 140 | // Expand mark stack to 2x its current size. |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 141 | void ResizeMarkStack(size_t new_size) REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 142 | |
| 143 | // Returns true if we should sweep the space. |
| 144 | bool ShouldSweepSpace(space::ContinuousSpace* space) const; |
| 145 | |
| 146 | // Push an object onto the mark stack. |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 147 | void MarkStackPush(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 148 | |
| 149 | void UpdateAndMarkModUnion() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 150 | REQUIRES(Locks::heap_bitmap_lock_) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 151 | REQUIRES_SHARED(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 152 | |
| 153 | // Recursively blackens objects on the mark stack. |
| 154 | void ProcessMarkStack() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 155 | REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 156 | |
| 157 | // 3 pass mark compact approach. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 158 | void Compact() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 159 | // Calculate the forwarding address of objects marked as "live" in the objects_before_forwarding |
| 160 | // bitmap. |
| 161 | void CalculateObjectForwardingAddresses() |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 162 | REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 163 | // Update the references of objects by using the forwarding addresses. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 164 | void UpdateReferences() REQUIRES(Locks::mutator_lock_, Locks::heap_bitmap_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 165 | // Move objects and restore lock words. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 166 | void MoveObjects() REQUIRES(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 167 | // Move a single object to its forward address. |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 168 | void MoveObject(mirror::Object* obj, size_t len) REQUIRES(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 169 | // Mark a single object. |
Mathieu Chartier | 9750995 | 2015-07-13 14:35:43 -0700 | [diff] [blame] | 170 | virtual mirror::Object* MarkObject(mirror::Object* obj) OVERRIDE |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 171 | REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Hiroshi Yamauchi | 057d977 | 2017-02-17 15:33:23 -0800 | [diff] [blame] | 172 | virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* obj_ptr, |
| 173 | bool do_atomic_update) OVERRIDE |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 174 | REQUIRES(Locks::heap_bitmap_lock_, Locks::mutator_lock_); |
Mathieu Chartier | 9750995 | 2015-07-13 14:35:43 -0700 | [diff] [blame] | 175 | virtual mirror::Object* IsMarked(mirror::Object* obj) OVERRIDE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 176 | REQUIRES_SHARED(Locks::heap_bitmap_lock_) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 177 | REQUIRES(Locks::mutator_lock_); |
Hiroshi Yamauchi | 65f5f24 | 2016-12-19 11:44:47 -0800 | [diff] [blame] | 178 | virtual bool IsNullOrMarkedHeapReference(mirror::HeapReference<mirror::Object>* obj, |
| 179 | bool do_atomic_update) OVERRIDE |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 180 | REQUIRES_SHARED(Locks::heap_bitmap_lock_) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 181 | REQUIRES(Locks::mutator_lock_); |
| 182 | void ForwardObject(mirror::Object* obj) REQUIRES(Locks::heap_bitmap_lock_, |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 183 | Locks::mutator_lock_); |
| 184 | // Update a single heap reference. |
| 185 | void UpdateHeapReference(mirror::HeapReference<mirror::Object>* reference) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 186 | REQUIRES_SHARED(Locks::heap_bitmap_lock_) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 187 | REQUIRES(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 188 | // Update all of the references of a single object. |
| 189 | void UpdateObjectReferences(mirror::Object* obj) |
Andreas Gampe | bdf7f1c | 2016-08-30 16:38:47 -0700 | [diff] [blame] | 190 | REQUIRES_SHARED(Locks::heap_bitmap_lock_) |
Mathieu Chartier | 9044347 | 2015-07-16 20:32:27 -0700 | [diff] [blame] | 191 | REQUIRES(Locks::mutator_lock_); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 192 | |
| 193 | // Revoke all the thread-local buffers. |
| 194 | void RevokeAllThreadLocalBuffers(); |
| 195 | |
| 196 | accounting::ObjectStack* mark_stack_; |
| 197 | |
Mathieu Chartier | 763a31e | 2015-11-16 16:05:55 -0800 | [diff] [blame] | 198 | // Every object inside the immune spaces is assumed to be marked. |
| 199 | ImmuneSpaces immune_spaces_; |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 200 | |
| 201 | // Bump pointer space which we are collecting. |
| 202 | space::BumpPointerSpace* space_; |
| 203 | // Cached mark bitmap as an optimization. |
| 204 | accounting::HeapBitmap* mark_bitmap_; |
| 205 | |
| 206 | // The name of the collector. |
| 207 | std::string collector_name_; |
| 208 | |
| 209 | // The bump pointer in the space where the next forwarding address will be. |
Ian Rogers | 1373595 | 2014-10-08 12:43:28 -0700 | [diff] [blame] | 210 | uint8_t* bump_pointer_; |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 211 | // How many live objects we have in the space. |
| 212 | size_t live_objects_in_space_; |
| 213 | |
| 214 | // Bitmap which describes which objects we have to move, need to do / 2 so that we can handle |
| 215 | // objects which are only 8 bytes. |
| 216 | std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_before_forwarding_; |
| 217 | // Bitmap which describes which lock words we need to restore. |
| 218 | std::unique_ptr<accounting::ContinuousSpaceBitmap> objects_with_lockword_; |
| 219 | // Which lock words we need to restore as we are moving objects. |
| 220 | std::deque<LockWord> lock_words_to_restore_; |
| 221 | |
Mathieu Chartier | 9750995 | 2015-07-13 14:35:43 -0700 | [diff] [blame] | 222 | // State whether or not we are updating references. |
| 223 | bool updating_references_; |
| 224 | |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 225 | private: |
Mathieu Chartier | a07f559 | 2016-06-16 11:44:28 -0700 | [diff] [blame] | 226 | class MarkObjectVisitor; |
| 227 | class UpdateObjectReferencesVisitor; |
| 228 | class UpdateReferenceVisitor; |
| 229 | class UpdateRootVisitor; |
Mathieu Chartier | bb87e0f | 2015-04-03 11:21:55 -0700 | [diff] [blame] | 230 | |
Mathieu Chartier | 3130cdf | 2015-05-03 15:20:23 -0700 | [diff] [blame] | 231 | DISALLOW_IMPLICIT_CONSTRUCTORS(MarkCompact); |
Mathieu Chartier | 52e4b43 | 2014-06-10 11:22:31 -0700 | [diff] [blame] | 232 | }; |
| 233 | |
| 234 | } // namespace collector |
| 235 | } // namespace gc |
| 236 | } // namespace art |
| 237 | |
| 238 | #endif // ART_RUNTIME_GC_COLLECTOR_MARK_COMPACT_H_ |