/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_
#define ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_

#include <unordered_map>
#include <vector>

#include "barrier.h"
#include "garbage_collector.h"
#include "gc/accounting/atomic_stack.h"
#include "gc/accounting/read_barrier_table.h"
#include "gc/accounting/space_bitmap.h"
#include "immune_spaces.h"
#include "jni.h"
#include "mirror/object.h"
#include "mirror/object_reference.h"
#include "object_callbacks.h"
#include "offsets.h"
#include "safe_map.h"

namespace art {

class RootInfo;

namespace gc {

namespace accounting {
typedef SpaceBitmap<kObjectAlignment> ContinuousSpaceBitmap;
class HeapBitmap;
}  // namespace accounting

namespace space {
class RegionSpace;
}  // namespace space

namespace collector {

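// Concurrent copying (CC) collector. In rough outline (see the phase methods and members
// below): objects are allocated in a RegionSpace, a brief pause flips mutator threads to
// to-space, marking and copying then proceed concurrently with the mutators, and read barriers
// preserve the to-space invariant so mutators never observe stale from-space references.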
class ConcurrentCopying : public GarbageCollector {
 public:
  // Enable the no-from-space-refs verification at the pause.
  static constexpr bool kEnableNoFromSpaceRefsVerification = kIsDebugBuild;
  // Enable the from-space bytes/objects check.
  static constexpr bool kEnableFromSpaceAccountingCheck = kIsDebugBuild;
  // Enable verbose mode.
  static constexpr bool kVerboseMode = false;
  // If kGrayDirtyImmuneObjects is true, dirty immune-space objects are grayed during the GC
  // pause so that concurrent marking does not dirty additional (clean) pages.
  static constexpr bool kGrayDirtyImmuneObjects = true;

  ConcurrentCopying(Heap* heap,
                    const std::string& name_prefix = "",
                    bool measure_read_barrier_slow_path = false);
  ~ConcurrentCopying();

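  // RunPhases drives a full collection. Roughly, in order: InitializePhase, a short pause that
  // flips threads to to-space (FlipThreadRoots, private below), the concurrent MarkingPhase,
  // then ReclaimPhase and FinishPhase.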
  virtual void RunPhases() OVERRIDE
      REQUIRES(!immune_gray_stack_lock_,
               !mark_stack_lock_,
               !rb_slow_path_histogram_lock_,
               !skipped_blocks_lock_);
  void InitializePhase() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !immune_gray_stack_lock_);
  void MarkingPhase() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void ReclaimPhase() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void FinishPhase() REQUIRES(!mark_stack_lock_,
                              !rb_slow_path_histogram_lock_,
                              !skipped_blocks_lock_);

  void BindBitmaps() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!Locks::heap_bitmap_lock_);
  virtual GcType GetGcType() const OVERRIDE {
    return kGcTypePartial;
  }
  virtual CollectorType GetCollectorType() const OVERRIDE {
    return kCollectorTypeCC;
  }
  virtual void RevokeAllThreadLocalBuffers() OVERRIDE;
  void SetRegionSpace(space::RegionSpace* region_space) {
    DCHECK(region_space != nullptr);
    region_space_ = region_space;
  }
  space::RegionSpace* RegionSpace() {
    return region_space_;
  }
  void AssertToSpaceInvariant(mirror::Object* obj, MemberOffset offset, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void AssertToSpaceInvariant(GcRootSource* gc_root_source, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsInToSpace(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_) {
    DCHECK(ref != nullptr);
    return IsMarked(ref) == ref;
  }
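  // Mark() maps a reference to its to-space copy, copying and/or marking as needed;
  // MarkFromReadBarrier is its entry point from the read barrier slow path. A hedged sketch of
  // the intended use (conceptual, not the literal barrier code emitted by the compiler):
  //
  //   mirror::Object* ref = /* reference loaded from a field or root */;
  //   if (kUseReadBarrier && IsMarking()) {
  //     ref = MarkFromReadBarrier(ref);  // Returns a to-space pointer.
  //   }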
  template<bool kGrayImmuneObject = true, bool kFromGCThread = false>
  ALWAYS_INLINE mirror::Object* Mark(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  ALWAYS_INLINE mirror::Object* MarkFromReadBarrier(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  bool IsMarking() const {
    return is_marking_;
  }
  bool IsActive() const {
    return is_active_;
  }
  Barrier& GetBarrier() {
    return *gc_barrier_;
  }
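  // While marking is in progress, mutator access to weak references is blocked so that system
  // weaks can be swept against a consistent mark set; weak_ref_access_enabled_ (below) tracks
  // this state.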
  bool IsWeakRefAccessEnabled() {
    return weak_ref_access_enabled_.LoadRelaxed();
  }
  void RevokeThreadLocalMarkStack(Thread* thread) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);

 private:
  void PushOntoMarkStack(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
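  // Copies from_ref into a to-space region and installs the forwarding pointer with a CAS; a
  // copy that loses the race is discarded into the skipped-blocks pool (see below).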
  mirror::Object* Copy(mirror::Object* from_ref) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void Scan(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void Process(mirror::Object* obj, MemberOffset offset)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void VisitRoots(mirror::Object*** roots, size_t count, const RootInfo& info)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  template<bool kGrayImmuneObject>
  void MarkRoot(mirror::CompressedReference<mirror::Object>* root)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void VisitRoots(mirror::CompressedReference<mirror::Object>** roots, size_t count,
                          const RootInfo& info)
      OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void VerifyNoFromSpaceReferences() REQUIRES(Locks::mutator_lock_);
  accounting::ObjectStack* GetAllocationStack();
  accounting::ObjectStack* GetLiveStack();
  virtual void ProcessMarkStack() OVERRIDE REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  bool ProcessMarkStackOnce() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void ProcessMarkStackRef(mirror::Object* to_ref) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void GrayAllDirtyImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void VerifyGrayImmuneObjects()
      REQUIRES(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  size_t ProcessThreadLocalMarkStacks(bool disable_weak_ref_access)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void RevokeThreadLocalMarkStacks(bool disable_weak_ref_access)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SwitchToSharedMarkStackMode() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void SwitchToGcExclusiveMarkStackMode() REQUIRES_SHARED(Locks::mutator_lock_);
  virtual void DelayReferenceReferent(mirror::Class* klass, mirror::Reference* reference) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);
  void ProcessReferences(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  virtual mirror::Object* MarkObject(mirror::Object* from_ref) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual void MarkHeapReference(mirror::HeapReference<mirror::Object>* from_ref) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  virtual mirror::Object* IsMarked(mirror::Object* from_ref) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsMarkedInUnevacFromSpace(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  virtual bool IsMarkedHeapReference(mirror::HeapReference<mirror::Object>* field) OVERRIDE
      REQUIRES_SHARED(Locks::mutator_lock_);
  void SweepSystemWeaks(Thread* self)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!Locks::heap_bitmap_lock_);
  void Sweep(bool swap_bitmaps)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_, !mark_stack_lock_);
  void SweepLargeObjects(bool swap_bitmaps)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(Locks::heap_bitmap_lock_);
  void MarkZygoteLargeObjects()
      REQUIRES_SHARED(Locks::mutator_lock_);
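  // When Copy loses the forwarding-pointer CAS, the to-space bytes it reserved are filled with a
  // dummy object and recorded for reuse; AllocateInSkippedBlock later satisfies copies from
  // these blocks (see skipped_blocks_map_ below).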
  void FillWithDummyObject(mirror::Object* dummy_obj, size_t byte_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* AllocateInSkippedBlock(size_t alloc_size)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void CheckEmptyMarkStack() REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  void IssueEmptyCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
  bool IsOnAllocStack(mirror::Object* ref) REQUIRES_SHARED(Locks::mutator_lock_);
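  // Returns the forwarding pointer previously installed by Copy, presumably encoded in the
  // object header (an implementation detail of the .cc file).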
  mirror::Object* GetFwdPtr(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void FlipThreadRoots() REQUIRES(!Locks::mutator_lock_);
  void SwapStacks() REQUIRES_SHARED(Locks::mutator_lock_);
  void RecordLiveStackFreezeSize(Thread* self);
  void ComputeUnevacFromSpaceLiveRatio();
  void LogFromSpaceRefHolder(mirror::Object* obj, MemberOffset offset)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void AssertToSpaceInvariantInNonMovingSpace(mirror::Object* obj, mirror::Object* ref)
      REQUIRES_SHARED(Locks::mutator_lock_);
  void ReenableWeakRefAccess(Thread* self) REQUIRES_SHARED(Locks::mutator_lock_);
  void DisableMarking() REQUIRES_SHARED(Locks::mutator_lock_);
  void IssueDisableMarkingCheckpoint() REQUIRES_SHARED(Locks::mutator_lock_);
  void ExpandGcMarkStack() REQUIRES_SHARED(Locks::mutator_lock_);
  mirror::Object* MarkNonMoving(mirror::Object* from_ref) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  ALWAYS_INLINE mirror::Object* MarkUnevacFromSpaceRegion(mirror::Object* from_ref,
      accounting::SpaceBitmap<kObjectAlignment>* bitmap)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_);
  template<bool kGrayImmuneObject>
  ALWAYS_INLINE mirror::Object* MarkImmuneSpace(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!immune_gray_stack_lock_);
  void PushOntoFalseGrayStack(mirror::Object* obj) REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void ProcessFalseGrayStack() REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_);
  void ScanImmuneObject(mirror::Object* obj)
      REQUIRES_SHARED(Locks::mutator_lock_) REQUIRES(!mark_stack_lock_);
  mirror::Object* MarkFromReadBarrierWithMeasurements(mirror::Object* from_ref)
      REQUIRES_SHARED(Locks::mutator_lock_)
      REQUIRES(!mark_stack_lock_, !skipped_blocks_lock_, !immune_gray_stack_lock_);
  void DumpPerformanceInfo(std::ostream& os) OVERRIDE REQUIRES(!rb_slow_path_histogram_lock_);

  space::RegionSpace* region_space_;  // The underlying region space.
  std::unique_ptr<Barrier> gc_barrier_;
  std::unique_ptr<accounting::ObjectStack> gc_mark_stack_;
  std::unique_ptr<accounting::ObjectStack> rb_mark_bit_stack_;
  bool rb_mark_bit_stack_full_;
  std::vector<mirror::Object*> false_gray_stack_ GUARDED_BY(mark_stack_lock_);
  Mutex mark_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<accounting::ObjectStack*> revoked_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
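  // Sizing note: each mark stack is one page, so the pool below occupies kMarkStackPoolSize *
  // kPageSize bytes, i.e. about 1 MiB assuming 4 KiB pages.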
  static constexpr size_t kMarkStackSize = kPageSize;
  static constexpr size_t kMarkStackPoolSize = 256;
  std::vector<accounting::ObjectStack*> pooled_mark_stacks_
      GUARDED_BY(mark_stack_lock_);
  Thread* thread_running_gc_;
  bool is_marking_;  // True while marking is ongoing.
  bool is_active_;  // True while the collection is ongoing.
  bool is_asserting_to_space_invariant_;  // True while asserting the to-space invariant.
  ImmuneSpaces immune_spaces_;
  accounting::SpaceBitmap<kObjectAlignment>* region_space_bitmap_;
  // A cache of Heap::GetMarkBitmap().
  accounting::HeapBitmap* heap_mark_bitmap_;
  size_t live_stack_freeze_size_;
  size_t from_space_num_objects_at_first_pause_;
  size_t from_space_num_bytes_at_first_pause_;
  Atomic<int> is_mark_stack_push_disallowed_;
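  // Lifecycle of the mark stack mode within a collection, roughly: thread-local while marking
  // runs concurrently with mutators, then shared, then GC-exclusive as marking terminates (see
  // the SwitchTo*MarkStackMode methods above), and off outside of marking.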
  enum MarkStackMode {
    kMarkStackModeOff = 0,      // Mark stack is off.
    kMarkStackModeThreadLocal,  // All threads except for the GC-running thread push refs onto
                                // thread-local mark stacks. The GC-running thread pushes onto and
                                // pops off the GC mark stack without a lock.
    kMarkStackModeShared,       // All threads share the GC mark stack with a lock.
    kMarkStackModeGcExclusive   // The GC-running thread pushes onto and pops from the GC mark
                                // stack without a lock. Other threads won't access the mark
                                // stack.
  };
  Atomic<MarkStackMode> mark_stack_mode_;
  Atomic<bool> weak_ref_access_enabled_;

  // How many objects and bytes we moved. Used for accounting.
  Atomic<size_t> bytes_moved_;
  Atomic<size_t> objects_moved_;
  Atomic<uint64_t> cumulative_bytes_moved_;
  Atomic<uint64_t> cumulative_objects_moved_;

  // The skipped blocks are memory blocks/chunks that held copies of objects that went unused due
  // to lost races (CAS failures) when installing the forwarding pointer at object copy. They are
  // reused for later allocations.
  Mutex skipped_blocks_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::multimap<size_t, uint8_t*> skipped_blocks_map_ GUARDED_BY(skipped_blocks_lock_);
  Atomic<size_t> to_space_bytes_skipped_;
  Atomic<size_t> to_space_objects_skipped_;

  // If measure_read_barrier_slow_path_ is true, we measure the time spent in MarkFromReadBarrier
  // and log it.
  bool measure_read_barrier_slow_path_;
  // mark_from_read_barrier_measurements_ is true if systrace is enabled or
  // measure_read_barrier_slow_path_ is true.
  bool mark_from_read_barrier_measurements_;
  Atomic<uint64_t> rb_slow_path_ns_;
  Atomic<uint64_t> rb_slow_path_count_;
  Atomic<uint64_t> rb_slow_path_count_gc_;
  mutable Mutex rb_slow_path_histogram_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  Histogram<uint64_t> rb_slow_path_time_histogram_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_total_ GUARDED_BY(rb_slow_path_histogram_lock_);
  uint64_t rb_slow_path_count_gc_total_ GUARDED_BY(rb_slow_path_histogram_lock_);

  accounting::ReadBarrierTable* rb_table_;
  bool force_evacuate_all_;  // True if all regions are evacuated.
  Atomic<bool> updated_all_immune_objects_;
  bool gc_grays_immune_objects_;
  Mutex immune_gray_stack_lock_ DEFAULT_MUTEX_ACQUIRED_AFTER;
  std::vector<mirror::Object*> immune_gray_stack_ GUARDED_BY(immune_gray_stack_lock_);

  class AssertToSpaceInvariantFieldVisitor;
  class AssertToSpaceInvariantObjectVisitor;
  class AssertToSpaceInvariantRefsVisitor;
  class ClearBlackPtrsVisitor;
  class ComputeUnevacFromSpaceLiveRatioVisitor;
  class DisableMarkingCheckpoint;
  class FlipCallback;
  class GrayImmuneObjectVisitor;
  class ImmuneSpaceScanObjVisitor;
  class LostCopyVisitor;
  class RefFieldsVisitor;
  class RevokeThreadLocalMarkStackCheckpoint;
  class ScopedGcGraysImmuneObjects;
  class ThreadFlipVisitor;
  class VerifyGrayImmuneObjectsVisitor;
  class VerifyNoFromSpaceRefsFieldVisitor;
  class VerifyNoFromSpaceRefsObjectVisitor;
  class VerifyNoFromSpaceRefsVisitor;

  DISALLOW_IMPLICIT_CONSTRUCTORS(ConcurrentCopying);
};

}  // namespace collector
}  // namespace gc
}  // namespace art

#endif  // ART_RUNTIME_GC_COLLECTOR_CONCURRENT_COPYING_H_