Elliott Hughes | 2faa5f1 | 2012-01-30 14:42:07 -0800 | [diff] [blame] | 1 | /* |
| 2 | * Copyright (C) 2011 The Android Open Source Project |
| 3 | * |
| 4 | * Licensed under the Apache License, Version 2.0 (the "License"); |
| 5 | * you may not use this file except in compliance with the License. |
| 6 | * You may obtain a copy of the License at |
| 7 | * |
| 8 | * http://www.apache.org/licenses/LICENSE-2.0 |
| 9 | * |
| 10 | * Unless required by applicable law or agreed to in writing, software |
| 11 | * distributed under the License is distributed on an "AS IS" BASIS, |
| 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. |
| 13 | * See the License for the specific language governing permissions and |
| 14 | * limitations under the License. |
| 15 | */ |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 16 | |
Brian Carlstrom | 578bbdc | 2011-07-21 14:07:47 -0700 | [diff] [blame] | 17 | #include "mark_sweep.h" |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 18 | |
#include <algorithm>
#include <climits>
#include <vector>

#include "class_loader.h"
#include "dex_cache.h"
#include "heap.h"
#include "indirect_reference_table.h"
#include "intern_table.h"
#include "logging.h"
#include "macros.h"
#include "mark_stack.h"
#include "monitor.h"
#include "object.h"
#include "runtime.h"
#include "space.h"
#include "thread.h"
#include "timing_logger.h"
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 36 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 37 | namespace art { |
| 38 | |
// Constructs the collector around the given mark stack, which will receive
// objects discovered behind the finger during recursive marking.
// heap_ and current_mark_bitmap_ stay NULL until Init() runs.
MarkSweep::MarkSweep(MarkStack* mark_stack)
    : current_mark_bitmap_(NULL),
      mark_stack_(mark_stack),
      heap_(NULL),
      finger_(NULL),
      condemned_(NULL),
      soft_reference_list_(NULL),
      weak_reference_list_(NULL),
      finalizer_reference_list_(NULL),
      phantom_reference_list_(NULL),
      cleared_reference_list_(NULL),
      // Debug-only tallies; incremented under #ifndef NDEBUG in ScanClass/ScanArray.
      class_count_(0), array_count_(0), other_count_(0) {
  DCHECK(mark_stack_ != NULL);  // A mark stack is mandatory.
}
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 53 | |
Mathieu Chartier | 5301cd2 | 2012-05-31 12:11:36 -0700 | [diff] [blame] | 54 | void MarkSweep::Init() { |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 55 | heap_ = Runtime::Current()->GetHeap(); |
Mathieu Chartier | 5301cd2 | 2012-05-31 12:11:36 -0700 | [diff] [blame] | 56 | mark_stack_->Reset(); |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 57 | |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 58 | const Spaces& spaces = heap_->GetSpaces(); |
| 59 | // TODO: C++0x auto |
| 60 | for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 61 | if ((*cur)->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT) { |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 62 | current_mark_bitmap_ = (*cur)->GetMarkBitmap(); |
| 63 | break; |
| 64 | } |
| 65 | } |
buzbee | 0d966cf | 2011-09-08 17:34:58 -0700 | [diff] [blame] | 66 | // TODO: if concurrent, enable card marking in compiler |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 67 | // TODO: check that the mark bitmap is entirely clear. |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 68 | } |
| 69 | |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 70 | void MarkSweep::MarkObject0(const Object* obj, bool check_finger) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 71 | DCHECK(obj != NULL); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 72 | |
| 73 | SpaceBitmap* space_bitmap = NULL; |
| 74 | // Try to take advantage of locality of references within a space, failing this find the space |
| 75 | // the hard way. |
| 76 | if (current_mark_bitmap_->HasAddress(obj)) { |
| 77 | space_bitmap = current_mark_bitmap_; |
| 78 | } else { |
| 79 | space_bitmap = heap_->GetMarkBitmap()->GetSpaceBitmap(obj); |
| 80 | } |
| 81 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 82 | if (obj < condemned_) { |
| 83 | DCHECK(IsMarked(obj)); |
| 84 | return; |
| 85 | } |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 86 | bool is_marked = space_bitmap->Test(obj); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 87 | // This object was not previously marked. |
| 88 | if (!is_marked) { |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 89 | space_bitmap->Set(obj); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 90 | if (check_finger && obj < finger_) { |
| 91 | // The object must be pushed on to the mark stack. |
| 92 | mark_stack_->Push(obj); |
| 93 | } |
| 94 | } |
| 95 | } |
| 96 | |
// Used to mark objects when recursing. Recursion is done by moving
// the finger across the bitmaps in address order and marking child
// objects. Any newly-marked objects whose addresses are lower than
// the finger won't be visited by the bitmap scan, so those objects
// need to be added to the mark stack.
void MarkSweep::MarkObject(const Object* obj) {
  // NULL references are silently ignored.
  if (obj != NULL) {
    MarkObject0(obj, true);  // true => finger check enabled.
  }
}
| 107 | |
Elliott Hughes | cf4c6c4 | 2011-09-01 15:16:42 -0700 | [diff] [blame] | 108 | void MarkSweep::MarkObjectVisitor(const Object* root, void* arg) { |
Brian Carlstrom | 1f87008 | 2011-08-23 16:02:11 -0700 | [diff] [blame] | 109 | DCHECK(root != NULL); |
| 110 | DCHECK(arg != NULL); |
| 111 | MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 112 | DCHECK(mark_sweep->finger_ == NULL); // no point to check finger if it is NULL |
| 113 | mark_sweep->MarkObject0(root, false); |
Brian Carlstrom | 1f87008 | 2011-08-23 16:02:11 -0700 | [diff] [blame] | 114 | } |
| 115 | |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 116 | void MarkSweep::ReMarkObjectVisitor(const Object* root, void* arg) { |
| 117 | DCHECK(root != NULL); |
| 118 | DCHECK(arg != NULL); |
| 119 | MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); |
| 120 | mark_sweep->MarkObject0(root, true); |
| 121 | } |
| 122 | |
// Marks all objects in the root set.
void MarkSweep::MarkRoots() {
  // Finger checking is off in MarkObjectVisitor (finger_ is still NULL here).
  Runtime::Current()->VisitRoots(MarkObjectVisitor, this);
}
| 127 | |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 128 | class CheckObjectVisitor { |
| 129 | public: |
| 130 | CheckObjectVisitor(MarkSweep* const mark_sweep) |
| 131 | : mark_sweep_(mark_sweep) { |
| 132 | |
| 133 | } |
| 134 | |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame^] | 135 | void operator ()(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) const |
| 136 | SHARED_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_, |
| 137 | GlobalSynchronization::mutator_lock_) { |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 138 | mark_sweep_->CheckReference(obj, ref, offset, is_static); |
| 139 | } |
| 140 | |
| 141 | private: |
| 142 | MarkSweep* const mark_sweep_; |
| 143 | }; |
| 144 | |
// Debug helper: validates every reference held by |obj| by running
// CheckObjectVisitor (and thus CheckReference) over its fields.
void MarkSweep::CheckObject(const Object* obj) {
  DCHECK(obj != NULL);
  CheckObjectVisitor visitor(this);
  VisitObjectReferences(obj, visitor);
}
| 150 | |
| 151 | void MarkSweep::VerifyImageRootVisitor(Object* root, void* arg) { |
| 152 | DCHECK(root != NULL); |
| 153 | DCHECK(arg != NULL); |
| 154 | MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 155 | DCHECK(mark_sweep->heap_->GetMarkBitmap()->Test(root)); |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 156 | mark_sweep->CheckObject(root); |
| 157 | } |
| 158 | |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 159 | void MarkSweep::CopyMarkBits() { |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 160 | const std::vector<Space*>& spaces = heap_->GetSpaces(); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 161 | for (size_t i = 0; i < spaces.size(); ++i) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 162 | Space* space = spaces[i]; |
| 163 | if (space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT) { |
| 164 | SpaceBitmap* live_bitmap = space->GetLiveBitmap(); |
| 165 | SpaceBitmap* mark_bitmap = space->GetMarkBitmap(); |
| 166 | DCHECK_EQ(live_bitmap->Size(), mark_bitmap->Size()); |
| 167 | std::copy(live_bitmap->Begin(), live_bitmap->Begin() + live_bitmap->Size() / kWordSize, mark_bitmap->Begin()); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 168 | } |
| 169 | } |
| 170 | } |
| 171 | |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 172 | class ScanImageRootVisitor { |
| 173 | public: |
| 174 | ScanImageRootVisitor(MarkSweep* const mark_sweep) : mark_sweep_(mark_sweep) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 175 | } |
| 176 | |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame^] | 177 | void operator ()(const Object* root) const |
| 178 | EXCLUSIVE_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_) |
| 179 | SHARED_LOCKS_REQUIRED(GlobalSynchronization::mutator_lock_) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 180 | DCHECK(root != NULL); |
| 181 | mark_sweep_->ScanObject(root); |
| 182 | } |
| 183 | |
| 184 | private: |
| 185 | MarkSweep* const mark_sweep_; |
| 186 | }; |
| 187 | |
| 188 | // Marks all objects that are in images and have been touched by the mutator |
| 189 | void MarkSweep::ScanDirtyImageRoots() { |
| 190 | const std::vector<Space*>& spaces = heap_->GetSpaces(); |
| 191 | CardTable* card_table = heap_->GetCardTable(); |
| 192 | ScanImageRootVisitor image_root_visitor(this); |
| 193 | for (size_t i = 0; i < spaces.size(); ++i) { |
| 194 | Space* space = spaces[i]; |
| 195 | if (space->IsImageSpace()) { |
| 196 | card_table->Scan(space->GetLiveBitmap(), space->Begin(), space->End(), image_root_visitor); |
| 197 | } |
| 198 | } |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 199 | } |
| 200 | |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 201 | void MarkSweep::ScanBitmapCallback(Object* obj, void* finger, void* arg) { |
| 202 | MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); |
| 203 | mark_sweep->finger_ = reinterpret_cast<Object*>(finger); |
| 204 | mark_sweep->ScanObject(obj); |
| 205 | } |
| 206 | |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 207 | void MarkSweep::ScanDirtyCardCallback(Object* obj, void* arg) { |
| 208 | MarkSweep* mark_sweep = reinterpret_cast<MarkSweep*>(arg); |
| 209 | mark_sweep->ScanObject(obj); |
| 210 | } |
| 211 | |
| 212 | void MarkSweep::ScanGrayObjects() { |
| 213 | const std::vector<Space*>& spaces = heap_->GetSpaces(); |
| 214 | CardTable* card_table = heap_->GetCardTable(); |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 215 | ScanImageRootVisitor image_root_visitor(this); |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 216 | for (size_t i = 0; i < spaces.size(); ++i) { |
| 217 | byte* begin = spaces[i]->Begin(); |
| 218 | byte* end = spaces[i]->End(); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 219 | // Image spaces are handled properly since live == marked for them. |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 220 | card_table->Scan(spaces[i]->GetMarkBitmap(), begin, end, image_root_visitor); |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 221 | } |
| 222 | } |
| 223 | |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 224 | class CheckBitmapVisitor { |
| 225 | public: |
| 226 | CheckBitmapVisitor(MarkSweep* mark_sweep) : mark_sweep_(mark_sweep) { |
| 227 | |
| 228 | } |
| 229 | |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame^] | 230 | void operator ()(const Object* obj) const |
| 231 | SHARED_LOCKS_REQUIRED(GlobalSynchronization::heap_bitmap_lock_, |
| 232 | GlobalSynchronization::mutator_lock_) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 233 | DCHECK(obj != NULL); |
| 234 | mark_sweep_->CheckObject(obj); |
| 235 | } |
| 236 | |
| 237 | private: |
| 238 | MarkSweep* mark_sweep_; |
| 239 | }; |
| 240 | |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 241 | void MarkSweep::VerifyImageRoots() { |
| 242 | // Verify roots ensures that all the references inside the image space point |
| 243 | // objects which are either in the image space or marked objects in the alloc |
| 244 | // space |
| 245 | #ifndef NDEBUG |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 246 | CheckBitmapVisitor visitor(this); |
| 247 | const Spaces& spaces = heap_->GetSpaces(); |
| 248 | for (Spaces::const_iterator it = spaces.begin(); it != spaces.end(); ++it) { |
| 249 | const Space* space = *it; |
| 250 | if (space->IsImageSpace()) { |
| 251 | uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin()); |
| 252 | uintptr_t end = reinterpret_cast<uintptr_t>(space->End()); |
| 253 | SpaceBitmap* live_bitmap = space->GetLiveBitmap(); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 254 | DCHECK(live_bitmap != NULL); |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 255 | live_bitmap->VisitMarkedRange(begin, end, visitor); |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 256 | } |
| 257 | } |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 258 | #endif |
| 259 | } |
| 260 | |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 261 | // Populates the mark stack based on the set of marked objects and |
| 262 | // recursively marks until the mark stack is emptied. |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 263 | void MarkSweep::RecursiveMark(bool partial) { |
Brian Carlstrom | 1f87008 | 2011-08-23 16:02:11 -0700 | [diff] [blame] | 264 | // RecursiveMark will build the lists of known instances of the Reference classes. |
| 265 | // See DelayReferenceReferent for details. |
| 266 | CHECK(soft_reference_list_ == NULL); |
| 267 | CHECK(weak_reference_list_ == NULL); |
| 268 | CHECK(finalizer_reference_list_ == NULL); |
| 269 | CHECK(phantom_reference_list_ == NULL); |
| 270 | CHECK(cleared_reference_list_ == NULL); |
| 271 | |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 272 | void* arg = reinterpret_cast<void*>(this); |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 273 | const Spaces& spaces = heap_->GetSpaces(); |
| 274 | |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 275 | for (size_t i = 0; i < spaces.size(); ++i) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 276 | Space* space = spaces[i]; |
| 277 | if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT || |
| 278 | (!partial && space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT) |
| 279 | ) { |
| 280 | uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin()); |
| 281 | uintptr_t end = reinterpret_cast<uintptr_t>(space->End()); |
| 282 | |
| 283 | current_mark_bitmap_ = space->GetMarkBitmap(); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 284 | current_mark_bitmap_->ScanWalk(begin, end, &ScanBitmapCallback, arg); |
Mathieu Chartier | 7664f5c | 2012-06-08 18:15:32 -0700 | [diff] [blame] | 285 | } |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 286 | } |
| 287 | finger_ = reinterpret_cast<Object*>(~0); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 288 | // TODO: tune the frequency of emptying the mark stack |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 289 | ProcessMarkStack(); |
| 290 | } |
| 291 | |
// Scans gray (dirty-card) objects in all spaces, then drains the mark stack
// so anything they newly marked is also scanned.
void MarkSweep::RecursiveMarkDirtyObjects() {
  ScanGrayObjects();
  ProcessMarkStack();
}
| 296 | |
// Visits the root set again with the finger check enabled, so roots behind
// the finger are pushed onto the mark stack (see ReMarkObjectVisitor).
void MarkSweep::ReMarkRoots() {
  Runtime::Current()->VisitRoots(ReMarkObjectVisitor, this);
}
| 300 | |
Mathieu Chartier | 46a2363 | 2012-08-07 18:44:40 -0700 | [diff] [blame] | 301 | void MarkSweep::SweepJniWeakGlobals(HeapBitmap* bitmap) { |
Elliott Hughes | 410c0c8 | 2011-09-01 17:58:25 -0700 | [diff] [blame] | 302 | JavaVMExt* vm = Runtime::Current()->GetJavaVM(); |
| 303 | MutexLock mu(vm->weak_globals_lock); |
| 304 | IndirectReferenceTable* table = &vm->weak_globals; |
Mathieu Chartier | 654d3a2 | 2012-07-11 17:54:18 -0700 | [diff] [blame] | 305 | typedef IndirectReferenceTable::iterator It; // TODO: C++0x auto |
Elliott Hughes | 410c0c8 | 2011-09-01 17:58:25 -0700 | [diff] [blame] | 306 | for (It it = table->begin(), end = table->end(); it != end; ++it) { |
| 307 | const Object** entry = *it; |
Mathieu Chartier | 46a2363 | 2012-08-07 18:44:40 -0700 | [diff] [blame] | 308 | if (!bitmap->Test(*entry)) { |
Elliott Hughes | 410c0c8 | 2011-09-01 17:58:25 -0700 | [diff] [blame] | 309 | *entry = kClearedJniWeakGlobal; |
| 310 | } |
| 311 | } |
| 312 | } |
| 313 | |
Mathieu Chartier | 46a2363 | 2012-08-07 18:44:40 -0700 | [diff] [blame] | 314 | void MarkSweep::SweepSystemWeaks(bool swap_bitmaps) { |
| 315 | Runtime* runtime = Runtime::Current(); |
| 316 | runtime->GetInternTable()->SweepInternTableWeaks(swap_bitmaps ? IsLiveCallback : IsMarkedCallback, |
| 317 | this); |
| 318 | runtime->GetMonitorList()->SweepMonitorList(swap_bitmaps ? IsLiveCallback : IsMarkedCallback, |
| 319 | this); |
| 320 | SweepJniWeakGlobals(swap_bitmaps ? GetHeap()->GetLiveBitmap() : GetHeap()->GetMarkBitmap()); |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 321 | } |
| 322 | |
// State threaded through the void* argument of the bitmap sweep callbacks
// (SweepCallback, ZygoteSweepCallback).
struct SweepCallbackContext {
  Heap* heap;         // Heap whose free statistics are updated (RecordFree).
  AllocSpace* space;  // Space the swept objects are freed back into.
};
| 327 | |
Ian Rogers | 30fab40 | 2012-01-23 15:43:46 -0800 | [diff] [blame] | 328 | void MarkSweep::SweepCallback(size_t num_ptrs, Object** ptrs, void* arg) { |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame^] | 329 | GlobalSynchronization::heap_bitmap_lock_->AssertExclusiveHeld(); |
Mathieu Chartier | 654d3a2 | 2012-07-11 17:54:18 -0700 | [diff] [blame] | 330 | |
Elliott Hughes | 307f75d | 2011-10-12 18:04:40 -0700 | [diff] [blame] | 331 | size_t freed_objects = num_ptrs; |
| 332 | size_t freed_bytes = 0; |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 333 | SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg); |
| 334 | Heap* heap = context->heap; |
| 335 | AllocSpace* space = context->space; |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 336 | // Use a bulk free, that merges consecutive objects before freeing or free per object? |
| 337 | // Documentation suggests better free performance with merging, but this may be at the expensive |
| 338 | // of allocation. |
| 339 | // TODO: investigate performance |
Ian Rogers | 30fab40 | 2012-01-23 15:43:46 -0800 | [diff] [blame] | 340 | static const bool kUseFreeList = true; |
| 341 | if (kUseFreeList) { |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 342 | for (size_t i = 0; i < num_ptrs; ++i) { |
| 343 | Object* obj = static_cast<Object*>(ptrs[i]); |
Ian Rogers | 30fab40 | 2012-01-23 15:43:46 -0800 | [diff] [blame] | 344 | freed_bytes += space->AllocationSize(obj); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 345 | } |
Ian Rogers | 30fab40 | 2012-01-23 15:43:46 -0800 | [diff] [blame] | 346 | // AllocSpace::FreeList clears the value in ptrs, so perform after clearing the live bit |
| 347 | space->FreeList(num_ptrs, ptrs); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 348 | } else { |
| 349 | for (size_t i = 0; i < num_ptrs; ++i) { |
| 350 | Object* obj = static_cast<Object*>(ptrs[i]); |
Ian Rogers | 30fab40 | 2012-01-23 15:43:46 -0800 | [diff] [blame] | 351 | freed_bytes += space->AllocationSize(obj); |
Ian Rogers | 30fab40 | 2012-01-23 15:43:46 -0800 | [diff] [blame] | 352 | space->Free(obj); |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 353 | } |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 354 | } |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame^] | 355 | heap->RecordFree(freed_objects, freed_bytes); |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 356 | } |
| 357 | |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 358 | void MarkSweep::ZygoteSweepCallback(size_t num_ptrs, Object** ptrs, void* arg) { |
Ian Rogers | 00f7d0e | 2012-07-19 15:28:27 -0700 | [diff] [blame^] | 359 | GlobalSynchronization::heap_bitmap_lock_->AssertExclusiveHeld(); |
| 360 | |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 361 | SweepCallbackContext* context = static_cast<SweepCallbackContext*>(arg); |
| 362 | Heap* heap = context->heap; |
| 363 | // We don't free any actual memory to avoid dirtying the shared zygote pages. |
| 364 | for (size_t i = 0; i < num_ptrs; ++i) { |
| 365 | Object* obj = static_cast<Object*>(ptrs[i]); |
| 366 | heap->GetLiveBitmap()->Clear(obj); |
| 367 | heap->GetCardTable()->MarkCard(obj); |
| 368 | } |
| 369 | } |
| 370 | |
| 371 | void MarkSweep::Sweep(bool partial) { |
Mathieu Chartier | 46a2363 | 2012-08-07 18:44:40 -0700 | [diff] [blame] | 372 | // If we don't swap bitmaps then we can not do this concurrently. |
| 373 | SweepSystemWeaks(true); |
Elliott Hughes | 2da5036 | 2011-10-10 16:57:08 -0700 | [diff] [blame] | 374 | |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 375 | DCHECK(mark_stack_->IsEmpty()); |
| 376 | |
Mathieu Chartier | 46a2363 | 2012-08-07 18:44:40 -0700 | [diff] [blame] | 377 | const Spaces& spaces = heap_->GetSpaces(); |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 378 | SweepCallbackContext scc; |
| 379 | scc.heap = heap_; |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 380 | for (size_t i = 0; i < spaces.size(); ++i) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 381 | Space* space = spaces[i]; |
| 382 | if ( |
| 383 | space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT || |
| 384 | (!partial && space->GetGcRetentionPolicy() == GCRP_FULL_COLLECT) |
| 385 | ) { |
| 386 | uintptr_t begin = reinterpret_cast<uintptr_t>(space->Begin()); |
| 387 | uintptr_t end = reinterpret_cast<uintptr_t>(space->End()); |
| 388 | scc.space = space->AsAllocSpace(); |
| 389 | SpaceBitmap* live_bitmap = space->GetLiveBitmap(); |
| 390 | SpaceBitmap* mark_bitmap = space->GetMarkBitmap(); |
| 391 | if (space->GetGcRetentionPolicy() == GCRP_ALWAYS_COLLECT) { |
| 392 | // Bitmaps are pre-swapped for optimization which enables sweeping with the heap unlocked. |
| 393 | SpaceBitmap::SweepWalk( |
| 394 | *mark_bitmap, *live_bitmap, begin, end, &SweepCallback, reinterpret_cast<void*>(&scc)); |
| 395 | } else { |
| 396 | // Zygote sweep takes care of dirtying cards and clearing live bits, does not free actual memory. |
| 397 | SpaceBitmap::SweepWalk( |
| 398 | *live_bitmap, *mark_bitmap, begin, end, &ZygoteSweepCallback, reinterpret_cast<void*>(&scc)); |
| 399 | } |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 400 | } |
| 401 | } |
| 402 | } |
| 403 | |
// Scans instance fields.
inline void MarkSweep::ScanInstanceFields(const Object* obj) {
  DCHECK(obj != NULL);
  Class* klass = obj->GetClass();
  DCHECK(klass != NULL);
  // false => instance (not static) field walk.
  ScanFields(obj, klass->GetReferenceInstanceOffsets(), false);
}
| 411 | |
// Scans static storage on a Class.
inline void MarkSweep::ScanStaticFields(const Class* klass) {
  DCHECK(klass != NULL);
  // true => static field walk; the class itself is the "object" scanned.
  ScanFields(klass, klass->GetReferenceStaticOffsets(), true);
}
| 417 | |
// Marks every object referenced by a field of |obj|. |ref_offsets| is the
// class's packed reference-offset bitmap; the CLASS_WALK_SUPER sentinel means
// the bitmap could not encode all offsets and the fields must be walked via
// reflection-style class metadata instead.
inline void MarkSweep::ScanFields(const Object* obj, uint32_t ref_offsets, bool is_static) {
  if (ref_offsets != CLASS_WALK_SUPER) {
    // Found a reference offset bitmap. Mark the specified offsets.
    while (ref_offsets != 0) {
      // Highest set bit -> field byte offset (see CLASS_OFFSET_FROM_CLZ).
      size_t right_shift = CLZ(ref_offsets);
      MemberOffset byte_offset = CLASS_OFFSET_FROM_CLZ(right_shift);
      const Object* ref = obj->GetFieldObject<const Object*>(byte_offset, false);
      MarkObject(ref);
      // Clear the bit just processed so the loop terminates.
      ref_offsets &= ~(CLASS_HIGH_BIT >> right_shift);
    }
  } else {
    // There is no reference offset bitmap. In the non-static case,
    // walk up the class inheritance hierarchy and find reference
    // offsets the hard way. In the static case, just consider this
    // class.
    for (const Class* klass = is_static ? obj->AsClass() : obj->GetClass();
         klass != NULL;
         klass = is_static ? NULL : klass->GetSuperClass()) {
      size_t num_reference_fields = (is_static
                                     ? klass->NumReferenceStaticFields()
                                     : klass->NumReferenceInstanceFields());
      for (size_t i = 0; i < num_reference_fields; ++i) {
        Field* field = (is_static
                        ? klass->GetStaticField(i)
                        : klass->GetInstanceField(i));
        MemberOffset field_offset = field->GetOffset();
        const Object* ref = obj->GetFieldObject<const Object*>(field_offset, false);
        MarkObject(ref);
      }
    }
  }
}
| 450 | |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 451 | void MarkSweep::CheckReference(const Object* obj, const Object* ref, MemberOffset offset, bool is_static) { |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 452 | const Spaces& spaces = heap_->GetSpaces(); |
| 453 | // TODO: C++0x auto |
| 454 | for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) { |
| 455 | if ((*cur)->IsAllocSpace() && (*cur)->Contains(ref)) { |
| 456 | DCHECK(IsMarked(obj)); |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 457 | |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 458 | bool is_marked = IsMarked(ref); |
| 459 | if (!is_marked) { |
| 460 | LOG(INFO) << **cur; |
| 461 | LOG(WARNING) << (is_static ? "Static ref'" : "Instance ref'") << PrettyTypeOf(ref) |
| 462 | << "' (" << reinterpret_cast<const void*>(ref) << ") in '" << PrettyTypeOf(obj) |
| 463 | << "' (" << reinterpret_cast<const void*>(obj) << ") at offset " |
| 464 | << reinterpret_cast<void*>(offset.Int32Value()) << " wasn't marked"; |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 465 | |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 466 | const Class* klass = is_static ? obj->AsClass() : obj->GetClass(); |
| 467 | DCHECK(klass != NULL); |
| 468 | const ObjectArray<Field>* fields = is_static ? klass->GetSFields() : klass->GetIFields(); |
| 469 | DCHECK(fields != NULL); |
| 470 | bool found = false; |
| 471 | for (int32_t i = 0; i < fields->GetLength(); ++i) { |
| 472 | const Field* cur = fields->Get(i); |
| 473 | if (cur->GetOffset().Int32Value() == offset.Int32Value()) { |
| 474 | LOG(WARNING) << "Field referencing the alloc space was " << PrettyField(cur); |
| 475 | found = true; |
| 476 | break; |
| 477 | } |
| 478 | } |
| 479 | if (!found) { |
| 480 | LOG(WARNING) << "Could not find field in object alloc space with offset " << offset.Int32Value(); |
| 481 | } |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 482 | |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 483 | bool obj_marked = heap_->GetCardTable()->IsDirty(obj); |
| 484 | if (!obj_marked) { |
| 485 | LOG(WARNING) << "Object '" << PrettyTypeOf(obj) << "' " |
| 486 | << "(" << reinterpret_cast<const void*>(obj) << ") contains references to " |
| 487 | << "the alloc space, but wasn't card marked"; |
Mathieu Chartier | 262e5ff | 2012-06-01 17:35:38 -0700 | [diff] [blame] | 488 | } |
| 489 | } |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 490 | } |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 491 | break; |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 492 | } |
| 493 | } |
| 494 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 495 | // Scans the header, static field references, and interface pointers |
| 496 | // of a class object. |
Elliott Hughes | b066311 | 2011-10-19 18:16:37 -0700 | [diff] [blame] | 497 | inline void MarkSweep::ScanClass(const Object* obj) { |
Elliott Hughes | 352a424 | 2011-10-31 15:15:21 -0700 | [diff] [blame] | 498 | #ifndef NDEBUG |
| 499 | ++class_count_; |
| 500 | #endif |
Brian Carlstrom | 693267a | 2011-09-06 09:25:34 -0700 | [diff] [blame] | 501 | ScanInstanceFields(obj); |
Brian Carlstrom | 40381fb | 2011-10-19 14:13:40 -0700 | [diff] [blame] | 502 | ScanStaticFields(obj->AsClass()); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 503 | } |
| 504 | |
| 505 | // Scans the header of all array objects. If the array object is |
| 506 | // specialized to a reference type, scans the array data as well. |
Elliott Hughes | b066311 | 2011-10-19 18:16:37 -0700 | [diff] [blame] | 507 | inline void MarkSweep::ScanArray(const Object* obj) { |
Elliott Hughes | 352a424 | 2011-10-31 15:15:21 -0700 | [diff] [blame] | 508 | #ifndef NDEBUG |
| 509 | ++array_count_; |
| 510 | #endif |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 511 | MarkObject(obj->GetClass()); |
| 512 | if (obj->IsObjectArray()) { |
Brian Carlstrom | db4d540 | 2011-08-09 12:18:28 -0700 | [diff] [blame] | 513 | const ObjectArray<Object>* array = obj->AsObjectArray<Object>(); |
Elliott Hughes | d8ddfd5 | 2011-08-15 14:32:53 -0700 | [diff] [blame] | 514 | for (int32_t i = 0; i < array->GetLength(); ++i) { |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 515 | const Object* element = array->GetWithoutChecks(i); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 516 | MarkObject(element); |
| 517 | } |
| 518 | } |
| 519 | } |
| 520 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 521 | // Process the "referent" field in a java.lang.ref.Reference. If the |
| 522 | // referent has not yet been marked, put it on the appropriate list in |
| 523 | // the gcHeap for later processing. |
| 524 | void MarkSweep::DelayReferenceReferent(Object* obj) { |
| 525 | DCHECK(obj != NULL); |
Brian Carlstrom | 1f87008 | 2011-08-23 16:02:11 -0700 | [diff] [blame] | 526 | Class* klass = obj->GetClass(); |
| 527 | DCHECK(klass != NULL); |
Ian Rogers | 0cfe1fb | 2011-08-26 03:29:44 -0700 | [diff] [blame] | 528 | DCHECK(klass->IsReferenceClass()); |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 529 | Object* pending = obj->GetFieldObject<Object*>(heap_->GetReferencePendingNextOffset(), false); |
| 530 | Object* referent = heap_->GetReferenceReferent(obj); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 531 | if (pending == NULL && referent != NULL && !IsMarked(referent)) { |
Brian Carlstrom | 4873d46 | 2011-08-21 15:23:39 -0700 | [diff] [blame] | 532 | Object** list = NULL; |
Ian Rogers | 0cfe1fb | 2011-08-26 03:29:44 -0700 | [diff] [blame] | 533 | if (klass->IsSoftReferenceClass()) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 534 | list = &soft_reference_list_; |
Ian Rogers | 0cfe1fb | 2011-08-26 03:29:44 -0700 | [diff] [blame] | 535 | } else if (klass->IsWeakReferenceClass()) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 536 | list = &weak_reference_list_; |
Ian Rogers | 0cfe1fb | 2011-08-26 03:29:44 -0700 | [diff] [blame] | 537 | } else if (klass->IsFinalizerReferenceClass()) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 538 | list = &finalizer_reference_list_; |
Ian Rogers | 0cfe1fb | 2011-08-26 03:29:44 -0700 | [diff] [blame] | 539 | } else if (klass->IsPhantomReferenceClass()) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 540 | list = &phantom_reference_list_; |
| 541 | } |
Brian Carlstrom | 0796af0 | 2011-10-12 14:31:45 -0700 | [diff] [blame] | 542 | DCHECK(list != NULL) << PrettyClass(klass) << " " << std::hex << klass->GetAccessFlags(); |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 543 | heap_->EnqueuePendingReference(obj, list); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 544 | } |
| 545 | } |
| 546 | |
| 547 | // Scans the header and field references of a data object. If the |
| 548 | // scanned object is a reference subclass, it is scheduled for later |
Elliott Hughes | adb460d | 2011-10-05 17:02:34 -0700 | [diff] [blame] | 549 | // processing. |
Elliott Hughes | b066311 | 2011-10-19 18:16:37 -0700 | [diff] [blame] | 550 | inline void MarkSweep::ScanOther(const Object* obj) { |
Elliott Hughes | 352a424 | 2011-10-31 15:15:21 -0700 | [diff] [blame] | 551 | #ifndef NDEBUG |
| 552 | ++other_count_; |
| 553 | #endif |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 554 | ScanInstanceFields(obj); |
Elliott Hughes | 352a424 | 2011-10-31 15:15:21 -0700 | [diff] [blame] | 555 | if (obj->GetClass()->IsReferenceClass()) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 556 | DelayReferenceReferent(const_cast<Object*>(obj)); |
| 557 | } |
| 558 | } |
| 559 | |
| 560 | // Scans an object reference. Determines the type of the reference |
| 561 | // and dispatches to a specialized scanning routine. |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 562 | void MarkSweep::ScanObject(const Object* obj) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 563 | DCHECK(obj != NULL); |
| 564 | DCHECK(obj->GetClass() != NULL); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 565 | DCHECK(heap_->GetMarkBitmap()->Test(obj)); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 566 | if (obj->IsClass()) { |
| 567 | ScanClass(obj); |
Brian Carlstrom | b63ec39 | 2011-08-27 17:38:27 -0700 | [diff] [blame] | 568 | } else if (obj->IsArrayInstance()) { |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 569 | ScanArray(obj); |
| 570 | } else { |
Carl Shapiro | 58551df | 2011-07-24 03:09:51 -0700 | [diff] [blame] | 571 | ScanOther(obj); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 572 | } |
| 573 | } |
| 574 | |
Ian Rogers | 5d76c43 | 2011-10-31 21:42:49 -0700 | [diff] [blame] | 575 | // Scan anything that's on the mark stack. |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 576 | void MarkSweep::ProcessMarkStack() { |
| 577 | while (!mark_stack_->IsEmpty()) { |
Brian Carlstrom | 4873d46 | 2011-08-21 15:23:39 -0700 | [diff] [blame] | 578 | const Object* obj = mark_stack_->Pop(); |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 579 | DCHECK(obj != NULL); |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 580 | ScanObject(obj); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 581 | } |
| 582 | } |
| 583 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 584 | // Walks the reference list marking any references subject to the |
| 585 | // reference clearing policy. References with a black referent are |
| 586 | // removed from the list. References with white referents biased |
| 587 | // toward saving are blackened and also removed from the list. |
| 588 | void MarkSweep::PreserveSomeSoftReferences(Object** list) { |
| 589 | DCHECK(list != NULL); |
| 590 | Object* clear = NULL; |
| 591 | size_t counter = 0; |
Mathieu Chartier | b43b7d4 | 2012-06-19 13:15:09 -0700 | [diff] [blame] | 592 | |
| 593 | DCHECK(mark_stack_->IsEmpty()); |
| 594 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 595 | while (*list != NULL) { |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 596 | Object* ref = heap_->DequeuePendingReference(list); |
| 597 | Object* referent = heap_->GetReferenceReferent(ref); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 598 | if (referent == NULL) { |
| 599 | // Referent was cleared by the user during marking. |
| 600 | continue; |
| 601 | } |
| 602 | bool is_marked = IsMarked(referent); |
| 603 | if (!is_marked && ((++counter) & 1)) { |
| 604 | // Referent is white and biased toward saving, mark it. |
| 605 | MarkObject(referent); |
| 606 | is_marked = true; |
| 607 | } |
| 608 | if (!is_marked) { |
| 609 | // Referent is white, queue it for clearing. |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 610 | heap_->EnqueuePendingReference(ref, &clear); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 611 | } |
| 612 | } |
| 613 | *list = clear; |
| 614 | // Restart the mark with the newly black references added to the |
| 615 | // root set. |
| 616 | ProcessMarkStack(); |
| 617 | } |
| 618 | |
| 619 | // Unlink the reference list clearing references objects with white |
| 620 | // referents. Cleared references registered to a reference queue are |
| 621 | // scheduled for appending by the heap worker thread. |
| 622 | void MarkSweep::ClearWhiteReferences(Object** list) { |
| 623 | DCHECK(list != NULL); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 624 | while (*list != NULL) { |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 625 | Object* ref = heap_->DequeuePendingReference(list); |
| 626 | Object* referent = heap_->GetReferenceReferent(ref); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 627 | if (referent != NULL && !IsMarked(referent)) { |
| 628 | // Referent is white, clear it. |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 629 | heap_->ClearReferenceReferent(ref); |
| 630 | if (heap_->IsEnqueuable(ref)) { |
| 631 | heap_->EnqueueReference(ref, &cleared_reference_list_); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 632 | } |
| 633 | } |
| 634 | } |
| 635 | DCHECK(*list == NULL); |
| 636 | } |
| 637 | |
| 638 | // Enqueues finalizer references with white referents. White |
| 639 | // referents are blackened, moved to the zombie field, and the |
| 640 | // referent field is cleared. |
| 641 | void MarkSweep::EnqueueFinalizerReferences(Object** list) { |
| 642 | DCHECK(list != NULL); |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 643 | MemberOffset zombie_offset = heap_->GetFinalizerReferenceZombieOffset(); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 644 | bool has_enqueued = false; |
| 645 | while (*list != NULL) { |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 646 | Object* ref = heap_->DequeuePendingReference(list); |
| 647 | Object* referent = heap_->GetReferenceReferent(ref); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 648 | if (referent != NULL && !IsMarked(referent)) { |
| 649 | MarkObject(referent); |
| 650 | // If the referent is non-null the reference must queuable. |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 651 | DCHECK(heap_->IsEnqueuable(ref)); |
Ian Rogers | 0cfe1fb | 2011-08-26 03:29:44 -0700 | [diff] [blame] | 652 | ref->SetFieldObject(zombie_offset, referent, false); |
Elliott Hughes | b3bd5f0 | 2012-03-08 21:05:27 -0800 | [diff] [blame] | 653 | heap_->ClearReferenceReferent(ref); |
| 654 | heap_->EnqueueReference(ref, &cleared_reference_list_); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 655 | has_enqueued = true; |
| 656 | } |
| 657 | } |
| 658 | if (has_enqueued) { |
| 659 | ProcessMarkStack(); |
| 660 | } |
| 661 | DCHECK(*list == NULL); |
| 662 | } |
| 663 | |
// Process reference class instances and schedule finalizations.
//
// The four lists hold the pending java.lang.ref.Reference instances
// collected by DelayReferenceReferent during marking, partitioned by
// reference strength. Each list is fully consumed; all are empty on
// return. The ordering of the steps below is significant.
void MarkSweep::ProcessReferences(Object** soft_references, bool clear_soft,
                                  Object** weak_references,
                                  Object** finalizer_references,
                                  Object** phantom_references) {
  DCHECK(soft_references != NULL);
  DCHECK(weak_references != NULL);
  DCHECK(finalizer_references != NULL);
  DCHECK(phantom_references != NULL);

  // Unless we are in the zygote or required to clear soft references
  // with white references, preserve some white referents.
  if (!clear_soft && !Runtime::Current()->IsZygote()) {
    PreserveSomeSoftReferences(soft_references);
  }

  // Clear all remaining soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Preserve all white objects with finalize methods and schedule
  // them for finalization.  This may blacken referents and re-run the
  // mark phase (see EnqueueFinalizerReferences).
  EnqueueFinalizerReferences(finalizer_references);

  // Clear all f-reachable soft and weak references with white
  // referents.
  ClearWhiteReferences(soft_references);
  ClearWhiteReferences(weak_references);

  // Clear all phantom references with white referents.
  ClearWhiteReferences(phantom_references);

  // At this point all reference lists should be empty.
  DCHECK(*soft_references == NULL);
  DCHECK(*weak_references == NULL);
  DCHECK(*finalizer_references == NULL);
  DCHECK(*phantom_references == NULL);
}
| 703 | |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 704 | MarkSweep::~MarkSweep() { |
Elliott Hughes | 352a424 | 2011-10-31 15:15:21 -0700 | [diff] [blame] | 705 | #ifndef NDEBUG |
Elliott Hughes | 4dd9b4d | 2011-12-12 18:29:24 -0800 | [diff] [blame] | 706 | VLOG(heap) << "MarkSweep scanned classes=" << class_count_ << " arrays=" << array_count_ << " other=" << other_count_; |
Elliott Hughes | 352a424 | 2011-10-31 15:15:21 -0700 | [diff] [blame] | 707 | #endif |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 708 | // Clear all of the alloc spaces' mark bitmaps. |
| 709 | const Spaces& spaces = heap_->GetSpaces(); |
| 710 | // TODO: C++0x auto |
| 711 | for (Spaces::const_iterator cur = spaces.begin(); cur != spaces.end(); ++cur) { |
Mathieu Chartier | cc236d7 | 2012-07-20 10:29:05 -0700 | [diff] [blame] | 712 | if ((*cur)->GetGcRetentionPolicy() != GCRP_NEVER_COLLECT) { |
Mathieu Chartier | b062fdd | 2012-07-03 09:51:48 -0700 | [diff] [blame] | 713 | (*cur)->GetMarkBitmap()->Clear(); |
| 714 | } |
| 715 | } |
Mathieu Chartier | 5301cd2 | 2012-05-31 12:11:36 -0700 | [diff] [blame] | 716 | mark_stack_->Reset(); |
Carl Shapiro | 69759ea | 2011-07-21 18:13:35 -0700 | [diff] [blame] | 717 | } |
| 718 | |
| 719 | } // namespace art |