Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 1 | // Copyright 2016 the V8 project authors. All rights reserved. |
| 2 | // Use of this source code is governed by a BSD-style license that can be |
| 3 | // found in the LICENSE file. |
| 4 | |
| 5 | #ifndef V8_REMEMBERED_SET_H |
| 6 | #define V8_REMEMBERED_SET_H |
| 7 | |
| 8 | #include "src/heap/heap.h" |
| 9 | #include "src/heap/slot-set.h" |
| 10 | #include "src/heap/spaces.h" |
| 11 | |
| 12 | namespace v8 { |
| 13 | namespace internal { |
| 14 | |
// Direction of a recorded slot: OLD_TO_OLD slots point between old-generation
// pages; OLD_TO_NEW slots point into the young generation (see Wrapper(),
// which filters OLD_TO_NEW slots by from-/to-space membership).
enum PointerDirection { OLD_TO_OLD, OLD_TO_NEW };
| 16 | |
| 17 | template <PointerDirection direction> |
| 18 | class RememberedSet { |
| 19 | public: |
| 20 | // Given a page and a slot in that page, this function adds the slot to the |
| 21 | // remembered set. |
| 22 | static void Insert(Page* page, Address slot_addr) { |
| 23 | DCHECK(page->Contains(slot_addr)); |
| 24 | SlotSet* slot_set = GetSlotSet(page); |
| 25 | if (slot_set == nullptr) { |
| 26 | slot_set = AllocateSlotSet(page); |
| 27 | } |
| 28 | uintptr_t offset = slot_addr - page->address(); |
| 29 | slot_set[offset / Page::kPageSize].Insert(offset % Page::kPageSize); |
| 30 | } |
| 31 | |
| 32 | // Given a page and a slot in that page, this function removes the slot from |
| 33 | // the remembered set. |
| 34 | // If the slot was never added, then the function does nothing. |
| 35 | static void Remove(Page* page, Address slot_addr) { |
| 36 | DCHECK(page->Contains(slot_addr)); |
| 37 | SlotSet* slot_set = GetSlotSet(page); |
| 38 | if (slot_set != nullptr) { |
| 39 | uintptr_t offset = slot_addr - page->address(); |
| 40 | slot_set[offset / Page::kPageSize].Remove(offset % Page::kPageSize); |
| 41 | } |
| 42 | } |
| 43 | |
| 44 | // Given a page and a range of slots in that page, this function removes the |
| 45 | // slots from the remembered set. |
| 46 | static void RemoveRange(Page* page, Address start, Address end) { |
| 47 | SlotSet* slot_set = GetSlotSet(page); |
| 48 | if (slot_set != nullptr) { |
| 49 | uintptr_t start_offset = start - page->address(); |
| 50 | uintptr_t end_offset = end - page->address(); |
| 51 | DCHECK_LT(start_offset, end_offset); |
| 52 | DCHECK_LE(end_offset, static_cast<uintptr_t>(Page::kPageSize)); |
| 53 | slot_set->RemoveRange(static_cast<uint32_t>(start_offset), |
| 54 | static_cast<uint32_t>(end_offset)); |
| 55 | } |
| 56 | } |
| 57 | |
| 58 | // Iterates and filters the remembered set with the given callback. |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 59 | // The callback should take (Address slot) and return SlotCallbackResult. |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 60 | template <typename Callback> |
| 61 | static void Iterate(Heap* heap, Callback callback) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 62 | IterateMemoryChunks( |
| 63 | heap, [callback](MemoryChunk* chunk) { Iterate(chunk, callback); }); |
| 64 | } |
| 65 | |
| 66 | // Iterates over all memory chunks that contains non-empty slot sets. |
| 67 | // The callback should take (MemoryChunk* chunk) and return void. |
| 68 | template <typename Callback> |
| 69 | static void IterateMemoryChunks(Heap* heap, Callback callback) { |
| 70 | MemoryChunkIterator it(heap, direction == OLD_TO_OLD |
| 71 | ? MemoryChunkIterator::ALL |
| 72 | : MemoryChunkIterator::ALL_BUT_CODE_SPACE); |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 73 | MemoryChunk* chunk; |
| 74 | while ((chunk = it.next()) != nullptr) { |
| 75 | SlotSet* slots = GetSlotSet(chunk); |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 76 | TypedSlotSet* typed_slots = GetTypedSlotSet(chunk); |
| 77 | if (slots != nullptr || typed_slots != nullptr) { |
| 78 | callback(chunk); |
| 79 | } |
| 80 | } |
| 81 | } |
| 82 | |
| 83 | // Iterates and filters the remembered set in the given memory chunk with |
| 84 | // the given callback. The callback should take (Address slot) and return |
| 85 | // SlotCallbackResult. |
| 86 | template <typename Callback> |
| 87 | static void Iterate(MemoryChunk* chunk, Callback callback) { |
| 88 | SlotSet* slots = GetSlotSet(chunk); |
| 89 | if (slots != nullptr) { |
| 90 | size_t pages = (chunk->size() + Page::kPageSize - 1) / Page::kPageSize; |
| 91 | int new_count = 0; |
| 92 | for (size_t page = 0; page < pages; page++) { |
| 93 | new_count += slots[page].Iterate(callback); |
| 94 | } |
| 95 | if (new_count == 0) { |
| 96 | ReleaseSlotSet(chunk); |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 97 | } |
| 98 | } |
| 99 | } |
| 100 | |
| 101 | // Iterates and filters the remembered set with the given callback. |
| 102 | // The callback should take (HeapObject** slot, HeapObject* target) and |
| 103 | // update the slot. |
| 104 | // A special wrapper takes care of filtering the slots based on their values. |
| 105 | // For OLD_TO_NEW case: slots that do not point to the ToSpace after |
| 106 | // callback invocation will be removed from the set. |
| 107 | template <typename Callback> |
| 108 | static void IterateWithWrapper(Heap* heap, Callback callback) { |
| 109 | Iterate(heap, [heap, callback](Address addr) { |
| 110 | return Wrapper(heap, addr, callback); |
| 111 | }); |
| 112 | } |
| 113 | |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 114 | template <typename Callback> |
| 115 | static void IterateWithWrapper(Heap* heap, MemoryChunk* chunk, |
| 116 | Callback callback) { |
| 117 | Iterate(chunk, [heap, callback](Address addr) { |
| 118 | return Wrapper(heap, addr, callback); |
| 119 | }); |
| 120 | } |
| 121 | |
| 122 | // Given a page and a typed slot in that page, this function adds the slot |
| 123 | // to the remembered set. |
| 124 | static void InsertTyped(Page* page, SlotType slot_type, Address slot_addr) { |
| 125 | STATIC_ASSERT(direction == OLD_TO_OLD); |
| 126 | TypedSlotSet* slot_set = page->typed_old_to_old_slots(); |
| 127 | if (slot_set == nullptr) { |
| 128 | page->AllocateTypedOldToOldSlots(); |
| 129 | slot_set = page->typed_old_to_old_slots(); |
| 130 | } |
| 131 | uintptr_t offset = slot_addr - page->address(); |
| 132 | DCHECK_LT(offset, static_cast<uintptr_t>(TypedSlotSet::kMaxOffset)); |
| 133 | slot_set->Insert(slot_type, static_cast<uint32_t>(offset)); |
| 134 | } |
| 135 | |
| 136 | // Given a page and a range of typed slots in that page, this function removes |
| 137 | // the slots from the remembered set. |
| 138 | static void RemoveRangeTyped(Page* page, Address start, Address end) { |
| 139 | TypedSlotSet* slots = page->typed_old_to_old_slots(); |
| 140 | if (slots != nullptr) { |
| 141 | slots->Iterate([start, end](SlotType slot_type, Address slot_addr) { |
| 142 | return start <= slot_addr && slot_addr < end ? REMOVE_SLOT : KEEP_SLOT; |
| 143 | }); |
| 144 | } |
| 145 | } |
| 146 | |
| 147 | // Iterates and filters typed old to old pointers in the given memory chunk |
| 148 | // with the given callback. The callback should take (SlotType slot_type, |
| 149 | // Address slot_addr) and return SlotCallbackResult. |
| 150 | template <typename Callback> |
| 151 | static void IterateTyped(MemoryChunk* chunk, Callback callback) { |
| 152 | TypedSlotSet* slots = chunk->typed_old_to_old_slots(); |
| 153 | if (slots != nullptr) { |
| 154 | int new_count = slots->Iterate(callback); |
| 155 | if (new_count == 0) { |
| 156 | chunk->ReleaseTypedOldToOldSlots(); |
| 157 | } |
| 158 | } |
| 159 | } |
| 160 | |
| 161 | // Clear all old to old slots from the remembered set. |
| 162 | static void ClearAll(Heap* heap) { |
| 163 | STATIC_ASSERT(direction == OLD_TO_OLD); |
| 164 | MemoryChunkIterator it(heap, MemoryChunkIterator::ALL); |
| 165 | MemoryChunk* chunk; |
| 166 | while ((chunk = it.next()) != nullptr) { |
| 167 | chunk->ReleaseOldToOldSlots(); |
| 168 | chunk->ReleaseTypedOldToOldSlots(); |
| 169 | } |
| 170 | } |
| 171 | |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 172 | // Eliminates all stale slots from the remembered set, i.e. |
| 173 | // slots that are not part of live objects anymore. This method must be |
| 174 | // called after marking, when the whole transitive closure is known and |
| 175 | // must be called before sweeping when mark bits are still intact. |
| 176 | static void ClearInvalidSlots(Heap* heap); |
| 177 | |
| 178 | static void VerifyValidSlots(Heap* heap); |
| 179 | |
| 180 | private: |
| 181 | static SlotSet* GetSlotSet(MemoryChunk* chunk) { |
| 182 | if (direction == OLD_TO_OLD) { |
| 183 | return chunk->old_to_old_slots(); |
| 184 | } else { |
| 185 | return chunk->old_to_new_slots(); |
| 186 | } |
| 187 | } |
| 188 | |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 189 | static TypedSlotSet* GetTypedSlotSet(MemoryChunk* chunk) { |
| 190 | if (direction == OLD_TO_OLD) { |
| 191 | return chunk->typed_old_to_old_slots(); |
| 192 | } else { |
| 193 | return nullptr; |
| 194 | } |
| 195 | } |
| 196 | |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 197 | static void ReleaseSlotSet(MemoryChunk* chunk) { |
| 198 | if (direction == OLD_TO_OLD) { |
| 199 | chunk->ReleaseOldToOldSlots(); |
| 200 | } else { |
| 201 | chunk->ReleaseOldToNewSlots(); |
| 202 | } |
| 203 | } |
| 204 | |
| 205 | static SlotSet* AllocateSlotSet(MemoryChunk* chunk) { |
| 206 | if (direction == OLD_TO_OLD) { |
| 207 | chunk->AllocateOldToOldSlots(); |
| 208 | return chunk->old_to_old_slots(); |
| 209 | } else { |
| 210 | chunk->AllocateOldToNewSlots(); |
| 211 | return chunk->old_to_new_slots(); |
| 212 | } |
| 213 | } |
| 214 | |
| 215 | template <typename Callback> |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 216 | static SlotCallbackResult Wrapper(Heap* heap, Address slot_address, |
| 217 | Callback slot_callback) { |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 218 | STATIC_ASSERT(direction == OLD_TO_NEW); |
| 219 | Object** slot = reinterpret_cast<Object**>(slot_address); |
| 220 | Object* object = *slot; |
| 221 | if (heap->InFromSpace(object)) { |
| 222 | HeapObject* heap_object = reinterpret_cast<HeapObject*>(object); |
| 223 | DCHECK(heap_object->IsHeapObject()); |
| 224 | slot_callback(reinterpret_cast<HeapObject**>(slot), heap_object); |
| 225 | object = *slot; |
| 226 | // If the object was in from space before and is after executing the |
| 227 | // callback in to space, the object is still live. |
| 228 | // Unfortunately, we do not know about the slot. It could be in a |
| 229 | // just freed free space object. |
| 230 | if (heap->InToSpace(object)) { |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 231 | return KEEP_SLOT; |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 232 | } |
| 233 | } else { |
| 234 | DCHECK(!heap->InNewSpace(object)); |
| 235 | } |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 236 | return REMOVE_SLOT; |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 237 | } |
| 238 | |
Ben Murdoch | da12d29 | 2016-06-02 14:46:10 +0100 | [diff] [blame^] | 239 | static bool IsValidSlot(Heap* heap, MemoryChunk* chunk, Object** slot); |
Ben Murdoch | 097c5b2 | 2016-05-18 11:27:45 +0100 | [diff] [blame] | 240 | }; |
| 241 | |
| 242 | } // namespace internal |
| 243 | } // namespace v8 |
| 244 | |
| 245 | #endif // V8_REMEMBERED_SET_H |