// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/heap/objects-visiting.h"

#include "src/heap/mark-compact-inl.h"
#include "src/heap/objects-visiting-inl.h"

namespace v8 {
namespace internal {

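// Maps a heap object's map to the id of the static visitor that knows how to
// scan it. Unboxed (in-object) double fields are only possible when
// FLAG_unbox_double_fields is on and the map does not have a fast
// pointer-only layout; that flag is passed down so the size-based visitor
// selection can take the field layout into account.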
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(Map* map) {
  return GetVisitorId(map->instance_type(), map->instance_size(),
                      FLAG_unbox_double_fields && !map->HasFastPointerLayout());
}

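// Selects a visitor id purely from the instance type, the instance size and
// the unboxed-fields flag. Strings are dispatched on their representation and
// encoding first; all other objects are dispatched on their instance type,
// with fixed-layout categories going through GetVisitorIdForSize to pick
// either a size-specialized or a generic visitor.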
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case BYTECODE_ARRAY_TYPE:
      return kVisitBytecodeArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case TRANSITION_ARRAY_TYPE:
      return kVisitTransitionArray;

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_TYPED_ARRAY_TYPE:
    case JS_DATA_VIEW_TYPE:
    case JS_SET_TYPE:
    case JS_MAP_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
    case JS_PROMISE_TYPE:
    case JS_BOUND_FUNCTION_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);
    case JS_API_OBJECT_TYPE:
    case JS_SPECIAL_API_OBJECT_TYPE:
      return GetVisitorIdForSize(kVisitJSApiObject, kVisitJSApiObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case FILLER_TYPE:
      if (instance_size == kPointerSize) return kVisitDataObjectGeneric;
      // Fall through.
    case FOREIGN_TYPE:
    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
    case SIMD128_VALUE_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}

// We do not record weak slots during marking or scavenges. Instead we do it
// once when we complete the mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so we have to record the slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}

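// Traits class for the weak list threaded through objects of type T. Each
// specialization below supplies accessors for the list link (SetWeakNext,
// WeakNext, WeakNextOffset) and hooks that are invoked for live and dead
// elements (VisitLiveObject, VisitPhantomObject).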
template <class T>
struct WeakListVisitor;

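// Walks the weak list starting at |list|, asking |retainer| whether each
// element should be kept. Dead elements are unlinked and reported through
// VisitPhantomObject; live elements are re-linked, their weak-next slots are
// recorded with the mark-compact collector while compaction is in progress,
// and VisitLiveObject is called on them. Returns the new list head.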
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);

  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);

    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(tail, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object; visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);

    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) WeakListVisitor<T>::SetWeakNext(tail, undefined);
  return head;
}

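// Severs all links in a weak list by resetting each element's weak-next field
// to undefined; used below when the lists of a dead context are dropped.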
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}

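// JSFunctions are chained through their next_function_link field; live and
// dead elements need no additional processing here.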
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};

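// Code objects are chained through their next_code_link field; live and dead
// elements need no additional processing here.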
template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};

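// Native contexts are chained through the NEXT_CONTEXT_LINK slot. A live
// context additionally has the weak lists hanging off it (optimized functions
// and, during mark-compact, optimized and deoptimized code) processed and
// their slots recorded; a dead context has those lists cleared.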
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->next_context_link();
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);

    if (heap->gc_state() == Heap::MARK_COMPACT) {
      // Record the slots of the weak entries in the native context.
      MarkCompactCollector* collector = heap->mark_compact_collector();
      for (int idx = Context::FIRST_WEAK_SLOT;
           idx < Context::NATIVE_CONTEXT_SLOTS; ++idx) {
        Object** slot = Context::cast(context)->RawFieldOfElementAt(idx);
        collector->RecordSlot(context, slot, *slot);
      }
      // Code objects are always allocated in Code space; we do not have to
      // visit them during scavenges.
      DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
      DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
    }
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(context, head_slot, list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};

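// AllocationSites are chained through their weak_next field; live and dead
// elements need no additional processing here.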
template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next, UPDATE_WEAK_WRITE_BARRIER);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};

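// Explicit instantiations of VisitWeakList for the list element types that
// other translation units traverse (native contexts and allocation sites).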
template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);

template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}  // namespace internal
}  // namespace v8