// Copyright 2011 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/v8.h"

#include "src/heap/objects-visiting.h"

namespace v8 {
namespace internal {


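// Maps an object's instance type (together with its instance size and
// whether its map has unboxed fields) to the id of the static visitor
// specialized for that kind of object.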
StaticVisitorBase::VisitorId StaticVisitorBase::GetVisitorId(
    int instance_type, int instance_size, bool has_unboxed_fields) {
  if (instance_type < FIRST_NONSTRING_TYPE) {
    switch (instance_type & kStringRepresentationMask) {
      case kSeqStringTag:
        if ((instance_type & kStringEncodingMask) == kOneByteStringTag) {
          return kVisitSeqOneByteString;
        } else {
          return kVisitSeqTwoByteString;
        }

      case kConsStringTag:
        if (IsShortcutCandidate(instance_type)) {
          return kVisitShortcutCandidate;
        } else {
          return kVisitConsString;
        }

      case kSlicedStringTag:
        return kVisitSlicedString;

      case kExternalStringTag:
        return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                   instance_size, has_unboxed_fields);
    }
    UNREACHABLE();
  }

  switch (instance_type) {
    case BYTE_ARRAY_TYPE:
      return kVisitByteArray;

    case FREE_SPACE_TYPE:
      return kVisitFreeSpace;

    case FIXED_ARRAY_TYPE:
      return kVisitFixedArray;

    case FIXED_DOUBLE_ARRAY_TYPE:
      return kVisitFixedDoubleArray;

    case CONSTANT_POOL_ARRAY_TYPE:
      return kVisitConstantPoolArray;

    case ODDBALL_TYPE:
      return kVisitOddball;

    case MAP_TYPE:
      return kVisitMap;

    case CODE_TYPE:
      return kVisitCode;

    case CELL_TYPE:
      return kVisitCell;

    case PROPERTY_CELL_TYPE:
      return kVisitPropertyCell;

    case WEAK_CELL_TYPE:
      return kVisitWeakCell;

    case JS_SET_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSSet::kSize, has_unboxed_fields);

    case JS_MAP_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSMap::kSize, has_unboxed_fields);

    case JS_WEAK_MAP_TYPE:
    case JS_WEAK_SET_TYPE:
      return kVisitJSWeakCollection;

    case JS_REGEXP_TYPE:
      return kVisitJSRegExp;

    case SHARED_FUNCTION_INFO_TYPE:
      return kVisitSharedFunctionInfo;

    case JS_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSProxy::kSize, has_unboxed_fields);

    case JS_FUNCTION_PROXY_TYPE:
      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 JSFunctionProxy::kSize, has_unboxed_fields);

    case FOREIGN_TYPE:
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 Foreign::kSize, has_unboxed_fields);

    case SYMBOL_TYPE:
      return kVisitSymbol;

    case FILLER_TYPE:
      return kVisitDataObjectGeneric;

    case JS_ARRAY_BUFFER_TYPE:
      return kVisitJSArrayBuffer;

    case JS_TYPED_ARRAY_TYPE:
      return kVisitJSTypedArray;

    case JS_DATA_VIEW_TYPE:
      return kVisitJSDataView;

    case JS_OBJECT_TYPE:
    case JS_CONTEXT_EXTENSION_OBJECT_TYPE:
    case JS_GENERATOR_OBJECT_TYPE:
    case JS_MODULE_TYPE:
    case JS_VALUE_TYPE:
    case JS_DATE_TYPE:
    case JS_ARRAY_TYPE:
    case JS_GLOBAL_PROXY_TYPE:
    case JS_GLOBAL_OBJECT_TYPE:
    case JS_BUILTINS_OBJECT_TYPE:
    case JS_MESSAGE_OBJECT_TYPE:
    case JS_SET_ITERATOR_TYPE:
    case JS_MAP_ITERATOR_TYPE:
      return GetVisitorIdForSize(kVisitJSObject, kVisitJSObjectGeneric,
                                 instance_size, has_unboxed_fields);

    case JS_FUNCTION_TYPE:
      return kVisitJSFunction;

    case HEAP_NUMBER_TYPE:
    case MUTABLE_HEAP_NUMBER_TYPE:
#define EXTERNAL_ARRAY_CASE(Type, type, TYPE, ctype, size) \
  case EXTERNAL_##TYPE##_ARRAY_TYPE:

      TYPED_ARRAYS(EXTERNAL_ARRAY_CASE)
      return GetVisitorIdForSize(kVisitDataObject, kVisitDataObjectGeneric,
                                 instance_size, has_unboxed_fields);
#undef EXTERNAL_ARRAY_CASE

    case FIXED_UINT8_ARRAY_TYPE:
    case FIXED_INT8_ARRAY_TYPE:
    case FIXED_UINT16_ARRAY_TYPE:
    case FIXED_INT16_ARRAY_TYPE:
    case FIXED_UINT32_ARRAY_TYPE:
    case FIXED_INT32_ARRAY_TYPE:
    case FIXED_FLOAT32_ARRAY_TYPE:
    case FIXED_UINT8_CLAMPED_ARRAY_TYPE:
      return kVisitFixedTypedArray;

    case FIXED_FLOAT64_ARRAY_TYPE:
      return kVisitFixedFloat64Array;

#define MAKE_STRUCT_CASE(NAME, Name, name) case NAME##_TYPE:
      STRUCT_LIST(MAKE_STRUCT_CASE)
#undef MAKE_STRUCT_CASE
      if (instance_type == ALLOCATION_SITE_TYPE) {
        return kVisitAllocationSite;
      }

      return GetVisitorIdForSize(kVisitStruct, kVisitStructGeneric,
                                 instance_size, has_unboxed_fields);

    default:
      UNREACHABLE();
      return kVisitorIdCount;
  }
}


// We don't record weak slots during marking or scavenges. Instead we do it
// once when we complete a mark-compact cycle. Note that the write barrier
// has no effect if we are already in the middle of a compacting mark-sweep
// cycle, so we have to record slots manually.
static bool MustRecordSlots(Heap* heap) {
  return heap->gc_state() == Heap::MARK_COMPACT &&
         heap->mark_compact_collector()->is_compacting();
}


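// Trait that tells VisitWeakList and ClearWeakList how to read and write
// the weak "next" link of a T, and what extra work to do when an element
// is found alive (VisitLiveObject) or dead (VisitPhantomObject).
// WeakNextOffset must name the same field that SetWeakNext writes, so that
// slot recording stays consistent. Specialized below for each object type
// that participates in a weak list.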
template <class T>
struct WeakListVisitor;


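// Walks the weak list headed by |list|, asking |retainer| which elements to
// keep. Dead elements are notified via VisitPhantomObject and unlinked;
// live elements are relinked, with the updated slots recorded when the
// collector is compacting. Returns the new head of the list, e.g. for
// storing back into a context's OPTIMIZED_CODE_LIST slot.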
template <class T>
Object* VisitWeakList(Heap* heap, Object* list, WeakObjectRetainer* retainer) {
  Object* undefined = heap->undefined_value();
  Object* head = undefined;
  T* tail = NULL;
  MarkCompactCollector* collector = heap->mark_compact_collector();
  bool record_slots = MustRecordSlots(heap);
  while (list != undefined) {
    // Check whether to keep the candidate in the list.
    T* candidate = reinterpret_cast<T*>(list);
    Object* retained = retainer->RetainAs(list);
    if (retained != NULL) {
      if (head == undefined) {
        // First element in the list.
        head = retained;
      } else {
        // Subsequent elements in the list.
        DCHECK(tail != NULL);
        WeakListVisitor<T>::SetWeakNext(tail, retained);
        if (record_slots) {
          Object** next_slot =
              HeapObject::RawField(tail, WeakListVisitor<T>::WeakNextOffset());
          collector->RecordSlot(next_slot, next_slot, retained);
        }
      }
      // The retained object is the new tail.
      DCHECK(!retained->IsUndefined());
      candidate = reinterpret_cast<T*>(retained);
      tail = candidate;

      // tail is a live object, visit it.
      WeakListVisitor<T>::VisitLiveObject(heap, tail, retainer);
    } else {
      WeakListVisitor<T>::VisitPhantomObject(heap, candidate);
    }

    // Move to the next element in the list.
    list = WeakListVisitor<T>::WeakNext(candidate);
  }

  // Terminate the list if there are one or more elements.
  if (tail != NULL) {
    WeakListVisitor<T>::SetWeakNext(tail, undefined);
  }
  return head;
}


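// Severs all links in a weak list without invoking any retainer callbacks;
// used when the owner of the whole list is itself dead.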
template <class T>
static void ClearWeakList(Heap* heap, Object* list) {
  Object* undefined = heap->undefined_value();
  while (list != undefined) {
    T* candidate = reinterpret_cast<T*>(list);
    list = WeakListVisitor<T>::WeakNext(candidate);
    WeakListVisitor<T>::SetWeakNext(candidate, undefined);
  }
}


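// JSFunctions are chained through their next_function_link field; no extra
// work is needed for live or phantom functions.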
template <>
struct WeakListVisitor<JSFunction> {
  static void SetWeakNext(JSFunction* function, Object* next) {
    function->set_next_function_link(next);
  }

  static Object* WeakNext(JSFunction* function) {
    return function->next_function_link();
  }

  static int WeakNextOffset() { return JSFunction::kNextFunctionLinkOffset; }

  static void VisitLiveObject(Heap*, JSFunction*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSFunction*) {}
};


template <>
struct WeakListVisitor<Code> {
  static void SetWeakNext(Code* code, Object* next) {
    code->set_next_code_link(next);
  }

  static Object* WeakNext(Code* code) { return code->next_code_link(); }

  static int WeakNextOffset() { return Code::kNextCodeLinkOffset; }

  static void VisitLiveObject(Heap*, Code*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, Code*) {}
};


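// Contexts are chained through the NEXT_CONTEXT_LINK slot. A live context
// additionally owns three embedded weak lists (optimized functions,
// optimized code, deoptimized code), which are processed here as well.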
template <>
struct WeakListVisitor<Context> {
  static void SetWeakNext(Context* context, Object* next) {
    context->set(Context::NEXT_CONTEXT_LINK, next, UPDATE_WRITE_BARRIER);
  }

  static Object* WeakNext(Context* context) {
    return context->get(Context::NEXT_CONTEXT_LINK);
  }

  static int WeakNextOffset() {
    return FixedArray::SizeFor(Context::NEXT_CONTEXT_LINK);
  }

  static void VisitLiveObject(Heap* heap, Context* context,
                              WeakObjectRetainer* retainer) {
    // Process the three weak lists linked off the context.
    DoWeakList<JSFunction>(heap, context, retainer,
                           Context::OPTIMIZED_FUNCTIONS_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::OPTIMIZED_CODE_LIST);
    DoWeakList<Code>(heap, context, retainer, Context::DEOPTIMIZED_CODE_LIST);
  }

  template <class T>
  static void DoWeakList(Heap* heap, Context* context,
                         WeakObjectRetainer* retainer, int index) {
    // Visit the weak list, removing dead intermediate elements.
    Object* list_head = VisitWeakList<T>(heap, context->get(index), retainer);

    // Update the list head.
    context->set(index, list_head, UPDATE_WRITE_BARRIER);

    if (MustRecordSlots(heap)) {
      // Record the updated slot if necessary.
      Object** head_slot =
          HeapObject::RawField(context, FixedArray::SizeFor(index));
      heap->mark_compact_collector()->RecordSlot(head_slot, head_slot,
                                                 list_head);
    }
  }

  static void VisitPhantomObject(Heap* heap, Context* context) {
    ClearWeakList<JSFunction>(heap,
                              context->get(Context::OPTIMIZED_FUNCTIONS_LIST));
    ClearWeakList<Code>(heap, context->get(Context::OPTIMIZED_CODE_LIST));
    ClearWeakList<Code>(heap, context->get(Context::DEOPTIMIZED_CODE_LIST));
  }
};


template <>
struct WeakListVisitor<JSArrayBufferView> {
  static void SetWeakNext(JSArrayBufferView* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBufferView* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBufferView::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, JSArrayBufferView*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, JSArrayBufferView*) {}
};


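// A JSArrayBuffer heads a weak list of the views (typed arrays and data
// views) backed by it, so a live buffer also prunes its view list. When the
// buffer itself dies, its external backing store is released.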
template <>
struct WeakListVisitor<JSArrayBuffer> {
  static void SetWeakNext(JSArrayBuffer* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(JSArrayBuffer* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return JSArrayBuffer::kWeakNextOffset; }

  static void VisitLiveObject(Heap* heap, JSArrayBuffer* array_buffer,
                              WeakObjectRetainer* retainer) {
    Object* typed_array_obj = VisitWeakList<JSArrayBufferView>(
        heap, array_buffer->weak_first_view(), retainer);
    array_buffer->set_weak_first_view(typed_array_obj);
    if (typed_array_obj != heap->undefined_value() && MustRecordSlots(heap)) {
      Object** slot = HeapObject::RawField(array_buffer,
                                           JSArrayBuffer::kWeakFirstViewOffset);
      heap->mark_compact_collector()->RecordSlot(slot, slot, typed_array_obj);
    }
  }

  static void VisitPhantomObject(Heap* heap, JSArrayBuffer* phantom) {
    Runtime::FreeArrayBuffer(heap->isolate(), phantom);
  }
};


template <>
struct WeakListVisitor<AllocationSite> {
  static void SetWeakNext(AllocationSite* obj, Object* next) {
    obj->set_weak_next(next);
  }

  static Object* WeakNext(AllocationSite* obj) { return obj->weak_next(); }

  static int WeakNextOffset() { return AllocationSite::kWeakNextOffset; }

  static void VisitLiveObject(Heap*, AllocationSite*, WeakObjectRetainer*) {}

  static void VisitPhantomObject(Heap*, AllocationSite*) {}
};


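// Explicit instantiations for the element types whose weak lists are
// traversed from outside this translation unit (the heap's weak object
// lists), so the template definition above need not live in a header.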
template Object* VisitWeakList<Code>(Heap* heap, Object* list,
                                     WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSFunction>(Heap* heap, Object* list,
                                           WeakObjectRetainer* retainer);


template Object* VisitWeakList<Context>(Heap* heap, Object* list,
                                        WeakObjectRetainer* retainer);


template Object* VisitWeakList<JSArrayBuffer>(Heap* heap, Object* list,
                                              WeakObjectRetainer* retainer);


template Object* VisitWeakList<AllocationSite>(Heap* heap, Object* list,
                                               WeakObjectRetainer* retainer);
}
}  // namespace v8::internal