// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {

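// Looks up the visitor callback registered for the given map's visitor id.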
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}

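// Fills the dispatch table with the visitors used during scavenges.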
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit the code entry; we use this visitor only during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           int>::Visit);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  // Not using a specialized API object visitor for new space.
  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

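// Visits the pointer fields of a BytecodeArray, from the constant pool up to
// (but not including) the frame size field, and returns the array's size.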
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}

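// Fills the dispatch table with the visitors used by the marker.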
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(
      kVisitJSArrayBuffer,
      &FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                           void>::Visit);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<JSApiObjectVisitor,
                                          kVisitJSApiObject,
                                          kVisitJSApiObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}

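// Records a code entry slot and marks the Code object it points to.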
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}

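// Records the relocation slot for an object embedded in code and marks the
// object, unless the host code holds it weakly.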
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here, and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!host->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}

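// Like VisitEmbeddedPointer, but for a Cell referenced from code.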
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
  if (!host->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}

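// Marks the Code target of a patched debug break slot and records the slot.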
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      (heap->isolate()->serializer_enabled() ||
       target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}

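// Visits the pointer fields of a PropertyCell.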
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
                                                        HeapObject* object) {
  Heap* heap = map->GetHeap();
  WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
  // Enqueue the weak cell in the linked list of encountered weak cells.
  // We can ignore weak cells with cleared values because they will always
  // contain smi zero.
  if (weak_cell->next_cleared() && !weak_cell->cleared()) {
    HeapObject* value = HeapObject::cast(weak_cell->value());
    if (MarkCompactCollector::IsMarked(value)) {
      // Weak cells with live values are directly processed here to reduce
      // the processing time of weak cells during the main GC pause.
      Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
      map->GetHeap()->mark_compact_collector()->RecordSlot(weak_cell, slot,
                                                           *slot);
    } else {
      // If we do not know about the liveness of the values of weak cells, we
      // have to process them when we know the liveness of the whole
      // transitive closure.
      weak_cell->set_next(heap->encountered_weak_cells(),
                          UPDATE_WEAK_WRITE_BARRIER);
      heap->set_encountered_weak_cells(weak_cell);
    }
  }
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in the linked list of encountered transition arrays if
  // it is not already in the list.
  if (array->next_link()->IsUndefined(heap->isolate())) {
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}

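// Visits the pointer fields of an AllocationSite; fields outside the range
// (e.g. the weak list link) are not visited here.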
388template <typename StaticVisitor>
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000389void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
390 Map* map, HeapObject* object) {
391 Heap* heap = map->GetHeap();
392
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000393 StaticVisitor::VisitPointers(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000394 heap, object,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000395 HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
396 HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
397}
398
399
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  CodeBodyVisitor::Visit(map, object);
}

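// Visits a SharedFunctionInfo, treating its code reference weakly when the
// code is a candidate for flushing and strongly otherwise.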
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_flush_optimized_code_cache) {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    function->ClearTypeFeedbackInfoAtGCTime();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  JSObjectVisitor::Visit(map, object);
}

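// Marking version; visits the same BytecodeArray pointer fields as the
// scavenge visitor above.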
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
}

template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited. Prototype maps don't keep track
  // of transitions, so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}

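// Returns true if the function's script and its source are both available.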
inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}

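// A JSFunction's code is flushable only if the code is not already marked,
// the function is not optimized, and the code is old enough.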
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}

template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or referenced
  // by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  if (shared_info->IsApiFunction()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazily compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator or async functions, because we
  // don't know if there are still live activations (generator objects) on
  // the heap.
  if (shared_info->is_resumable()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must not be a builtin.
  if (shared_info->IsBuiltin()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode, then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}

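// Visits all pointer fields of a SharedFunctionInfo, including a strong
// reference to its code.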
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

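// Visits a SharedFunctionInfo while skipping its code field, so the code
// reference is treated weakly.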
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}

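// Visits a JSFunction with its code entry treated as a strong reference.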
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSFunction::BodyDescriptorStrongCode,
                              void> JSFunctionStrongCodeBodyVisitor;
  JSFunctionStrongCodeBodyVisitor::Visit(map, object);
}

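// Visits a JSFunction with its code entry treated weakly, so the code can be
// flushed.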
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}

}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_