// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_

#include "src/heap/array-buffer-tracker.h"
#include "src/heap/objects-visiting.h"
#include "src/ic/ic-state.h"
#include "src/macro-assembler.h"
#include "src/objects-body-descriptors-inl.h"

namespace v8 {
namespace internal {

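// Each Map carries a precomputed visitor id; dispatch is a single lookup in
// the callback table on that id, so visiting an object never needs a type
// switch.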
template <typename Callback>
Callback VisitorDispatchTable<Callback>::GetVisitor(Map* map) {
  return reinterpret_cast<Callback>(callbacks_[map->visitor_id()]);
}


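// Sets up the dispatch table for the scavenger (new-space GC). The visitors
// registered here return the object's size as an int, which lets the
// scavenger walk the objects of a page back to back. A rough sketch of how
// callers reach them (IterateBody is declared in
// src/heap/objects-visiting.h; the exact entry point may differ):
//
//   int size = StaticNewSpaceVisitor<SomeVisitor>::IterateBody(map, obj);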
template <typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(
      kVisitShortcutCandidate,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(
      kVisitConsString,
      &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor, int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor, int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           int>::Visit);

  table_.Register(
      kVisitNativeContext,
      &FixedBodyVisitor<StaticVisitor, Context::ScavengeBodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);
  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(
      kVisitSharedFunctionInfo,
      &FixedBodyVisitor<StaticVisitor, SharedFunctionInfo::BodyDescriptor,
                        int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  // Don't visit the code entry; this visitor is only used during scavenges.
  table_.Register(
      kVisitJSFunction,
      &FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                           int>::Visit);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakCollection, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


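// Non-external array buffers own their backing store, so the scavenger must
// report them to the array-buffer tracker; a buffer that is never marked
// live would have its backing memory reclaimed after the GC cycle.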
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor, int>
      JSArrayBufferBodyVisitor;

  if (!JSArrayBuffer::cast(object)->is_external()) {
    Heap* heap = map->GetHeap();
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
  return JSArrayBufferBodyVisitor::Visit(map, object);
}


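// Only the tagged header fields of a BytecodeArray (from the constant pool
// up to, but not including, the frame-size field) hold heap pointers. The
// untagged header fields and the raw bytecodes are skipped, and the full
// object size is returned so the scavenger can advance past them.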
template <typename StaticVisitor>
int StaticNewSpaceVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
  return reinterpret_cast<BytecodeArray*>(object)->BytecodeArraySize();
}


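// Sets up the dispatch table for full-heap marking (mark-compact and
// incremental marking). In contrast to the scavenger table above, these
// visitors return void, record slots with the mark-compact collector so
// that pointers can be updated after compaction, and treat several kinds of
// references (code, weak cells, transition arrays) weakly.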
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor, ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor, SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(
      kVisitSymbol,
      &FixedBodyVisitor<StaticVisitor, Symbol::BodyDescriptor, void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(
      kVisitFixedTypedArray,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(
      kVisitFixedFloat64Array,
      &FlexibleBodyVisitor<StaticVisitor, FixedTypedArrayBase::BodyDescriptor,
                           void>::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitAllocationSite, &VisitAllocationSite);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitBytecodeArray, &VisitBytecodeArray);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakCollection, &VisitWeakCollection);

  table_.Register(
      kVisitOddball,
      &FixedBodyVisitor<StaticVisitor, Oddball::BodyDescriptor, void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(
      kVisitCell,
      &FixedBodyVisitor<StaticVisitor, Cell::BodyDescriptor, void>::Visit);

  table_.Register(kVisitPropertyCell, &VisitPropertyCell);

  table_.Register(kVisitWeakCell, &VisitWeakCell);

  table_.Register(kVisitTransitionArray, &VisitTransitionArray);

  table_.template RegisterSpecializations<DataObjectVisitor, kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor, kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor, kVisitStruct,
                                          kVisitStructGeneric>();
}


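// The code entry field of a JSFunction holds a raw instruction address, not
// a tagged pointer, so it cannot be visited as an ordinary slot: the entry
// slot is recorded separately and the Code object it points into is marked.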
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, HeapObject* object, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(object, entry_address,
                                                      code);
  StaticVisitor::MarkObject(heap, code);
}


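// The RelocInfo visitors below follow references that live inside generated
// machine code. Each one records the reloc slot with the collector (so that
// compaction can patch the instruction stream) and then marks the target,
// except where the host code treats that particular target as weak.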
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, object);
  // TODO(ulan): It could be better to record slots only for strongly embedded
  // objects here, and to record slots for weakly embedded objects during the
  // clearing of non-live references in mark-compact.
  if (!host->IsWeakObject(object)) {
    StaticVisitor::MarkObject(heap, object);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCell(Heap* heap,
                                                    RelocInfo* rinfo) {
  DCHECK(rinfo->rmode() == RelocInfo::CELL);
  Cell* cell = rinfo->target_cell();
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, cell);
  if (!host->IsWeakObject(cell)) {
    StaticVisitor::MarkObject(heap, cell);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(Heap* heap,
                                                           RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
         rinfo->IsPatchedDebugBreakSlotSequence());
  Code* target = Code::GetCodeFromTargetAddress(rinfo->debug_call_address());
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(Heap* heap,
                                                          RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub() &&
      !target->is_call_stub() && (heap->isolate()->serializer_enabled() ||
                                  target->ic_age() != heap->global_ic_age())) {
    ICUtility::Clear(heap->isolate(), rinfo->pc(),
                     rinfo->host()->constant_pool());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  DCHECK(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  DCHECK(target != NULL);
  Code* host = rinfo->host();
  heap->mark_compact_collector()->RecordRelocSlot(host, rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor, Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);
}


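// Maps that can transition are handled by MarkMapContents (further down),
// which marks only the owned part of the potentially shared descriptor
// array and keeps transition links weak; all other maps simply have their
// pointer fields visited strongly.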
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(Map* map,
                                                   HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clear the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled, we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(
        heap, object,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
}


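// A WeakCell's reference to its value must not keep the value alive. If the
// value is already marked, recording the slot is enough; otherwise the cell
// is deferred by chaining it onto heap->encountered_weak_cells(), to be
// processed once the transitive closure of live objects is complete.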
336template <typename StaticVisitor>
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400337void StaticMarkingVisitor<StaticVisitor>::VisitWeakCell(Map* map,
338 HeapObject* object) {
339 Heap* heap = map->GetHeap();
340 WeakCell* weak_cell = reinterpret_cast<WeakCell*>(object);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400341 // Enqueue weak cell in linked list of encountered weak collections.
342 // We can ignore weak cells with cleared values because they will always
343 // contain smi zero.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000344 if (weak_cell->next_cleared() && !weak_cell->cleared()) {
345 HeapObject* value = HeapObject::cast(weak_cell->value());
346 if (MarkCompactCollector::IsMarked(value)) {
347 // Weak cells with live values are directly processed here to reduce
348 // the processing time of weak cells during the main GC pause.
349 Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
350 map->GetHeap()->mark_compact_collector()->RecordSlot(weak_cell, slot,
351 *slot);
352 } else {
353 // If we do not know about liveness of values of weak cells, we have to
354 // process them when we know the liveness of the whole transitive
355 // closure.
356 weak_cell->set_next(heap->encountered_weak_cells(),
357 UPDATE_WEAK_WRITE_BARRIER);
358 heap->set_encountered_weak_cells(weak_cell);
359 }
360 }
361}
362
363
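// Transition arrays are visited weakly with respect to their target maps:
// only the prototype-transitions slot and the transition keys are marked
// strongly here, and the array itself is queued so that entries with dead
// targets can be cleared after marking finishes.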
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitTransitionArray(
    Map* map, HeapObject* object) {
  TransitionArray* array = TransitionArray::cast(object);
  Heap* heap = array->GetHeap();
  // Visit strong references.
  if (array->HasPrototypeTransitions()) {
    StaticVisitor::VisitPointer(heap, array,
                                array->GetPrototypeTransitionsSlot());
  }
  int num_transitions = TransitionArray::NumberOfTransitions(array);
  for (int i = 0; i < num_transitions; ++i) {
    StaticVisitor::VisitPointer(heap, array, array->GetKeySlot(i));
  }
  // Enqueue the array in the linked list of encountered transition arrays
  // if it is not already in the list.
  if (array->next_link()->IsUndefined()) {
    array->set_next_link(heap->encountered_transition_arrays(),
                         UPDATE_WEAK_WRITE_BARRIER);
    heap->set_encountered_transition_arrays(array);
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitAllocationSite(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  StaticVisitor::VisitPointers(
      heap, object,
      HeapObject::RawField(object, AllocationSite::kPointerFieldsBeginOffset),
      HeapObject::RawField(object, AllocationSite::kPointerFieldsEndOffset));
}


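// Weak collections (JSWeakMap/JSWeakSet) get ephemeron semantics: the
// object body is visited with a descriptor that skips the backing table,
// the table itself is marked without being pushed on the marking stack, and
// the collection is queued for post-processing of its entries.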
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitWeakCollection(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor,
                              JSWeakCollection::BodyDescriptorWeak,
                              void> JSWeakCollectionBodyVisitor;
  Heap* heap = map->GetHeap();
  JSWeakCollection* weak_collection =
      reinterpret_cast<JSWeakCollection*>(object);

  // Enqueue the weak collection in the linked list of encountered weak
  // collections.
  if (weak_collection->next() == heap->undefined_value()) {
    weak_collection->set_next(heap->encountered_weak_collections());
    heap->set_encountered_weak_collections(weak_collection);
  }

  // Skip visiting the backing hash table containing the mappings and the
  // pointer to the other enqueued weak collections; both are post-processed.
  JSWeakCollectionBodyVisitor::Visit(map, object);

  // A partially initialized weak collection is enqueued, but its table is
  // ignored.
  if (!weak_collection->table()->IsHashTable()) return;

  // Mark the backing hash table without pushing it on the marking stack.
  Object** slot = HeapObject::RawField(object, JSWeakCollection::kTableOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(object, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(Map* map,
                                                    HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, Code::BodyDescriptor, void>
      CodeBodyVisitor;
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_age_code && !heap->isolate()->serializer_enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  CodeBodyVisitor::Visit(map, object);
}


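// Code flushing: compiled code that looks unlikely to be needed again can
// be dropped and recompiled lazily on the next call. A SharedFunctionInfo
// is only nominated as a candidate here; the code flusher makes the final
// decision after all JSFunctions referencing it have been visited, because
// an optimized caller pins the unoptimized code (it is needed for bailouts).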
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cleanup_code_caches_at_gc) {
    shared->ClearTypeFeedbackInfoAtGCTime();
  }
  if (FLAG_flush_optimized_code_cache) {
    if (!shared->OptimizedCodeMapIsCleared()) {
      // Always flush the optimized code map if requested by flag.
      shared->ClearOptimizedCodeMap();
    }
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(Map* map,
                                                          HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(map, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
    }
  }
  VisitJSFunctionStrongCode(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(Map* map,
                                                        HeapObject* object) {
  JSObjectVisitor::Visit(map, object);
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();

  typedef FlexibleBodyVisitor<StaticVisitor, JSArrayBuffer::BodyDescriptor,
                              void> JSArrayBufferBodyVisitor;

  JSArrayBufferBodyVisitor::Visit(map, object);

  if (!JSArrayBuffer::cast(object)->is_external() &&
      !heap->InNewSpace(object)) {
    heap->array_buffer_tracker()->MarkLive(JSArrayBuffer::cast(object));
  }
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitBytecodeArray(
    Map* map, HeapObject* object) {
  StaticVisitor::VisitPointers(
      map->GetHeap(), object,
      HeapObject::RawField(object, BytecodeArray::kConstantPoolOffset),
      HeapObject::RawField(object, BytecodeArray::kFrameSizeOffset));
}


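// Descriptor arrays may be shared across the maps of a transition tree, so
// each map is responsible for marking exactly the descriptors it owns;
// marking the whole array from one map could otherwise keep descriptors
// belonging to dead maps alive.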
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(Heap* heap,
                                                          Map* map) {
  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited. Prototype maps don't keep track
  // of transitions, so just mark the entire descriptor array.
  if (!map->is_prototype_map()) {
    DescriptorArray* descriptors = map->instance_descriptors();
    if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
        descriptors->length() > 0) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetFirstElementAddress(),
                                   descriptors->GetDescriptorEndSlot(0));
    }
    int start = 0;
    int end = map->NumberOfOwnDescriptors();
    if (start < end) {
      StaticVisitor::VisitPointers(heap, descriptors,
                                   descriptors->GetDescriptorStartSlot(start),
                                   descriptors->GetDescriptorEndSlot(end));
    }
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(
      heap, map, HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
         (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


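// The IsFlushable() predicates below are deliberately conservative: code
// that is already marked, optimized, young, backed by an API function, not
// lazily recompilable, a generator, top-level, a builtin, or carrying debug
// or %SetCode state is never flushed.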
template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(Heap* heap,
                                                      JSFunction* function) {
  SharedFunctionInfo* shared_info = function->shared();

  // Marked code is either on the stack, in the compilation cache, or
  // referenced by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code; young code is never flushed.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template <typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Marked code is either on the stack, in the compilation cache, or
  // referenced by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (Marking::IsBlackOrGrey(code_mark)) {
    return false;
  }

  // The function must be compiled and have the source code available, to be
  // able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must allow lazy compilation.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't
  // know if there are still live activations (generator objects) on the
  // heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the
  // code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // The function must not be a builtin.
  if (shared_info->IsBuiltin()) {
    return false;
  }

  // Maintain debug break slots in the code.
  if (shared_info->HasDebugCode()) {
    return false;
  }

  // If this is a function initialized with %SetCode, the one-to-one relation
  // between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  // Check the age of the code. If code aging is disabled, we never flush.
  if (!FLAG_age_code || !shared_info->code()->IsOld()) {
    return false;
  }

  return true;
}


template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


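// The weak-code variant visits everything except the kCodeOffset slot. The
// STATIC_ASSERTs below pin the assumed field order (name, then code, then
// the optimized code map); if the layout changed, the two visited ranges
// would silently cover the wrong fields.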
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, object, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot = HeapObject::RawField(
      object, SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, object, start_slot, end_slot);
}


711template <typename StaticVisitor>
712void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000713 Map* map, HeapObject* object) {
714 typedef FlexibleBodyVisitor<StaticVisitor,
715 JSFunction::BodyDescriptorStrongCode,
716 void> JSFunctionStrongCodeBodyVisitor;
717 JSFunctionStrongCodeBodyVisitor::Visit(map, object);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000718}
719
720
template <typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Map* map, HeapObject* object) {
  typedef FlexibleBodyVisitor<StaticVisitor, JSFunction::BodyDescriptorWeakCode,
                              void> JSFunctionWeakCodeBodyVisitor;
  JSFunctionWeakCodeBodyVisitor::Visit(map, object);
}


}  // namespace internal
}  // namespace v8

#endif  // V8_OBJECTS_VISITING_INL_H_