// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

35template<typename StaticVisitor>
36void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
37 table_.Register(kVisitShortcutCandidate,
38 &FixedBodyVisitor<StaticVisitor,
39 ConsString::BodyDescriptor,
40 int>::Visit);
41
42 table_.Register(kVisitConsString,
43 &FixedBodyVisitor<StaticVisitor,
44 ConsString::BodyDescriptor,
45 int>::Visit);
46
47 table_.Register(kVisitSlicedString,
48 &FixedBodyVisitor<StaticVisitor,
49 SlicedString::BodyDescriptor,
50 int>::Visit);
51
mstarzinger@chromium.orgf705b502013-04-04 11:38:09 +000052 table_.Register(kVisitSymbol,
53 &FixedBodyVisitor<StaticVisitor,
54 Symbol::BodyDescriptor,
55 int>::Visit);
56
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +000057 table_.Register(kVisitFixedArray,
58 &FlexibleBodyVisitor<StaticVisitor,
59 FixedArray::BodyDescriptor,
60 int>::Visit);
61
62 table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);
63
yangguo@chromium.org46839fb2012-08-28 09:06:19 +000064 table_.Register(kVisitNativeContext,
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +000065 &FixedBodyVisitor<StaticVisitor,
66 Context::ScavengeBodyDescriptor,
67 int>::Visit);
68
69 table_.Register(kVisitByteArray, &VisitByteArray);
70
71 table_.Register(kVisitSharedFunctionInfo,
72 &FixedBodyVisitor<StaticVisitor,
73 SharedFunctionInfo::BodyDescriptor,
74 int>::Visit);
75
yangguo@chromium.orgfb377212012-11-16 14:43:43 +000076 table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +000077
78 table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);
79
erik.corry@gmail.comed49e962012-04-17 11:57:53 +000080 table_.Register(kVisitJSFunction, &VisitJSFunction);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +000081
danno@chromium.org1fd77d52013-06-07 16:01:45 +000082 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
83
84 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
85
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +000086 table_.Register(kVisitJSDataView, &VisitJSDataView);
87
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +000088 table_.Register(kVisitFreeSpace, &VisitFreeSpace);
89
90 table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);
91
92 table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);
93
94 table_.template RegisterSpecializations<DataObjectVisitor,
95 kVisitDataObject,
96 kVisitDataObjectGeneric>();
97
98 table_.template RegisterSpecializations<JSObjectVisitor,
99 kVisitJSObject,
100 kVisitJSObjectGeneric>();
101 table_.template RegisterSpecializations<StructVisitor,
102 kVisitStruct,
103 kVisitStructGeneric>();
104}
105
106
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000107template<typename StaticVisitor>
danno@chromium.org1fd77d52013-06-07 16:01:45 +0000108int StaticNewSpaceVisitor<StaticVisitor>::VisitJSArrayBuffer(
109 Map* map, HeapObject* object) {
110 Heap* heap = map->GetHeap();
111
112 STATIC_ASSERT(
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000113 JSArrayBuffer::kWeakFirstViewOffset ==
danno@chromium.org1fd77d52013-06-07 16:01:45 +0000114 JSArrayBuffer::kWeakNextOffset + kPointerSize);
115 VisitPointers(
116 heap,
117 HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
118 HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
119 VisitPointers(
120 heap,
121 HeapObject::RawField(object,
122 JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
123 HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
124 return JSArrayBuffer::kSizeWithInternalFields;
125}
126
127
128template<typename StaticVisitor>
129int StaticNewSpaceVisitor<StaticVisitor>::VisitJSTypedArray(
130 Map* map, HeapObject* object) {
131 VisitPointers(
132 map->GetHeap(),
133 HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
134 HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
135 VisitPointers(
136 map->GetHeap(),
137 HeapObject::RawField(object,
138 JSTypedArray::kWeakNextOffset + kPointerSize),
139 HeapObject::RawField(object, JSTypedArray::kSize));
140 return JSTypedArray::kSize;
141}
142
143
144template<typename StaticVisitor>
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000145int StaticNewSpaceVisitor<StaticVisitor>::VisitJSDataView(
146 Map* map, HeapObject* object) {
147 VisitPointers(
148 map->GetHeap(),
149 HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
150 HeapObject::RawField(object, JSDataView::kWeakNextOffset));
151 VisitPointers(
152 map->GetHeap(),
153 HeapObject::RawField(object,
154 JSDataView::kWeakNextOffset + kPointerSize),
155 HeapObject::RawField(object, JSDataView::kSize));
156 return JSDataView::kSize;
157}
158
159
160template<typename StaticVisitor>
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000161void StaticMarkingVisitor<StaticVisitor>::Initialize() {
162 table_.Register(kVisitShortcutCandidate,
163 &FixedBodyVisitor<StaticVisitor,
164 ConsString::BodyDescriptor,
165 void>::Visit);
166
167 table_.Register(kVisitConsString,
168 &FixedBodyVisitor<StaticVisitor,
169 ConsString::BodyDescriptor,
170 void>::Visit);
171
172 table_.Register(kVisitSlicedString,
173 &FixedBodyVisitor<StaticVisitor,
174 SlicedString::BodyDescriptor,
175 void>::Visit);
176
mstarzinger@chromium.orgf705b502013-04-04 11:38:09 +0000177 table_.Register(kVisitSymbol,
178 &FixedBodyVisitor<StaticVisitor,
179 Symbol::BodyDescriptor,
180 void>::Visit);
181
yangguo@chromium.orgfb377212012-11-16 14:43:43 +0000182 table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000183
184 table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);
185
yangguo@chromium.org46839fb2012-08-28 09:06:19 +0000186 table_.Register(kVisitNativeContext, &VisitNativeContext);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000187
188 table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);
189
190 table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);
191
yangguo@chromium.orgfb377212012-11-16 14:43:43 +0000192 table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000193
194 table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);
195
196 table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitJSWeakMap);
197
198 table_.Register(kVisitOddball,
199 &FixedBodyVisitor<StaticVisitor,
200 Oddball::BodyDescriptor,
201 void>::Visit);
202
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000203 table_.Register(kVisitMap, &VisitMap);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000204
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000205 table_.Register(kVisitCode, &VisitCode);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000206
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000207 table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000208
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000209 table_.Register(kVisitJSFunction, &VisitJSFunction);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000210
danno@chromium.org1fd77d52013-06-07 16:01:45 +0000211 table_.Register(kVisitJSArrayBuffer, &VisitJSArrayBuffer);
212
213 table_.Register(kVisitJSTypedArray, &VisitJSTypedArray);
214
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000215 table_.Register(kVisitJSDataView, &VisitJSDataView);
216
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000217 // Registration for kVisitJSRegExp is done by StaticVisitor.
218
danno@chromium.org41728482013-06-12 22:31:22 +0000219 table_.Register(kVisitCell,
220 &FixedBodyVisitor<StaticVisitor,
221 Cell::BodyDescriptor,
222 void>::Visit);
223
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000224 table_.Register(kVisitPropertyCell, &VisitPropertyCell);
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000225
226 table_.template RegisterSpecializations<DataObjectVisitor,
227 kVisitDataObject,
228 kVisitDataObjectGeneric>();
229
230 table_.template RegisterSpecializations<JSObjectVisitor,
231 kVisitJSObject,
232 kVisitJSObjectGeneric>();
233
234 table_.template RegisterSpecializations<StructObjectVisitor,
235 kVisitStruct,
236 kVisitStructGeneric>();
237}
238
239
240template<typename StaticVisitor>
241void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
242 Heap* heap, Address entry_address) {
243 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
244 heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
245 StaticVisitor::MarkObject(heap, code);
246}
247
248
249template<typename StaticVisitor>
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000250void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
251 Heap* heap, RelocInfo* rinfo) {
252 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
253 ASSERT(!rinfo->target_object()->IsConsString());
254 HeapObject* object = HeapObject::cast(rinfo->target_object());
danno@chromium.org94b0d6f2013-02-04 13:33:20 +0000255 if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
256 rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
yangguo@chromium.org003650e2013-01-24 16:31:08 +0000257 !object->IsMap() || !Map::cast(object)->CanTransition()) {
258 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
259 StaticVisitor::MarkObject(heap, object);
260 }
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000261}
262
263
264template<typename StaticVisitor>
danno@chromium.org41728482013-06-12 22:31:22 +0000265void StaticMarkingVisitor<StaticVisitor>::VisitCell(
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000266 Heap* heap, RelocInfo* rinfo) {
danno@chromium.org41728482013-06-12 22:31:22 +0000267 ASSERT(rinfo->rmode() == RelocInfo::CELL);
268 Cell* cell = rinfo->target_cell();
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000269 StaticVisitor::MarkObject(heap, cell);
270}
271
272
273template<typename StaticVisitor>
274void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
275 Heap* heap, RelocInfo* rinfo) {
276 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
277 rinfo->IsPatchedReturnSequence()) ||
278 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
279 rinfo->IsPatchedDebugBreakSlotSequence()));
280 Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
281 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
282 StaticVisitor::MarkObject(heap, target);
283}
284
285
286template<typename StaticVisitor>
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000287void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
288 Heap* heap, RelocInfo* rinfo) {
289 ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
290 Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
291 // Monomorphic ICs are preserved when possible, but need to be flushed
292 // when they might be keeping a Context alive, or when the heap is about
293 // to be serialized.
294 if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
yangguo@chromium.org46a2a512013-01-18 16:29:40 +0000295 && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
jkummerow@chromium.org59297c72013-01-09 16:32:23 +0000296 target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000297 Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000298 IC::Clear(rinfo->pc());
299 target = Code::GetCodeFromTargetAddress(rinfo->target_address());
300 }
301 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
302 StaticVisitor::MarkObject(heap, target);
303}
304
305
306template<typename StaticVisitor>
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +0000307void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
308 Heap* heap, RelocInfo* rinfo) {
309 ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
310 Code* target = rinfo->code_age_stub();
311 ASSERT(target != NULL);
312 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
313 StaticVisitor::MarkObject(heap, target);
314}
315
316
317template<typename StaticVisitor>
yangguo@chromium.org46839fb2012-08-28 09:06:19 +0000318void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000319 Map* map, HeapObject* object) {
320 FixedBodyVisitor<StaticVisitor,
321 Context::MarkCompactBodyDescriptor,
322 void>::Visit(map, object);
323
324 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
325 for (int idx = Context::FIRST_WEAK_SLOT;
yangguo@chromium.org46839fb2012-08-28 09:06:19 +0000326 idx < Context::NATIVE_CONTEXT_SLOTS;
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000327 ++idx) {
328 Object** slot =
329 HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
330 collector->RecordSlot(slot, slot, *slot);
331 }
332}
333
334
335template<typename StaticVisitor>
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000336void StaticMarkingVisitor<StaticVisitor>::VisitMap(
337 Map* map, HeapObject* object) {
338 Heap* heap = map->GetHeap();
339 Map* map_object = Map::cast(object);
340
341 // Clears the cache of ICs related to this map.
342 if (FLAG_cleanup_code_caches_at_gc) {
343 map_object->ClearCodeCache(heap);
344 }
345
yangguo@chromium.org003650e2013-01-24 16:31:08 +0000346 // When map collection is enabled we have to mark through map's transitions
347 // and back pointers in a special way to make these links weak.
348 if (FLAG_collect_maps && map_object->CanTransition()) {
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000349 MarkMapContents(heap, map_object);
350 } else {
351 StaticVisitor::VisitPointers(heap,
352 HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
353 HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
354 }
355}
356
357
358template<typename StaticVisitor>
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000359void StaticMarkingVisitor<StaticVisitor>::VisitPropertyCell(
360 Map* map, HeapObject* object) {
361 Heap* heap = map->GetHeap();
362
363 Object** slot =
364 HeapObject::RawField(object, PropertyCell::kDependentCodeOffset);
365 if (FLAG_collect_maps) {
366 // Mark property cell dependent codes array but do not push it onto marking
367 // stack, this will make references from it weak. We will clean dead
368 // codes when we iterate over property cells in ClearNonLiveReferences.
369 HeapObject* obj = HeapObject::cast(*slot);
370 heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
371 StaticVisitor::MarkObjectWithoutPush(heap, obj);
372 } else {
373 StaticVisitor::VisitPointer(heap, slot);
374 }
375
376 StaticVisitor::VisitPointers(heap,
377 HeapObject::RawField(object, PropertyCell::kPointerFieldsBeginOffset),
378 HeapObject::RawField(object, PropertyCell::kPointerFieldsEndOffset));
379}
380
381
382template<typename StaticVisitor>
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000383void StaticMarkingVisitor<StaticVisitor>::VisitCode(
384 Map* map, HeapObject* object) {
385 Heap* heap = map->GetHeap();
386 Code* code = Code::cast(object);
387 if (FLAG_cleanup_code_caches_at_gc) {
388 code->ClearTypeFeedbackCells(heap);
389 }
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +0000390 if (FLAG_age_code && !Serializer::enabled()) {
391 code->MakeOlder(heap->mark_compact_collector()->marking_parity());
392 }
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000393 code->CodeIterateBody<StaticVisitor>(heap);
394}
395
396
397template<typename StaticVisitor>
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000398void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
399 Map* map, HeapObject* object) {
400 Heap* heap = map->GetHeap();
401 SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
402 if (shared->ic_age() != heap->global_ic_age()) {
403 shared->ResetForNewContext(heap->global_ic_age());
404 }
jkummerow@chromium.org4e308cf2013-05-17 13:39:16 +0000405 if (FLAG_cache_optimized_code &&
406 FLAG_flush_optimized_code_cache &&
407 !shared->optimized_code_map()->IsSmi()) {
408 // Always flush the optimized code map if requested by flag.
409 shared->ClearOptimizedCodeMap();
mstarzinger@chromium.org0ae265a2012-12-11 17:41:11 +0000410 }
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000411 MarkCompactCollector* collector = heap->mark_compact_collector();
412 if (collector->is_code_flushing_enabled()) {
jkummerow@chromium.org4e308cf2013-05-17 13:39:16 +0000413 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
414 // Add the shared function info holding an optimized code map to
415 // the code flusher for processing of code maps after marking.
416 collector->code_flusher()->AddOptimizedCodeMap(shared);
417 // Treat all references within the code map weakly by marking the
418 // code map itself but not pushing it onto the marking deque.
419 FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
420 StaticVisitor::MarkObjectWithoutPush(heap, code_map);
421 }
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000422 if (IsFlushable(heap, shared)) {
423 // This function's code looks flushable. But we have to postpone
424 // the decision until we see all functions that point to the same
425 // SharedFunctionInfo because some of them might be optimized.
426 // That would also make the non-optimized version of the code
427 // non-flushable, because it is required for bailing out from
428 // optimized code.
429 collector->code_flusher()->AddCandidate(shared);
430 // Treat the reference to the code object weakly.
431 VisitSharedFunctionInfoWeakCode(heap, object);
432 return;
433 }
jkummerow@chromium.org4e308cf2013-05-17 13:39:16 +0000434 } else {
435 if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
436 // Flush optimized code map on major GCs without code flushing,
437 // needed because cached code doesn't contain breakpoints.
438 shared->ClearOptimizedCodeMap();
439 }
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000440 }
441 VisitSharedFunctionInfoStrongCode(heap, object);
442}
443
444
445template<typename StaticVisitor>
446void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
447 Map* map, HeapObject* object) {
448 Heap* heap = map->GetHeap();
449 JSFunction* function = JSFunction::cast(object);
450 MarkCompactCollector* collector = heap->mark_compact_collector();
451 if (collector->is_code_flushing_enabled()) {
452 if (IsFlushable(heap, function)) {
453 // This function's code looks flushable. But we have to postpone
454 // the decision until we see all functions that point to the same
455 // SharedFunctionInfo because some of them might be optimized.
456 // That would also make the non-optimized version of the code
457 // non-flushable, because it is required for bailing out from
458 // optimized code.
459 collector->code_flusher()->AddCandidate(function);
460 // Visit shared function info immediately to avoid double checking
461 // of its flushability later. This is just an optimization because
462 // the shared function info would eventually be visited.
danno@chromium.org41728482013-06-12 22:31:22 +0000463 SharedFunctionInfo* shared = function->shared();
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000464 if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
465 StaticVisitor::MarkObject(heap, shared->map());
466 VisitSharedFunctionInfoWeakCode(heap, shared);
467 }
468 // Treat the reference to the code object weakly.
469 VisitJSFunctionWeakCode(heap, object);
470 return;
471 } else {
472 // Visit all unoptimized code objects to prevent flushing them.
473 StaticVisitor::MarkObject(heap, function->shared()->code());
474 if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
475 MarkInlinedFunctionsCode(heap, function->code());
476 }
477 }
478 }
479 VisitJSFunctionStrongCode(heap, object);
480}
481
482
483template<typename StaticVisitor>
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000484void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
485 Map* map, HeapObject* object) {
486 int last_property_offset =
487 JSRegExp::kSize + kPointerSize * map->inobject_properties();
danno@chromium.org2ab0c3b2012-10-05 08:50:56 +0000488 StaticVisitor::VisitPointers(map->GetHeap(),
489 HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
490 HeapObject::RawField(object, last_property_offset));
ulan@chromium.org56c14af2012-09-20 12:51:09 +0000491}
492
493
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000494template<typename StaticVisitor>
danno@chromium.org1fd77d52013-06-07 16:01:45 +0000495void StaticMarkingVisitor<StaticVisitor>::VisitJSArrayBuffer(
496 Map* map, HeapObject* object) {
497 Heap* heap = map->GetHeap();
498
499 STATIC_ASSERT(
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000500 JSArrayBuffer::kWeakFirstViewOffset ==
danno@chromium.org1fd77d52013-06-07 16:01:45 +0000501 JSArrayBuffer::kWeakNextOffset + kPointerSize);
502 StaticVisitor::VisitPointers(
503 heap,
504 HeapObject::RawField(object, JSArrayBuffer::BodyDescriptor::kStartOffset),
505 HeapObject::RawField(object, JSArrayBuffer::kWeakNextOffset));
506 StaticVisitor::VisitPointers(
507 heap,
508 HeapObject::RawField(object,
509 JSArrayBuffer::kWeakNextOffset + 2 * kPointerSize),
510 HeapObject::RawField(object, JSArrayBuffer::kSizeWithInternalFields));
511}
512
513
514template<typename StaticVisitor>
515void StaticMarkingVisitor<StaticVisitor>::VisitJSTypedArray(
516 Map* map, HeapObject* object) {
517 StaticVisitor::VisitPointers(
518 map->GetHeap(),
519 HeapObject::RawField(object, JSTypedArray::BodyDescriptor::kStartOffset),
520 HeapObject::RawField(object, JSTypedArray::kWeakNextOffset));
521 StaticVisitor::VisitPointers(
522 map->GetHeap(),
523 HeapObject::RawField(object,
524 JSTypedArray::kWeakNextOffset + kPointerSize),
525 HeapObject::RawField(object, JSTypedArray::kSize));
526}
527
528
529template<typename StaticVisitor>
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000530void StaticMarkingVisitor<StaticVisitor>::VisitJSDataView(
531 Map* map, HeapObject* object) {
532 StaticVisitor::VisitPointers(
533 map->GetHeap(),
534 HeapObject::RawField(object, JSDataView::BodyDescriptor::kStartOffset),
535 HeapObject::RawField(object, JSDataView::kWeakNextOffset));
536 StaticVisitor::VisitPointers(
537 map->GetHeap(),
538 HeapObject::RawField(object,
539 JSDataView::kWeakNextOffset + kPointerSize),
540 HeapObject::RawField(object, JSDataView::kSize));
541}
542
543
544template<typename StaticVisitor>
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000545void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
546 Heap* heap, Map* map) {
547 // Make sure that the back pointer stored either in the map itself or
548 // inside its transitions array is marked. Skip recording the back
549 // pointer slot since map space is not compacted.
550 StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));
551
552 // Treat pointers in the transitions array as weak and also mark that
553 // array to prevent visiting it later. Skip recording the transition
554 // array slot, since it will be implicitly recorded when the pointer
555 // fields of this map are visited.
556 TransitionArray* transitions = map->unchecked_transition_array();
557 if (transitions->IsTransitionArray()) {
558 MarkTransitionArray(heap, transitions);
559 } else {
560 // Already marked by marking map->GetBackPointer() above.
561 ASSERT(transitions->IsMap() || transitions->IsUndefined());
562 }
563
mstarzinger@chromium.orge3b8d0f2013-02-01 09:06:41 +0000564 // Since descriptor arrays are potentially shared, ensure that only the
danno@chromium.orgc99cd482013-03-21 15:26:42 +0000565 // descriptors that belong to this map are marked. The first time a
mstarzinger@chromium.orge3b8d0f2013-02-01 09:06:41 +0000566 // non-empty descriptor array is marked, its header is also visited. The slot
567 // holding the descriptor array will be implicitly recorded when the pointer
568 // fields of this map are visited.
569 DescriptorArray* descriptors = map->instance_descriptors();
570 if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
571 descriptors->length() > 0) {
572 StaticVisitor::VisitPointers(heap,
573 descriptors->GetFirstElementAddress(),
574 descriptors->GetDescriptorEndSlot(0));
575 }
576 int start = 0;
577 int end = map->NumberOfOwnDescriptors();
mstarzinger@chromium.orge3b8d0f2013-02-01 09:06:41 +0000578 if (start < end) {
579 StaticVisitor::VisitPointers(heap,
580 descriptors->GetDescriptorStartSlot(start),
581 descriptors->GetDescriptorEndSlot(end));
582 }
583
yangguo@chromium.org003650e2013-01-24 16:31:08 +0000584 // Mark prototype dependent codes array but do not push it onto marking
585 // stack, this will make references from it weak. We will clean dead
586 // codes when we iterate over maps in ClearNonLiveTransitions.
ulan@chromium.org2e04b582013-02-21 14:06:02 +0000587 Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
yangguo@chromium.org003650e2013-01-24 16:31:08 +0000588 HeapObject* obj = HeapObject::cast(*slot);
589 heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
590 StaticVisitor::MarkObjectWithoutPush(heap, obj);
591
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000592 // Mark the pointer fields of the Map. Since the transitions array has
593 // been marked already, it is fine that one of these fields contains a
594 // pointer to it.
595 StaticVisitor::VisitPointers(heap,
596 HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
597 HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
598}
599
600
601template<typename StaticVisitor>
602void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
603 Heap* heap, TransitionArray* transitions) {
604 if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;
605
verwaest@chromium.org33e09c82012-10-10 17:07:22 +0000606 // Simple transitions do not have keys nor prototype transitions.
607 if (transitions->IsSimpleTransition()) return;
608
609 if (transitions->HasPrototypeTransitions()) {
610 // Mark prototype transitions array but do not push it onto marking
611 // stack, this will make references from it weak. We will clean dead
612 // prototype transitions in ClearNonLiveTransitions.
613 Object** slot = transitions->GetPrototypeTransitionsSlot();
614 HeapObject* obj = HeapObject::cast(*slot);
615 heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
616 StaticVisitor::MarkObjectWithoutPush(heap, obj);
617 }
618
619 for (int i = 0; i < transitions->number_of_transitions(); ++i) {
620 StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
621 }
622}
623
624
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000625template<typename StaticVisitor>
626void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
627 Heap* heap, Code* code) {
628 // For optimized functions we should retain both non-optimized version
629 // of its code and non-optimized version of all inlined functions.
630 // This is required to support bailing out from inlined code.
631 DeoptimizationInputData* data =
632 DeoptimizationInputData::cast(code->deoptimization_data());
633 FixedArray* literals = data->LiteralArray();
634 for (int i = 0, count = data->InlinedFunctionCount()->value();
635 i < count;
636 i++) {
637 JSFunction* inlined = JSFunction::cast(literals->get(i));
638 StaticVisitor::MarkObject(heap, inlined->shared()->code());
639 }
640}
641
642
643inline static bool IsValidNonBuiltinContext(Object* context) {
644 return context->IsContext() &&
645 !Context::cast(context)->global_object()->IsJSBuiltinsObject();
646}
647
648
649inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
650 Object* undefined = heap->undefined_value();
651 return (info->script() != undefined) &&
652 (reinterpret_cast<Script*>(info->script())->source() != undefined);
653}
654
655
656template<typename StaticVisitor>
657bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
658 Heap* heap, JSFunction* function) {
danno@chromium.org41728482013-06-12 22:31:22 +0000659 SharedFunctionInfo* shared_info = function->shared();
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000660
661 // Code is either on stack, in compilation cache or referenced
662 // by optimized version of function.
663 MarkBit code_mark = Marking::MarkBitFrom(function->code());
664 if (code_mark.Get()) {
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000665 return false;
666 }
667
668 // The function must have a valid context and not be a builtin.
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000669 if (!IsValidNonBuiltinContext(function->context())) {
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000670 return false;
671 }
672
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +0000673 // We do not (yet) flush code for optimized functions.
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000674 if (function->code() != shared_info->code()) {
675 return false;
676 }
677
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +0000678 // Check age of optimized code.
679 if (FLAG_age_code && !function->code()->IsOld()) {
680 return false;
681 }
682
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000683 return IsFlushable(heap, shared_info);
684}
685
686
687template<typename StaticVisitor>
688bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
689 Heap* heap, SharedFunctionInfo* shared_info) {
690 // Code is either on stack, in compilation cache or referenced
691 // by optimized version of function.
692 MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
693 if (code_mark.Get()) {
694 return false;
695 }
696
697 // The function must be compiled and have the source code available,
698 // to be able to recompile it in case we need the function again.
699 if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
700 return false;
701 }
702
703 // We never flush code for API functions.
704 Object* function_data = shared_info->function_data();
705 if (function_data->IsFunctionTemplateInfo()) {
706 return false;
707 }
708
709 // Only flush code for functions.
710 if (shared_info->code()->kind() != Code::FUNCTION) {
711 return false;
712 }
713
714 // Function must be lazy compilable.
715 if (!shared_info->allows_lazy_compilation()) {
716 return false;
717 }
718
ulan@chromium.org906e2fb2013-05-14 08:14:38 +0000719 // We do not (yet?) flush code for generator functions, because we don't know
720 // if there are still live activations (generator objects) on the heap.
721 if (shared_info->is_generator()) {
722 return false;
723 }
724
725 // If this is a full script wrapped in a function we do not flush the code.
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000726 if (shared_info->is_toplevel()) {
727 return false;
728 }
729
ulan@chromium.org906e2fb2013-05-14 08:14:38 +0000730 // If this is a function initialized with %SetCode then the one-to-one
731 // relation between SharedFunctionInfo and Code is broken.
732 if (shared_info->dont_flush()) {
ulan@chromium.org57ff8812013-05-10 08:16:55 +0000733 return false;
734 }
735
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000736 // Check age of code. If code aging is disabled we never flush.
737 if (!FLAG_age_code || !shared_info->code()->IsOld()) {
738 return false;
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000739 }
mstarzinger@chromium.org1510d582013-06-28 14:00:48 +0000740
741 return true;
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000742}
743
744
745template<typename StaticVisitor>
746void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
747 Heap* heap, HeapObject* object) {
748 StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
749 Object** start_slot =
750 HeapObject::RawField(object,
751 SharedFunctionInfo::BodyDescriptor::kStartOffset);
752 Object** end_slot =
753 HeapObject::RawField(object,
754 SharedFunctionInfo::BodyDescriptor::kEndOffset);
755 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
756}
757
758
759template<typename StaticVisitor>
760void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
761 Heap* heap, HeapObject* object) {
762 StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
763 Object** name_slot =
764 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
765 StaticVisitor::VisitPointer(heap, name_slot);
766
767 // Skip visiting kCodeOffset as it is treated weakly here.
768 STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
769 SharedFunctionInfo::kCodeOffset);
770 STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
771 SharedFunctionInfo::kOptimizedCodeMapOffset);
772
773 Object** start_slot =
774 HeapObject::RawField(object,
775 SharedFunctionInfo::kOptimizedCodeMapOffset);
776 Object** end_slot =
777 HeapObject::RawField(object,
778 SharedFunctionInfo::BodyDescriptor::kEndOffset);
779 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
780}
781
782
783template<typename StaticVisitor>
784void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
785 Heap* heap, HeapObject* object) {
786 Object** start_slot =
787 HeapObject::RawField(object, JSFunction::kPropertiesOffset);
788 Object** end_slot =
789 HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
790 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
791
792 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
793 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
794 JSFunction::kPrototypeOrInitialMapOffset);
795
796 start_slot =
797 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
798 end_slot =
799 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
800 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
801}
802
803
804template<typename StaticVisitor>
805void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
806 Heap* heap, HeapObject* object) {
807 Object** start_slot =
808 HeapObject::RawField(object, JSFunction::kPropertiesOffset);
809 Object** end_slot =
810 HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
811 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
812
813 // Skip visiting kCodeEntryOffset as it is treated weakly here.
814 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
815 JSFunction::kPrototypeOrInitialMapOffset);
816
817 start_slot =
818 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
819 end_slot =
820 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
821 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
822}
823
824
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000825void Code::CodeIterateBody(ObjectVisitor* v) {
826 int mode_mask = RelocInfo::kCodeTargetMask |
827 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
danno@chromium.org41728482013-06-12 22:31:22 +0000828 RelocInfo::ModeMask(RelocInfo::CELL) |
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000829 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
830 RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
831 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
832 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
833
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000834 // There are two places where we iterate code bodies: here and the
yangguo@chromium.org003650e2013-01-24 16:31:08 +0000835 // templated CodeIterateBody (below). They should be kept in sync.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000836 IteratePointer(v, kRelocationInfoOffset);
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000837 IteratePointer(v, kHandlerTableOffset);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000838 IteratePointer(v, kDeoptimizationDataOffset);
jkummerow@chromium.orgf7a58842012-02-21 10:08:21 +0000839 IteratePointer(v, kTypeFeedbackInfoOffset);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000840
841 RelocIterator it(this, mode_mask);
842 for (; !it.done(); it.next()) {
843 it.rinfo()->Visit(v);
844 }
845}
846
847
848template<typename StaticVisitor>
849void Code::CodeIterateBody(Heap* heap) {
850 int mode_mask = RelocInfo::kCodeTargetMask |
851 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
danno@chromium.org41728482013-06-12 22:31:22 +0000852 RelocInfo::ModeMask(RelocInfo::CELL) |
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000853 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
854 RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
855 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
856 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
857
yangguo@chromium.org003650e2013-01-24 16:31:08 +0000858 // There are two places where we iterate code bodies: here and the non-
859 // templated CodeIterateBody (above). They should be kept in sync.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000860 StaticVisitor::VisitPointer(
861 heap,
862 reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
863 StaticVisitor::VisitPointer(
864 heap,
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000865 reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
866 StaticVisitor::VisitPointer(
867 heap,
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000868 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
danno@chromium.orgfa458e42012-02-01 10:48:36 +0000869 StaticVisitor::VisitPointer(
870 heap,
jkummerow@chromium.orgf7a58842012-02-21 10:08:21 +0000871 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000872
873 RelocIterator it(this, mode_mask);
874 for (; !it.done(); it.next()) {
875 it.rinfo()->template Visit<StaticVisitor>(heap);
876 }
877}
878
879
880} } // namespace v8::internal
881
882#endif // V8_OBJECTS_VISITING_INL_H_