// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#ifndef V8_OBJECTS_VISITING_INL_H_
#define V8_OBJECTS_VISITING_INL_H_


namespace v8 {
namespace internal {

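// This inline header fills in the static visitor dispatch tables. Visitors
// registered by StaticNewSpaceVisitor return the visited object's size (an
// int) so the scavenger can advance to the next object, while the marking
// visitors registered by StaticMarkingVisitor below return void, since
// marking only needs to trace pointers.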
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                  FixedArray::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                  Context::ScavengeBodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                  SharedFunctionInfo::BodyDescriptor,
                  int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


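// Dispatch table for the marking phase of mark-compact. Objects whose
// bodies contain no heap pointers (byte arrays, sequential strings, fixed
// double arrays, free space) are registered with DataObjectVisitor, as
// there are no fields to mark through.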
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                  ConsString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                  SlicedString::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitJSWeakMap);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                  Oddball::BodyDescriptor,
                  void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor,
                  JSGlobalPropertyCell::BodyDescriptor,
                  void>::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}


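// Marks the Code object referenced by a JSFunction's code-entry field. The
// entry slot is recorded with the collector first so that it can be
// updated if the code object moves during compaction.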
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}


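// Marks an object embedded in code. Note the asymmetry below: when weak
// embedded maps are enabled, maps that can transition and are embedded in
// optimized code are intentionally neither recorded nor marked, so that
// such code does not keep otherwise-dead transitioning maps alive.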
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
      rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
      !object->IsMap() || !Map::cast(object)->CanTransition()) {
    heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
    StaticVisitor::MarkObject(heap, object);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitGlobalPropertyCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
  JSGlobalPropertyCell* cell = rinfo->target_cell();
  StaticVisitor::MarkObject(heap, cell);
}


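// Marks the debug call target that the debugger patched into a JS return
// sequence or a debug break slot (see the ASSERT below for the two
// accepted shapes).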
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(rinfo->pc());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


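// Marks the code-age stub referenced from a function's code-age sequence
// so that the stub itself survives the collection.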
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}


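// Visits a native context. The fixed-body visit covers only the strong
// slots; the weak tail (FIRST_WEAK_SLOT up to NATIVE_CONTEXT_SLOTS) is
// merely recorded for pointer updating during compaction, which leaves
// those references weak.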
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot =
        HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
    collector->RecordSlot(slot, slot, *slot);
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clear the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through the map's
  // transitions and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackCells(heap);
  }
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code) {
    // Flush optimized code map on major GC.
    // TODO(mstarzinger): We may experiment with rebuilding it or with
    // retaining entries which should survive as we iterate through
    // optimized functions anyway.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  VisitSharedFunctionInfoStrongCode(heap, object);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->unchecked_shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}


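// Default JSRegExp visitor: visits all raw pointer fields, from the
// properties slot through the last in-object property. The table entry for
// kVisitJSRegExp is registered by the concrete StaticVisitor (see
// Initialize above), which may use this default or a more specialized
// handler.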
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The
  // slot holding the descriptor array will be implicitly recorded when the
  // pointer fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  Object* back_pointer = map->GetBackPointer();
  if (!back_pointer->IsUndefined()) {
    Map* parent_map = Map::cast(back_pointer);
    if (descriptors == parent_map->instance_descriptors()) {
      start = parent_map->NumberOfOwnDescriptors();
    }
  }
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark the array of prototype-dependent code objects, but do not push it
  // onto the marking stack; this keeps references from it weak. Dead code
  // objects are cleaned up when iterating over maps in
  // ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions have neither keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark the prototype transitions array, but do not push it onto the
    // marking stack; this keeps references from it weak. Dead prototype
    // transitions are cleaned up in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both the non-optimized version
  // of the code and the non-optimized versions of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}


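// Helpers for the code-flushing heuristics below: code is only flushed for
// functions living in an ordinary (non-builtin) context whose source is
// still available for lazy recompilation.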
inline static bool IsValidNonBuiltinContext(Object* context) {
  return context->IsContext() &&
      !Context::cast(context)->global_object()->IsJSBuiltinsObject();
}


inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
  Object* undefined = heap->undefined_value();
  return (info->script() != undefined) &&
      (reinterpret_cast<Script*>(info->script())->source() != undefined);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->unchecked_shared();

  // The code is either on the stack, in the compilation cache, or
  // referenced by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    if (!FLAG_age_code) {
      if (!Marking::MarkBitFrom(shared_info).Get()) {
        shared_info->set_code_age(0);
      }
    }
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->unchecked_context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check the age of the code object.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}


template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // The code is either on the stack, in the compilation cache, or
  // referenced by an optimized version of the function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available so
  // that it can be recompiled in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // The function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // If this is a full script wrapped in a function, we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  if (FLAG_age_code) {
    return shared_info->code()->IsOld();
  } else {
    // The number of collections a newly compiled code object will survive
    // before being flushed.
    static const int kCodeAgeThreshold = 5;

    // Age this shared function info.
    if (shared_info->code_age() < kCodeAgeThreshold) {
      shared_info->set_code_age(shared_info->code_age() + 1);
      return false;
    }
    return true;
  }
}


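// The visitors below come in strong/weak pairs. The strong variants visit
// the code reference like any other field, while the weak variants skip it
// (SharedFunctionInfo::kCodeOffset and JSFunction::kCodeEntryOffset,
// respectively) so that marking does not keep alive code that the code
// flusher has selected as a flushing candidate.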
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kStartOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
                SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
                SharedFunctionInfo::kOptimizedCodeMapOffset);

  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
                JSFunction::kPrototypeOrInitialMapOffset);

  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}


void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(v);
  }
}


template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}


} }  // namespace v8::internal

#endif  // V8_OBJECTS_VISITING_INL_H_