blob: f83f00fd5d139574a9ccc2a3ab9ae09456794f0d [file] [log] [blame]
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +00001// Copyright 2012 the V8 project authors. All rights reserved.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_OBJECTS_VISITING_INL_H_
29#define V8_OBJECTS_VISITING_INL_H_
30
31
32namespace v8 {
33namespace internal {
34
// Populates the dispatch table used during new-space (scavenge)
// collections. Each registered visitor returns the object's size as an
// int so the scavenger can advance to the next object.
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  // A shortcut candidate is a cons string that may be shortcut to its
  // first component; it still has the ConsString layout.
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                                    Symbol::BodyDescriptor,
                                    int>::Visit);

  // FixedArray has a variable-length body, hence the FlexibleBodyVisitor.
  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor,
                                       int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  // During scavenges only the slots covered by ScavengeBodyDescriptor
  // are visited for a native context.
  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                                    Context::ScavengeBodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                                    SharedFunctionInfo::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  // Weak maps and regexps get no special treatment in new space; they
  // are scavenged like ordinary JS objects.
  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  // Register the per-instance-size specializations (one table entry per
  // known size, plus a generic fallback) for the remaining kinds.
  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}
99
100
// Populates the dispatch table used during full (mark-compact) GCs.
// Marking visitors return void; they mark reachable sub-objects and
// record slots for the collector.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSymbol,
                  &FixedBodyVisitor<StaticVisitor,
                                    Symbol::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  // Pointer-free payloads are visited as plain data.
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  // Native contexts need custom handling for their weak tail slots.
  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  // Weak map entries are handled by the concrete StaticVisitor.
  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitJSWeakMap);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                                    Oddball::BodyDescriptor,
                                    void>::Visit);

  // Maps, code, shared function infos and functions all have custom
  // marking logic (weak transitions, IC clearing, code flushing).
  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor,
                                    JSGlobalPropertyCell::BodyDescriptor,
                                    void>::Visit);

  // Per-instance-size specializations plus generic fallbacks.
  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}
171
172
// Marks the Code object referenced from a code-entry field and records
// the slot with the mark-compact collector so it can be updated if the
// code object moves during compaction.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
    Heap* heap, Address entry_address) {
  Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
  heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
  StaticVisitor::MarkObject(heap, code);
}
180
181
// Marks a heap object embedded in code via relocation info. Maps that
// can transition and are embedded in optimized code are skipped here
// (treated weakly) when the weak-embedded-maps and collect-maps flags
// are both enabled, so unused transitioning maps can be collected.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
  ASSERT(!rinfo->target_object()->IsConsString());
  HeapObject* object = HeapObject::cast(rinfo->target_object());
  if (!FLAG_weak_embedded_maps_in_optimized_code || !FLAG_collect_maps ||
      rinfo->host()->kind() != Code::OPTIMIZED_FUNCTION ||
      !object->IsMap() || !Map::cast(object)->CanTransition()) {
    heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
    StaticVisitor::MarkObject(heap, object);
  }
}
195
196
// Marks the JSGlobalPropertyCell referenced by a GLOBAL_PROPERTY_CELL
// relocation entry. Note: no reloc slot is recorded here, unlike the
// embedded-pointer and code-target visitors.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitGlobalPropertyCell(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
  JSGlobalPropertyCell* cell = rinfo->target_cell();
  StaticVisitor::MarkObject(heap, cell);
}
204
205
// Marks the debug-break Code target behind a patched JS return sequence
// or debug break slot, and records the reloc slot for the compactor.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
    Heap* heap, RelocInfo* rinfo) {
  // Only valid for a patched return sequence or a patched break slot.
  ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
          rinfo->IsPatchedReturnSequence()) ||
         (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
          rinfo->IsPatchedDebugBreakSlotSequence()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
217
218
// Marks the Code object a code-target relocation entry points at,
// first clearing inline cache stubs that should not survive this GC.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC || target->ic_state() == GENERIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    IC::Clear(rinfo->pc());
    // Re-read the target: clearing the IC patched the call site.
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
237
238
// Marks the code-age stub referenced by a CODE_AGE_SEQUENCE relocation
// entry and records the slot with the mark-compact collector.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
  Code* target = rinfo->code_age_stub();
  ASSERT(target != NULL);
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
248
249
// Visits a native context during full GC: the strong part is marked via
// the mark-compact body descriptor, while the trailing weak slots are
// only recorded (not marked), keeping those references weak.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
    Map* map, HeapObject* object) {
  FixedBodyVisitor<StaticVisitor,
                   Context::MarkCompactBodyDescriptor,
                   void>::Visit(map, object);

  // Record the weak slots so the compactor can update them, without
  // pushing their targets onto the marking deque.
  MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
  for (int idx = Context::FIRST_WEAK_SLOT;
       idx < Context::NATIVE_CONTEXT_SLOTS;
       ++idx) {
    Object** slot =
        HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
    collector->RecordSlot(slot, slot, *slot);
  }
}
266
267
// Visits a Map object. When map collection is enabled and the map can
// transition, its contents are marked specially (weak transitions and
// back pointers); otherwise all pointer fields are visited strongly.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's transitions
  // and back pointers in a special way to make these links weak.
  if (FLAG_collect_maps && map_object->CanTransition()) {
    MarkMapContents(heap, map_object);
  } else {
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}
289
290
// Visits a Code object: optionally clears its type-feedback cells and
// advances its code age, then iterates its body (reloc info and header
// pointer fields) with the static visitor.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCode(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Code* code = Code::cast(object);
  if (FLAG_cleanup_code_caches_at_gc) {
    code->ClearTypeFeedbackCells(heap);
  }
  // Code aging is skipped while serializing (Serializer::enabled()).
  if (FLAG_age_code && !Serializer::enabled()) {
    code->MakeOlder(heap->mark_compact_collector()->marking_parity());
  }
  code->CodeIterateBody<StaticVisitor>(heap);
}
304
305
// Visits a SharedFunctionInfo, deciding between strong and weak
// treatment of its code reference depending on whether code flushing is
// enabled and the function's code looks flushable. Also manages the
// optimized code map (flush it, or register it for weak processing).
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  // Reset per-context state if the global IC age advanced since the
  // last visit.
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code &&
      FLAG_flush_optimized_code_cache &&
      !shared->optimized_code_map()->IsSmi()) {
    // Always flush the optimized code map if requested by flag.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Add the shared function info holding an optimized code map to
      // the code flusher for processing of code maps after marking.
      collector->code_flusher()->AddOptimizedCodeMap(shared);
      // Treat all references within the code map weakly by marking the
      // code map itself but not pushing it onto the marking deque.
      FixedArray* code_map = FixedArray::cast(shared->optimized_code_map());
      StaticVisitor::MarkObjectWithoutPush(heap, code_map);
    }
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  } else {
    if (FLAG_cache_optimized_code && !shared->optimized_code_map()->IsSmi()) {
      // Flush optimized code map on major GCs without code flushing,
      // needed because cached code doesn't contain breakpoints.
      shared->ClearOptimizedCodeMap();
    }
  }
  // Default: keep the code reference strong.
  VisitSharedFunctionInfoStrongCode(heap, object);
}
352
353
// Visits a JSFunction. With code flushing enabled, flushable functions
// become flush candidates and their code reference is treated weakly;
// non-flushable functions get their unoptimized code (and that of any
// inlined functions) marked to prevent flushing.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->unchecked_shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}
390
391
// Visits a JSRegExp's pointer fields, from its properties slot through
// the end of its in-object properties (computed from the map).
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
    Map* map, HeapObject* object) {
  int last_property_offset =
      JSRegExp::kSize + kPointerSize * map->inobject_properties();
  StaticVisitor::VisitPointers(map->GetHeap(),
      HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
      HeapObject::RawField(object, last_property_offset));
}
401
402
// Marks the contents of a transitioning Map so that its transition and
// back-pointer links stay weak: targets are marked without being pushed
// onto the marking deque, and certain slots are deliberately not
// recorded for compaction (see inline comments).
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Since descriptor arrays are potentially shared, ensure that only the
  // descriptors that belong to this map are marked. The first time a
  // non-empty descriptor array is marked, its header is also visited. The slot
  // holding the descriptor array will be implicitly recorded when the pointer
  // fields of this map are visited.
  DescriptorArray* descriptors = map->instance_descriptors();
  if (StaticVisitor::MarkObjectWithoutPush(heap, descriptors) &&
      descriptors->length() > 0) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetFirstElementAddress(),
        descriptors->GetDescriptorEndSlot(0));
  }
  // Visit only the descriptors this map owns.
  int start = 0;
  int end = map->NumberOfOwnDescriptors();
  if (start < end) {
    StaticVisitor::VisitPointers(heap,
        descriptors->GetDescriptorStartSlot(start),
        descriptors->GetDescriptorEndSlot(end));
  }

  // Mark prototype dependent codes array but do not push it onto marking
  // stack, this will make references from it weak. We will clean dead
  // codes when we iterate over maps in ClearNonLiveTransitions.
  Object** slot = HeapObject::RawField(map, Map::kDependentCodeOffset);
  HeapObject* obj = HeapObject::cast(*slot);
  heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
  StaticVisitor::MarkObjectWithoutPush(heap, obj);

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
458
459
// Marks a transition array without pushing it onto the marking deque,
// so references from it (prototype transitions, keys) stay weak.
// Returns early if the array was already marked.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  // Transition keys (property names) are visited strongly.
  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}
482
483
// Marks the unoptimized code of every function inlined into the given
// optimized code object, reading them from its deoptimization data.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
    Heap* heap, Code* code) {
  // For optimized functions we should retain both non-optimized version
  // of its code and non-optimized version of all inlined functions.
  // This is required to support bailing out from inlined code.
  DeoptimizationInputData* data =
      DeoptimizationInputData::cast(code->deoptimization_data());
  FixedArray* literals = data->LiteralArray();
  for (int i = 0, count = data->InlinedFunctionCount()->value();
       i < count;
       i++) {
    JSFunction* inlined = JSFunction::cast(literals->get(i));
    StaticVisitor::MarkObject(heap, inlined->shared()->code());
  }
}
500
501
502inline static bool IsValidNonBuiltinContext(Object* context) {
503 return context->IsContext() &&
504 !Context::cast(context)->global_object()->IsJSBuiltinsObject();
505}
506
507
508inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
509 Object* undefined = heap->undefined_value();
510 return (info->script() != undefined) &&
511 (reinterpret_cast<Script*>(info->script())->source() != undefined);
512}
513
514
// Returns true when |function|'s code can be considered for flushing.
// A function whose code is already marked, lives in a builtin context,
// has optimized code, or (with code aging) code that is not yet old, is
// not flushable. Delegates the remaining checks to the shared-info
// overload.
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->unchecked_shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    if (!FLAG_age_code) {
      // Reset the manual code age counter while the shared info itself
      // is still unmarked.
      if (!Marking::MarkBitFrom(shared_info).Get()) {
        shared_info->set_code_age(0);
      }
    }
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->unchecked_context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  return IsFlushable(heap, shared_info);
}
549
550
// Returns true when |shared_info|'s code can be flushed: the code must
// be unmarked, recompilable from source, a plain lazy-compilable
// FUNCTION, not an API function, generator, toplevel script or
// %SetCode'd function, and old enough (by code age flag or by the
// manual survival counter below).
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // We do not (yet?) flush code for generator functions, because we don't know
  // if there are still live activations (generator objects) on the heap.
  if (shared_info->is_generator()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  // If this is a function initialized with %SetCode then the one-to-one
  // relation between SharedFunctionInfo and Code is broken.
  if (shared_info->dont_flush()) {
    return false;
  }

  if (FLAG_age_code) {
    return shared_info->code()->IsOld();
  } else {
    // How many collections newly compiled code object will survive before being
    // flushed.
    static const int kCodeAgeThreshold = 5;

    // Age this shared function info.
    if (shared_info->code_age() < kCodeAgeThreshold) {
      shared_info->set_code_age(shared_info->code_age() + 1);
      return false;
    }
    return true;
  }
}
615
616
617template<typename StaticVisitor>
618void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
619 Heap* heap, HeapObject* object) {
620 StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
621 Object** start_slot =
622 HeapObject::RawField(object,
623 SharedFunctionInfo::BodyDescriptor::kStartOffset);
624 Object** end_slot =
625 HeapObject::RawField(object,
626 SharedFunctionInfo::BodyDescriptor::kEndOffset);
627 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
628}
629
630
// Visits a SharedFunctionInfo's pointer fields while skipping the code
// field, which is treated weakly (the code flusher decides its fate).
// The STATIC_ASSERTs pin the field layout this skip depends on.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
    Heap* heap, HeapObject* object) {
  StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
  Object** name_slot =
      HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
  StaticVisitor::VisitPointer(heap, name_slot);

  // Skip visiting kCodeOffset as it is treated weakly here.
  STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
      SharedFunctionInfo::kCodeOffset);
  STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
      SharedFunctionInfo::kOptimizedCodeMapOffset);

  // Resume visiting at the field directly after the code field.
  Object** start_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::kOptimizedCodeMapOffset);
  Object** end_slot =
      HeapObject::RawField(object,
                           SharedFunctionInfo::BodyDescriptor::kEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
653
654
// Visits a JSFunction's pointer fields including the code entry (the
// code reference is kept strong via VisitCodeEntry).
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
    Heap* heap, HeapObject* object) {
  // Fields before the code entry.
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // The code entry needs the special slot-recording treatment.
  VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  // Remaining non-weak fields after the code entry.
  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
674
675
// Visits a JSFunction's pointer fields while skipping the code entry,
// which is treated weakly (the code flusher decides its fate).
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
    Heap* heap, HeapObject* object) {
  // Fields before the code entry.
  Object** start_slot =
      HeapObject::RawField(object, JSFunction::kPropertiesOffset);
  Object** end_slot =
      HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);

  // Skip visiting kCodeEntryOffset as it is treated weakly here.
  STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
      JSFunction::kPrototypeOrInitialMapOffset);

  // Remaining non-weak fields after the code entry.
  start_slot =
      HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
  end_slot =
      HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
  StaticVisitor::VisitPointers(heap, start_slot, end_slot);
}
695
696
// Iterates a Code object's body with a dynamic ObjectVisitor: the four
// header pointer fields, then every relocation entry matching the mode
// mask.
void Code::CodeIterateBody(ObjectVisitor* v) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the
  // templated CodeIterateBody (below). They should be kept in sync.
  IteratePointer(v, kRelocationInfoOffset);
  IteratePointer(v, kHandlerTableOffset);
  IteratePointer(v, kDeoptimizationDataOffset);
  IteratePointer(v, kTypeFeedbackInfoOffset);

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->Visit(v);
  }
}
718
719
// Static-visitor twin of CodeIterateBody above: visits the same four
// header pointer fields and the same relocation modes, but dispatches
// through the StaticVisitor template parameter instead of a virtual
// ObjectVisitor.
template<typename StaticVisitor>
void Code::CodeIterateBody(Heap* heap) {
  int mode_mask = RelocInfo::kCodeTargetMask |
                  RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                  RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
                  RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
                  RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
                  RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
                  RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);

  // There are two places where we iterate code bodies: here and the non-
  // templated CodeIterateBody (above). They should be kept in sync.
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
  StaticVisitor::VisitPointer(
      heap,
      reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));

  RelocIterator it(this, mode_mask);
  for (; !it.done(); it.next()) {
    it.rinfo()->template Visit<StaticVisitor>(heap);
  }
}
750
751
752} } // namespace v8::internal
753
754#endif // V8_OBJECTS_VISITING_INL_H_