blob: 309cddf66c7b617fd3a0236d4ea47fa19dc5c712 [file] [log] [blame]
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +00001// Copyright 2012 the V8 project authors. All rights reserved.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +00002// Redistribution and use in source and binary forms, with or without
3// modification, are permitted provided that the following conditions are
4// met:
5//
6// * Redistributions of source code must retain the above copyright
7// notice, this list of conditions and the following disclaimer.
8// * Redistributions in binary form must reproduce the above
9// copyright notice, this list of conditions and the following
10// disclaimer in the documentation and/or other materials provided
11// with the distribution.
12// * Neither the name of Google Inc. nor the names of its
13// contributors may be used to endorse or promote products derived
14// from this software without specific prior written permission.
15//
16// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
17// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
18// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
19// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
20// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
21// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
22// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
23// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
24// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
25// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
26// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27
28#ifndef V8_OBJECTS_VISITING_INL_H_
29#define V8_OBJECTS_VISITING_INL_H_
30
31
32namespace v8 {
33namespace internal {
34
// Populates the dispatch table used by the new-space (scavenge) visitor.
// Every visitor registered here is the "int"-returning flavor of the body
// visitors: each returns the size of the object it visited.
template<typename StaticVisitor>
void StaticNewSpaceVisitor<StaticVisitor>::Initialize() {
  // Shortcut candidates share the cons-string body layout.
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    int>::Visit);

  // Fixed arrays have variable length, hence the flexible body visitor.
  table_.Register(kVisitFixedArray,
                  &FlexibleBodyVisitor<StaticVisitor,
                                       FixedArray::BodyDescriptor,
                                       int>::Visit);

  table_.Register(kVisitFixedDoubleArray, &VisitFixedDoubleArray);

  // During scavenge only the prefix described by
  // Context::ScavengeBodyDescriptor is visited.
  table_.Register(kVisitNativeContext,
                  &FixedBodyVisitor<StaticVisitor,
                                    Context::ScavengeBodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitByteArray, &VisitByteArray);

  table_.Register(kVisitSharedFunctionInfo,
                  &FixedBodyVisitor<StaticVisitor,
                                    SharedFunctionInfo::BodyDescriptor,
                                    int>::Visit);

  table_.Register(kVisitSeqOneByteString, &VisitSeqOneByteString);

  table_.Register(kVisitSeqTwoByteString, &VisitSeqTwoByteString);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  table_.Register(kVisitFreeSpace, &VisitFreeSpace);

  // Weak maps and regexps are scavenged like ordinary JS objects.
  table_.Register(kVisitJSWeakMap, &JSObjectVisitor::Visit);

  table_.Register(kVisitJSRegExp, &JSObjectVisitor::Visit);

  // Register the size-specialized visitor families (specialized entries
  // plus a generic fallback per family).
  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();
  table_.template RegisterSpecializations<StructVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}
94
95
// Populates the dispatch table used by the mark-compact marking visitor.
// These visitors are the "void"-returning flavor: the marker does not
// need object sizes.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::Initialize() {
  table_.Register(kVisitShortcutCandidate,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitConsString,
                  &FixedBodyVisitor<StaticVisitor,
                                    ConsString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitSlicedString,
                  &FixedBodyVisitor<StaticVisitor,
                                    SlicedString::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitFixedArray, &FixedArrayVisitor::Visit);

  // Double arrays, byte arrays, free space and sequential strings are
  // visited as data objects (no tagged fields to mark through).
  table_.Register(kVisitFixedDoubleArray, &DataObjectVisitor::Visit);

  // Native contexts need special weak-slot handling; see
  // VisitNativeContext below.
  table_.Register(kVisitNativeContext, &VisitNativeContext);

  table_.Register(kVisitByteArray, &DataObjectVisitor::Visit);

  table_.Register(kVisitFreeSpace, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqOneByteString, &DataObjectVisitor::Visit);

  table_.Register(kVisitSeqTwoByteString, &DataObjectVisitor::Visit);

  // The concrete StaticVisitor decides how weak map entries are treated.
  table_.Register(kVisitJSWeakMap, &StaticVisitor::VisitJSWeakMap);

  table_.Register(kVisitOddball,
                  &FixedBodyVisitor<StaticVisitor,
                                    Oddball::BodyDescriptor,
                                    void>::Visit);

  table_.Register(kVisitMap, &VisitMap);

  table_.Register(kVisitCode, &VisitCode);

  // SharedFunctionInfo and JSFunction get custom visitors that implement
  // code flushing (see VisitSharedFunctionInfo / VisitJSFunction).
  table_.Register(kVisitSharedFunctionInfo, &VisitSharedFunctionInfo);

  table_.Register(kVisitJSFunction, &VisitJSFunction);

  // Registration for kVisitJSRegExp is done by StaticVisitor.

  table_.Register(kVisitPropertyCell,
                  &FixedBodyVisitor<StaticVisitor,
                                    JSGlobalPropertyCell::BodyDescriptor,
                                    void>::Visit);

  table_.template RegisterSpecializations<DataObjectVisitor,
                                          kVisitDataObject,
                                          kVisitDataObjectGeneric>();

  table_.template RegisterSpecializations<JSObjectVisitor,
                                          kVisitJSObject,
                                          kVisitJSObjectGeneric>();

  table_.template RegisterSpecializations<StructObjectVisitor,
                                          kVisitStruct,
                                          kVisitStructGeneric>();
}
161
162
163template<typename StaticVisitor>
164void StaticMarkingVisitor<StaticVisitor>::VisitCodeEntry(
165 Heap* heap, Address entry_address) {
166 Code* code = Code::cast(Code::GetObjectFromEntryAddress(entry_address));
167 heap->mark_compact_collector()->RecordCodeEntrySlot(entry_address, code);
168 StaticVisitor::MarkObject(heap, code);
169}
170
171
172template<typename StaticVisitor>
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000173void StaticMarkingVisitor<StaticVisitor>::VisitEmbeddedPointer(
174 Heap* heap, RelocInfo* rinfo) {
175 ASSERT(rinfo->rmode() == RelocInfo::EMBEDDED_OBJECT);
176 ASSERT(!rinfo->target_object()->IsConsString());
177 HeapObject* object = HeapObject::cast(rinfo->target_object());
178 heap->mark_compact_collector()->RecordRelocSlot(rinfo, object);
179 StaticVisitor::MarkObject(heap, object);
180}
181
182
183template<typename StaticVisitor>
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000184void StaticMarkingVisitor<StaticVisitor>::VisitGlobalPropertyCell(
185 Heap* heap, RelocInfo* rinfo) {
186 ASSERT(rinfo->rmode() == RelocInfo::GLOBAL_PROPERTY_CELL);
187 JSGlobalPropertyCell* cell = rinfo->target_cell();
188 StaticVisitor::MarkObject(heap, cell);
189}
190
191
192template<typename StaticVisitor>
193void StaticMarkingVisitor<StaticVisitor>::VisitDebugTarget(
194 Heap* heap, RelocInfo* rinfo) {
195 ASSERT((RelocInfo::IsJSReturn(rinfo->rmode()) &&
196 rinfo->IsPatchedReturnSequence()) ||
197 (RelocInfo::IsDebugBreakSlot(rinfo->rmode()) &&
198 rinfo->IsPatchedDebugBreakSlotSequence()));
199 Code* target = Code::GetCodeFromTargetAddress(rinfo->call_address());
200 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
201 StaticVisitor::MarkObject(heap, target);
202}
203
204
// Visits a code target embedded in code. May clear inline caches (ICs)
// before marking, in which case the call site is rewritten and the target
// must be re-fetched.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitCodeTarget(
    Heap* heap, RelocInfo* rinfo) {
  ASSERT(RelocInfo::IsCodeTarget(rinfo->rmode()));
  Code* target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  // Monomorphic ICs are preserved when possible, but need to be flushed
  // when they might be keeping a Context alive, or when the heap is about
  // to be serialized.
  if (FLAG_cleanup_code_caches_at_gc && target->is_inline_cache_stub()
      && (target->ic_state() == MEGAMORPHIC ||
          target->ic_state() == POLYMORPHIC || heap->flush_monomorphic_ics() ||
          Serializer::enabled() || target->ic_age() != heap->global_ic_age())) {
    // Clearing the IC patches the call site; re-read the (new) target so
    // the slot recording and marking below see the current code object.
    IC::Clear(rinfo->pc());
    target = Code::GetCodeFromTargetAddress(rinfo->target_address());
  }
  heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
  StaticVisitor::MarkObject(heap, target);
}
223
224
225template<typename StaticVisitor>
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +0000226void StaticMarkingVisitor<StaticVisitor>::VisitCodeAgeSequence(
227 Heap* heap, RelocInfo* rinfo) {
228 ASSERT(RelocInfo::IsCodeAgeSequence(rinfo->rmode()));
229 Code* target = rinfo->code_age_stub();
230 ASSERT(target != NULL);
231 heap->mark_compact_collector()->RecordRelocSlot(rinfo, target);
232 StaticVisitor::MarkObject(heap, target);
233}
234
235
236template<typename StaticVisitor>
yangguo@chromium.org46839fb2012-08-28 09:06:19 +0000237void StaticMarkingVisitor<StaticVisitor>::VisitNativeContext(
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000238 Map* map, HeapObject* object) {
239 FixedBodyVisitor<StaticVisitor,
240 Context::MarkCompactBodyDescriptor,
241 void>::Visit(map, object);
242
243 MarkCompactCollector* collector = map->GetHeap()->mark_compact_collector();
244 for (int idx = Context::FIRST_WEAK_SLOT;
yangguo@chromium.org46839fb2012-08-28 09:06:19 +0000245 idx < Context::NATIVE_CONTEXT_SLOTS;
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000246 ++idx) {
247 Object** slot =
248 HeapObject::RawField(object, FixedArray::OffsetOfElementAt(idx));
249 collector->RecordSlot(slot, slot, *slot);
250 }
251}
252
253
// Visits a Map object. When map collection is enabled, transitions and
// back pointers are marked specially (weakly) via MarkMapContents;
// otherwise all pointer fields are visited strongly.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitMap(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  Map* map_object = Map::cast(object);

  // Clears the cache of ICs related to this map.
  if (FLAG_cleanup_code_caches_at_gc) {
    map_object->ClearCodeCache(heap);
  }

  // When map collection is enabled we have to mark through map's
  // transitions and back pointers in a special way to make these links
  // weak. Only maps for subclasses of JSReceiver can have transitions.
  STATIC_ASSERT(LAST_TYPE == LAST_JS_RECEIVER_TYPE);
  if (FLAG_collect_maps &&
      map_object->instance_type() >= FIRST_JS_RECEIVER_TYPE) {
    MarkMapContents(heap, map_object);
  } else {
    // Maps that cannot have transitions are visited strongly.
    StaticVisitor::VisitPointers(heap,
        HeapObject::RawField(object, Map::kPointerFieldsBeginOffset),
        HeapObject::RawField(object, Map::kPointerFieldsEndOffset));
  }
}
278
279
280template<typename StaticVisitor>
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000281void StaticMarkingVisitor<StaticVisitor>::VisitCode(
282 Map* map, HeapObject* object) {
283 Heap* heap = map->GetHeap();
284 Code* code = Code::cast(object);
285 if (FLAG_cleanup_code_caches_at_gc) {
286 code->ClearTypeFeedbackCells(heap);
287 }
mvstanton@chromium.orge4ac3ef2012-11-12 14:53:34 +0000288 if (FLAG_age_code && !Serializer::enabled()) {
289 code->MakeOlder(heap->mark_compact_collector()->marking_parity());
290 }
mstarzinger@chromium.org471f2f12012-08-10 14:46:33 +0000291 code->CodeIterateBody<StaticVisitor>(heap);
292}
293
294
// Visits a SharedFunctionInfo, deciding whether its code reference is
// treated weakly (flushing candidate) or strongly.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfo(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  SharedFunctionInfo* shared = SharedFunctionInfo::cast(object);
  // Reset state tied to the global IC age when it has changed since the
  // last visit.
  if (shared->ic_age() != heap->global_ic_age()) {
    shared->ResetForNewContext(heap->global_ic_age());
  }
  if (FLAG_cache_optimized_code) {
    // Flush optimized code map on major GC.
    // TODO(mstarzinger): We may experiment with rebuilding it or with
    // retaining entries which should survive as we iterate through
    // optimized functions anyway.
    shared->ClearOptimizedCodeMap();
  }
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, shared)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(shared);
      // Treat the reference to the code object weakly.
      VisitSharedFunctionInfoWeakCode(heap, object);
      return;
    }
  }
  // Flushing is disabled or not applicable: keep the code reference strong.
  VisitSharedFunctionInfoStrongCode(heap, object);
}
327
328
// Visits a JSFunction, deciding whether its code entry is treated weakly
// (function is a flushing candidate) or strongly.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::VisitJSFunction(
    Map* map, HeapObject* object) {
  Heap* heap = map->GetHeap();
  JSFunction* function = JSFunction::cast(object);
  MarkCompactCollector* collector = heap->mark_compact_collector();
  if (collector->is_code_flushing_enabled()) {
    if (IsFlushable(heap, function)) {
      // This function's code looks flushable. But we have to postpone
      // the decision until we see all functions that point to the same
      // SharedFunctionInfo because some of them might be optimized.
      // That would also make the non-optimized version of the code
      // non-flushable, because it is required for bailing out from
      // optimized code.
      collector->code_flusher()->AddCandidate(function);
      // Visit shared function info immediately to avoid double checking
      // of its flushability later. This is just an optimization because
      // the shared function info would eventually be visited.
      SharedFunctionInfo* shared = function->unchecked_shared();
      if (StaticVisitor::MarkObjectWithoutPush(heap, shared)) {
        StaticVisitor::MarkObject(heap, shared->map());
        VisitSharedFunctionInfoWeakCode(heap, shared);
      }
      // Treat the reference to the code object weakly.
      VisitJSFunctionWeakCode(heap, object);
      return;
    } else {
      // Visit all unoptimized code objects to prevent flushing them.
      StaticVisitor::MarkObject(heap, function->shared()->code());
      if (function->code()->kind() == Code::OPTIMIZED_FUNCTION) {
        MarkInlinedFunctionsCode(heap, function->code());
      }
    }
  }
  VisitJSFunctionStrongCode(heap, object);
}
365
366
367template<typename StaticVisitor>
verwaest@chromium.orgb6d052d2012-07-27 08:03:27 +0000368void StaticMarkingVisitor<StaticVisitor>::VisitJSRegExp(
369 Map* map, HeapObject* object) {
370 int last_property_offset =
371 JSRegExp::kSize + kPointerSize * map->inobject_properties();
danno@chromium.org2ab0c3b2012-10-05 08:50:56 +0000372 StaticVisitor::VisitPointers(map->GetHeap(),
373 HeapObject::RawField(object, JSRegExp::kPropertiesOffset),
374 HeapObject::RawField(object, last_property_offset));
ulan@chromium.org56c14af2012-09-20 12:51:09 +0000375}
376
377
// Marks the contents of a Map so that its back pointer and transitions act
// as weak links. The order here matters: the transition array must be
// marked (without pushing) before the map's pointer fields are visited.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkMapContents(
    Heap* heap, Map* map) {
  // Make sure that the back pointer stored either in the map itself or
  // inside its transitions array is marked. Skip recording the back
  // pointer slot since map space is not compacted.
  StaticVisitor::MarkObject(heap, HeapObject::cast(map->GetBackPointer()));

  // Treat pointers in the transitions array as weak and also mark that
  // array to prevent visiting it later. Skip recording the transition
  // array slot, since it will be implicitly recorded when the pointer
  // fields of this map are visited.
  TransitionArray* transitions = map->unchecked_transition_array();
  if (transitions->IsTransitionArray()) {
    MarkTransitionArray(heap, transitions);
  } else {
    // Already marked by marking map->GetBackPointer() above.
    ASSERT(transitions->IsMap() || transitions->IsUndefined());
  }

  // Mark the pointer fields of the Map. Since the transitions array has
  // been marked already, it is fine that one of these fields contains a
  // pointer to it.
  StaticVisitor::VisitPointers(heap,
      HeapObject::RawField(map, Map::kPointerFieldsBeginOffset),
      HeapObject::RawField(map, Map::kPointerFieldsEndOffset));
}
405
406
// Marks a transition array without pushing it onto the marking stack, so
// the transition targets it references stay weak. Key slots are still
// visited strongly.
template<typename StaticVisitor>
void StaticMarkingVisitor<StaticVisitor>::MarkTransitionArray(
    Heap* heap, TransitionArray* transitions) {
  // Bail out if the array was already marked (handles sharing/reentry).
  if (!StaticVisitor::MarkObjectWithoutPush(heap, transitions)) return;

  // Simple transitions do not have keys nor prototype transitions.
  if (transitions->IsSimpleTransition()) return;

  if (transitions->HasPrototypeTransitions()) {
    // Mark prototype transitions array but do not push it onto marking
    // stack, this will make references from it weak. We will clean dead
    // prototype transitions in ClearNonLiveTransitions.
    Object** slot = transitions->GetPrototypeTransitionsSlot();
    HeapObject* obj = HeapObject::cast(*slot);
    heap->mark_compact_collector()->RecordSlot(slot, slot, obj);
    StaticVisitor::MarkObjectWithoutPush(heap, obj);
  }

  // Transition keys (names) are kept alive strongly.
  for (int i = 0; i < transitions->number_of_transitions(); ++i) {
    StaticVisitor::VisitPointer(heap, transitions->GetKeySlot(i));
  }
}
429
430
svenpanne@chromium.orgc859c4f2012-10-15 11:51:39 +0000431template<typename StaticVisitor>
432void StaticMarkingVisitor<StaticVisitor>::MarkInlinedFunctionsCode(
433 Heap* heap, Code* code) {
434 // For optimized functions we should retain both non-optimized version
435 // of its code and non-optimized version of all inlined functions.
436 // This is required to support bailing out from inlined code.
437 DeoptimizationInputData* data =
438 DeoptimizationInputData::cast(code->deoptimization_data());
439 FixedArray* literals = data->LiteralArray();
440 for (int i = 0, count = data->InlinedFunctionCount()->value();
441 i < count;
442 i++) {
443 JSFunction* inlined = JSFunction::cast(literals->get(i));
444 StaticVisitor::MarkObject(heap, inlined->shared()->code());
445 }
446}
447
448
449inline static bool IsValidNonBuiltinContext(Object* context) {
450 return context->IsContext() &&
451 !Context::cast(context)->global_object()->IsJSBuiltinsObject();
452}
453
454
455inline static bool HasSourceCode(Heap* heap, SharedFunctionInfo* info) {
456 Object* undefined = heap->undefined_value();
457 return (info->script() != undefined) &&
458 (reinterpret_cast<Script*>(info->script())->source() != undefined);
459}
460
461
// Returns true when this function's code may be flushed. Also resets the
// shared info's code age as a side effect when the code is live and code
// aging is disabled.
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, JSFunction* function) {
  SharedFunctionInfo* shared_info = function->unchecked_shared();

  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(function->code());
  if (code_mark.Get()) {
    if (!FLAG_age_code) {
      // Without code aging, a live code object resets the manual age
      // counter on the (not yet marked) shared info.
      if (!Marking::MarkBitFrom(shared_info).Get()) {
        shared_info->set_code_age(0);
      }
    }
    return false;
  }

  // The function must have a valid context and not be a builtin.
  if (!IsValidNonBuiltinContext(function->unchecked_context())) {
    return false;
  }

  // We do not (yet) flush code for optimized functions.
  if (function->code() != shared_info->code()) {
    return false;
  }

  // Check age of optimized code.
  if (FLAG_age_code && !function->code()->IsOld()) {
    return false;
  }

  // Finally defer to the SharedFunctionInfo-level checks.
  return IsFlushable(heap, shared_info);
}
496
497
// Returns true when the code attached to this SharedFunctionInfo may be
// flushed. When code aging is off, ages the shared info as a side effect.
template<typename StaticVisitor>
bool StaticMarkingVisitor<StaticVisitor>::IsFlushable(
    Heap* heap, SharedFunctionInfo* shared_info) {
  // Code is either on stack, in compilation cache or referenced
  // by optimized version of function.
  MarkBit code_mark = Marking::MarkBitFrom(shared_info->code());
  if (code_mark.Get()) {
    return false;
  }

  // The function must be compiled and have the source code available,
  // to be able to recompile it in case we need the function again.
  if (!(shared_info->is_compiled() && HasSourceCode(heap, shared_info))) {
    return false;
  }

  // We never flush code for API functions.
  Object* function_data = shared_info->function_data();
  if (function_data->IsFunctionTemplateInfo()) {
    return false;
  }

  // Only flush code for functions.
  if (shared_info->code()->kind() != Code::FUNCTION) {
    return false;
  }

  // Function must be lazy compilable.
  if (!shared_info->allows_lazy_compilation()) {
    return false;
  }

  // If this is a full script wrapped in a function we do not flush the code.
  if (shared_info->is_toplevel()) {
    return false;
  }

  if (FLAG_age_code) {
    // With code aging, flushability is decided by the code object's age.
    return shared_info->code()->IsOld();
  } else {
    // How many collections newly compiled code object will survive before
    // being flushed.
    static const int kCodeAgeThreshold = 5;

    // Age this shared function info.
    if (shared_info->code_age() < kCodeAgeThreshold) {
      shared_info->set_code_age(shared_info->code_age() + 1);
      return false;
    }
    return true;
  }
}
550
551
552template<typename StaticVisitor>
553void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoStrongCode(
554 Heap* heap, HeapObject* object) {
555 StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
556 Object** start_slot =
557 HeapObject::RawField(object,
558 SharedFunctionInfo::BodyDescriptor::kStartOffset);
559 Object** end_slot =
560 HeapObject::RawField(object,
561 SharedFunctionInfo::BodyDescriptor::kEndOffset);
562 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
563}
564
565
566template<typename StaticVisitor>
567void StaticMarkingVisitor<StaticVisitor>::VisitSharedFunctionInfoWeakCode(
568 Heap* heap, HeapObject* object) {
569 StaticVisitor::BeforeVisitingSharedFunctionInfo(object);
570 Object** name_slot =
571 HeapObject::RawField(object, SharedFunctionInfo::kNameOffset);
572 StaticVisitor::VisitPointer(heap, name_slot);
573
574 // Skip visiting kCodeOffset as it is treated weakly here.
575 STATIC_ASSERT(SharedFunctionInfo::kNameOffset + kPointerSize ==
576 SharedFunctionInfo::kCodeOffset);
577 STATIC_ASSERT(SharedFunctionInfo::kCodeOffset + kPointerSize ==
578 SharedFunctionInfo::kOptimizedCodeMapOffset);
579
580 Object** start_slot =
581 HeapObject::RawField(object,
582 SharedFunctionInfo::kOptimizedCodeMapOffset);
583 Object** end_slot =
584 HeapObject::RawField(object,
585 SharedFunctionInfo::BodyDescriptor::kEndOffset);
586 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
587}
588
589
590template<typename StaticVisitor>
591void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionStrongCode(
592 Heap* heap, HeapObject* object) {
593 Object** start_slot =
594 HeapObject::RawField(object, JSFunction::kPropertiesOffset);
595 Object** end_slot =
596 HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
597 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
598
599 VisitCodeEntry(heap, object->address() + JSFunction::kCodeEntryOffset);
600 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
601 JSFunction::kPrototypeOrInitialMapOffset);
602
603 start_slot =
604 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
605 end_slot =
606 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
607 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
608}
609
610
611template<typename StaticVisitor>
612void StaticMarkingVisitor<StaticVisitor>::VisitJSFunctionWeakCode(
613 Heap* heap, HeapObject* object) {
614 Object** start_slot =
615 HeapObject::RawField(object, JSFunction::kPropertiesOffset);
616 Object** end_slot =
617 HeapObject::RawField(object, JSFunction::kCodeEntryOffset);
618 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
619
620 // Skip visiting kCodeEntryOffset as it is treated weakly here.
621 STATIC_ASSERT(JSFunction::kCodeEntryOffset + kPointerSize ==
622 JSFunction::kPrototypeOrInitialMapOffset);
623
624 start_slot =
625 HeapObject::RawField(object, JSFunction::kPrototypeOrInitialMapOffset);
626 end_slot =
627 HeapObject::RawField(object, JSFunction::kNonWeakFieldsEndOffset);
628 StaticVisitor::VisitPointers(heap, start_slot, end_slot);
629}
630
631
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000632void Code::CodeIterateBody(ObjectVisitor* v) {
633 int mode_mask = RelocInfo::kCodeTargetMask |
634 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
635 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
636 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
637 RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
638 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
639 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
640
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000641 // There are two places where we iterate code bodies: here and the
642 // templated CodeIterateBody (below). They should be kept in sync.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000643 IteratePointer(v, kRelocationInfoOffset);
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000644 IteratePointer(v, kHandlerTableOffset);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000645 IteratePointer(v, kDeoptimizationDataOffset);
jkummerow@chromium.orgf7a58842012-02-21 10:08:21 +0000646 IteratePointer(v, kTypeFeedbackInfoOffset);
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000647
648 RelocIterator it(this, mode_mask);
649 for (; !it.done(); it.next()) {
650 it.rinfo()->Visit(v);
651 }
652}
653
654
655template<typename StaticVisitor>
656void Code::CodeIterateBody(Heap* heap) {
657 int mode_mask = RelocInfo::kCodeTargetMask |
658 RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
659 RelocInfo::ModeMask(RelocInfo::GLOBAL_PROPERTY_CELL) |
660 RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
661 RelocInfo::ModeMask(RelocInfo::JS_RETURN) |
662 RelocInfo::ModeMask(RelocInfo::DEBUG_BREAK_SLOT) |
663 RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
664
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000665 // There are two places where we iterate code bodies: here and the
666 // non-templated CodeIterateBody (above). They should be kept in sync.
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000667 StaticVisitor::VisitPointer(
668 heap,
669 reinterpret_cast<Object**>(this->address() + kRelocationInfoOffset));
670 StaticVisitor::VisitPointer(
671 heap,
jkummerow@chromium.org04e4f1e2011-11-14 13:36:17 +0000672 reinterpret_cast<Object**>(this->address() + kHandlerTableOffset));
673 StaticVisitor::VisitPointer(
674 heap,
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000675 reinterpret_cast<Object**>(this->address() + kDeoptimizationDataOffset));
danno@chromium.orgfa458e42012-02-01 10:48:36 +0000676 StaticVisitor::VisitPointer(
677 heap,
jkummerow@chromium.orgf7a58842012-02-21 10:08:21 +0000678 reinterpret_cast<Object**>(this->address() + kTypeFeedbackInfoOffset));
erik.corry@gmail.comc3b670f2011-10-05 21:44:48 +0000679
680 RelocIterator it(this, mode_mask);
681 for (; !it.done(); it.next()) {
682 it.rinfo()->template Visit<StaticVisitor>(heap);
683 }
684}
685
686
687} } // namespace v8::internal
688
689#endif // V8_OBJECTS_VISITING_INL_H_