// Copyright 2016 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/snapshot/startup-serializer.h"

#include "src/objects-inl.h"
#include "src/v8threads.h"

namespace v8 {
namespace internal {

StartupSerializer::StartupSerializer(
    Isolate* isolate,
    v8::SnapshotCreator::FunctionCodeHandling function_code_handling)
    : Serializer(isolate),
      clear_function_code_(function_code_handling ==
                           v8::SnapshotCreator::FunctionCodeHandling::kClear),
      serializing_builtins_(false) {
  InitializeCodeAddressMap();
}

StartupSerializer::~StartupSerializer() {
  OutputStatistics("StartupSerializer");
}

void StartupSerializer::SerializeObject(HeapObject* obj, HowToCode how_to_code,
                                        WhereToPoint where_to_point, int skip) {
  DCHECK(!obj->IsJSFunction());

  if (clear_function_code_) {
    if (obj->IsCode()) {
      Code* code = Code::cast(obj);
      // If the function code is compiled (either as native code or bytecode),
      // replace it with the lazy-compile builtin. The only exception is when
      // we are serializing the canonical interpreter-entry-trampoline builtin.
      if (code->kind() == Code::FUNCTION ||
          (!serializing_builtins_ &&
           code->is_interpreter_trampoline_builtin())) {
        obj = isolate()->builtins()->builtin(Builtins::kCompileLazy);
      }
    } else if (obj->IsBytecodeArray()) {
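      // Clear compiled bytecode as well: undefined is serialized in its
      // place, so the function is recompiled lazily after deserialization.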
      obj = isolate()->heap()->undefined_value();
    }
  } else if (obj->IsCode()) {
    Code* code = Code::cast(obj);
    if (code->kind() == Code::FUNCTION) {
      code->ClearInlineCaches();
      code->set_profiler_ticks(0);
    }
  }

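  // Hot objects are a small ring of recently serialized objects; if the
  // object is among them, it is encoded as a short reference and we are done.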
  if (SerializeHotObject(obj, how_to_code, where_to_point, skip)) return;

  int root_index = root_index_map_.Lookup(obj);
  // We can only encode a root as such if it has already been serialized.
  // That applies to root indices below the wave front.
  if (root_index != RootIndexMap::kInvalidRootIndex) {
    if (root_has_been_serialized_.test(root_index)) {
      PutRoot(root_index, obj, how_to_code, where_to_point, skip);
      return;
    }
  }

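  // An object that has been serialized before is encoded as a back reference
  // to its first occurrence.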
  if (SerializeBackReference(obj, how_to_code, where_to_point, skip)) return;

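  // Emit any accumulated skip distance before serializing the object body.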
  FlushSkip(skip);

  // Object has not yet been serialized. Serialize it here.
  ObjectSerializer object_serializer(this, obj, &sink_, how_to_code,
                                     where_to_point);
  object_serializer.Serialize();

  if (serializing_immortal_immovables_roots_ &&
      root_index != RootIndexMap::kInvalidRootIndex) {
    // Make sure that the immortal immovable root has been included in the
    // first chunk of its reserved space, so that it is deserialized onto the
    // first page of its space and stays immortal immovable.
    SerializerReference ref = reference_map_.Lookup(obj);
    CHECK(ref.is_back_reference() && ref.chunk_index() == 0);
  }
}

void StartupSerializer::SerializeWeakReferencesAndDeferred() {
  // This comes right after serialization of the partial snapshot, where we
  // add entries to the partial snapshot cache of the startup snapshot. Add
  // one entry with 'undefined' to terminate the partial snapshot cache.
  Object* undefined = isolate()->heap()->undefined_value();
  VisitPointer(&undefined);
  isolate()->heap()->IterateWeakRoots(this, VISIT_ALL);
  SerializeDeferredObjects();
  Pad();
}

int StartupSerializer::PartialSnapshotCacheIndex(HeapObject* heap_object) {
  int index;
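  // LookupOrInsert returns whether |heap_object| was already in the cache and
  // stores its (possibly freshly assigned) cache index in |index|.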
  if (!partial_cache_index_map_.LookupOrInsert(heap_object, &index)) {
    // This object is not part of the partial snapshot cache yet. Add it to
    // the startup snapshot so we can refer to it via partial snapshot index
    // from the partial snapshot.
    VisitPointer(reinterpret_cast<Object**>(&heap_object));
  }
  return index;
}

void StartupSerializer::Synchronize(VisitorSynchronization::SyncTag tag) {
  // We expect the builtins tag after builtins have been serialized.
  DCHECK(!serializing_builtins_ || tag == VisitorSynchronization::kBuiltins);
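  // The root iterator visits the builtins right after the handle scope
  // section, so the kHandleScope tag marks the start of builtin serialization
  // and the next tag (kBuiltins) marks its end.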
  serializing_builtins_ = (tag == VisitorSynchronization::kHandleScope);
  sink_.Put(kSynchronize, "Synchronize");
}

void StartupSerializer::SerializeStrongReferences() {
  Isolate* isolate = this->isolate();
  // No active threads.
  CHECK_NULL(isolate->thread_manager()->FirstThreadStateInUse());
  // No active or weak handles.
  CHECK(isolate->handle_scope_implementer()->blocks()->is_empty());
  CHECK_EQ(0, isolate->global_handles()->NumberOfWeakHandles());
  CHECK_EQ(0, isolate->eternal_handles()->NumberOfHandles());
  // We don't support serializing installed extensions.
  CHECK(!isolate->has_installed_extensions());
  // First visit immortal immovable roots to make sure they end up on the
  // first page.
  serializing_immortal_immovables_roots_ = true;
  isolate->heap()->IterateStrongRoots(this, VISIT_ONLY_STRONG_ROOT_LIST);
  // Check that immortal immovable roots are allocated on the first page.
  CHECK(HasNotExceededFirstPageOfEachSpace());
  serializing_immortal_immovables_roots_ = false;
  // Visit the rest of the strong roots.
  // Clear the stack limits to make the snapshot reproducible.
  // Reset them again afterwards.
  isolate->heap()->ClearStackLimits();
  isolate->heap()->IterateSmiRoots(this);
  isolate->heap()->SetStackLimits();

  isolate->heap()->IterateStrongRoots(this,
                                      VISIT_ONLY_STRONG_FOR_SERIALIZATION);
}

void StartupSerializer::VisitPointers(Object** start, Object** end) {
  if (start == isolate()->heap()->roots_array_start()) {
    // Serializing the root list needs special handling:
    // - The first pass over the root list only serializes immortal immovables.
    // - The second pass over the root list serializes the rest.
    // - Only root list elements that have been fully serialized can be
    //   referenced as roots using kRootArray bytecodes.
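    // Runs of skipped roots accumulate into a single skip distance, which is
    // flushed right before the next root that is actually serialized.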
    int skip = 0;
    for (Object** current = start; current < end; current++) {
      int root_index = static_cast<int>(current - start);
      if (RootShouldBeSkipped(root_index)) {
        skip += kPointerSize;
        continue;
      } else {
        if ((*current)->IsSmi()) {
          FlushSkip(skip);
          PutSmi(Smi::cast(*current));
        } else {
          SerializeObject(HeapObject::cast(*current), kPlain, kStartOfObject,
                          skip);
        }
        root_has_been_serialized_.set(root_index);
        skip = 0;
      }
    }
    FlushSkip(skip);
  } else {
    Serializer::VisitPointers(start, end);
  }
}

bool StartupSerializer::RootShouldBeSkipped(int root_index) {
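  // The stack limit roots encode host-specific addresses; they are skipped
  // here and set up freshly by the deserializing isolate.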
  if (root_index == Heap::kStackLimitRootIndex ||
      root_index == Heap::kRealStackLimitRootIndex) {
    return true;
  }
  return Heap::RootIsImmortalImmovable(root_index) !=
         serializing_immortal_immovables_roots_;
}

}  // namespace internal
}  // namespace v8