// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/address-map.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"
#include "src/frames-inl.h"

namespace v8 {
namespace internal {
namespace compiler {

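// Jump tables registered via AddJumpTable() during instruction assembly are
// chained into a list and only emitted after the main code; see the
// "Emit the jump tables" step in GenerateCode().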
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};


CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(new (code->zone()) FrameAccessState(frame)),
      linkage_(linkage),
      code_(code),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kYes),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  if (code->ContainsCall()) {
    frame->MarkNeedsFrame();
  }
}


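// Assembles the final code object: prologue, deoptimization literals, all
// instruction blocks (non-deferred first, then deferred), out-of-line code,
// jump tables, and the safepoint, handler and deoptimization tables.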
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(masm(), StackFrame::MANUAL);

  // Emit a code line info recording start event.
  PositionsRecorder* recorder = masm()->positions_recorder();
  LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }
  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());
  AssemblePrologue();
  if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
    masm()->InitializeRootRegister();
  }

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (auto& inlined : info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.shared_info);
    }
  }
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all unoptimized code objects of inlined
  // functions. This ensures unoptimized code is kept alive by optimized code.
  for (auto& inlined : info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
    }
  }

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (auto const block : code()->instruction_blocks()) {
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Ensure lazy deopt doesn't patch handler entry points.
      if (block->IsHandler()) EnsureSpaceForLazyDeopt();
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(200);
        char* buffer_start = buffer.start();

        int next = SNPrintF(
            buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
            block->IsDeferred() ? " (deferred)" : "",
            block->needs_frame() ? "" : " (no frame)",
            block->must_construct_frame() ? " (construct frame)" : "",
            block->must_deconstruct_frame() ? " (deconstruct frame)" : "");

        buffer = buffer.SubVector(next, buffer.length());

        if (block->IsLoopHeader()) {
          next =
              SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        if (block->loop_header().IsValid()) {
          next =
              SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        SNPrintF(buffer, " --");
        masm()->RecordComment(buffer_start);
      }
      masm()->bind(GetLabel(current_block_));
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        AssembleInstruction(code()->InstructionAt(i));
      }
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());

  Handle<Code> result =
      v8::internal::CodeGenerator::MakeCodeEpilogue(masm(), info);
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetTotalFrameSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      int position = handlers_[i].handler->pos();
      HandlerTable::CatchPrediction prediction = handlers_[i].caught_locally
                                                     ? HandlerTable::CAUGHT
                                                     : HandlerTable::UNCAUGHT;
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), position, prediction);
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  // Emit a code line info recording stop event.
  void* line_info = recorder->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));

  return result;
}


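// Returns true if {block} is immediately after the block currently being
// assembled in assembly order, so that a jump to it can be elided.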
bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
  return code()
      ->InstructionBlockAt(current_block_)
      ->ao_number()
      .IsNext(code()->InstructionBlockAt(block)->ao_number());
}


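// Records a GC safepoint at the current pc: every tagged spill slot and, if
// requested, every tagged register in {references} is marked as holding a
// pointer.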
void CodeGenerator::RecordSafepoint(ReferenceMap* references,
                                    Safepoint::Kind kind, int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  int stackSlotToSpillSlotDelta =
      frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
  for (auto& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = LocationOperand::cast(operand).index();
      DCHECK(index >= 0);
      // We might index values in the fixed part of the frame (i.e. the
      // closure pointer or the context pointer); these are not spill slots
      // and therefore don't work with the SafepointTable currently, but
      // we also don't need to worry about them, since the GC has special
      // knowledge about those fields anyway.
      if (index < stackSlotToSpillSlotDelta) continue;
      safepoint.DefinePointerSlot(index, zone());
    } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = LocationOperand::cast(operand).GetRegister();
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}


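// Checks whether {object} can simply be reloaded from the current stack frame
// (the context or closure slot) instead of being embedded as a constant.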
bool CodeGenerator::IsMaterializableFromFrame(Handle<HeapObject> object,
                                              int* offset_return) {
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    if (info()->has_context() && object.is_identical_to(info()->context()) &&
        !info()->is_osr()) {
      *offset_return = StandardFrameConstants::kContextOffset;
      return true;
    } else if (object.is_identical_to(info()->closure())) {
      *offset_return = JavaScriptFrameConstants::kFunctionOffset;
      return true;
    }
  }
  return false;
}


bool CodeGenerator::IsMaterializableFromRoot(
    Handle<HeapObject> object, Heap::RootListIndex* index_return) {
  const CallDescriptor* incoming_descriptor =
      linkage()->GetIncomingDescriptor();
  if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
    RootIndexMap map(isolate());
    int root_index = map.Lookup(*object);
    if (root_index != RootIndexMap::kInvalidRootIndex) {
      *index_return = static_cast<Heap::RootListIndex>(root_index);
      return true;
    }
  }
  return false;
}


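// Assembles a single instruction: gap moves first, then the source position
// marker, then the architecture-specific code, and finally any branch or
// boolean materialization selected by the instruction's flags mode.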
void CodeGenerator::AssembleInstruction(Instruction* instr) {
  AssembleGaps(instr);
  AssembleSourcePosition(instr);
  // Assemble architecture-specific code for the instruction.
  AssembleArchInstruction(instr);

  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  if (mode == kFlags_branch) {
    // Assemble a branch after this instruction.
    InstructionOperandConverter i(this, instr);
    RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
    RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

    if (true_rpo == false_rpo) {
      // redundant branch.
      if (!IsNextInAssemblyOrder(true_rpo)) {
        AssembleArchJump(true_rpo);
      }
      return;
    }
    if (IsNextInAssemblyOrder(true_rpo)) {
      // true block is next, can fall through if condition negated.
      std::swap(true_rpo, false_rpo);
      condition = NegateFlagsCondition(condition);
    }
    BranchInfo branch;
    branch.condition = condition;
    branch.true_label = GetLabel(true_rpo);
    branch.false_label = GetLabel(false_rpo);
    branch.fallthru = IsNextInAssemblyOrder(false_rpo);
    // Assemble architecture-specific branch.
    AssembleArchBranch(instr, &branch);
  } else if (mode == kFlags_set) {
    // Assemble a boolean materialization after this instruction.
    AssembleArchBoolean(instr, condition);
  }
}


void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position;
  if (!code()->GetSourcePosition(instr, &source_position)) return;
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (source_position.IsUnknown()) return;
  int code_pos = source_position.raw();
  masm()->positions_recorder()->RecordPosition(code_pos);
  masm()->positions_recorder()->WriteRecordedPositions();
  if (FLAG_code_comments) {
    Vector<char> buffer = Vector<char>::New(256);
    CompilationInfo* info = this->info();
    int ln = Script::GetLineNumber(info->script(), code_pos);
    int cn = Script::GetColumnNumber(info->script(), code_pos);
    if (info->script()->name()->IsString()) {
      Handle<String> file(String::cast(info->script()->name()));
      base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                         file->ToCString().get(), ln, cn);
    } else {
      base::OS::SNPrintF(buffer.start(), buffer.length(),
                         "-- <unknown>:%d:%d --", ln, cn);
    }
    masm()->RecordComment(buffer.start());
  }
}


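// Resolves and emits the parallel moves attached to the gap positions of
// {instr} (the spills, fills and register shuffles inserted by register
// allocation).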
void CodeGenerator::AssembleGaps(Instruction* instr) {
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; i++) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != nullptr) resolver()->Resolve(move);
  }
}


void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetAstId(i, deoptimization_state->bailout_id());
    CHECK(deoptimization_states_[i]);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}


Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}


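// Records safepoint, exception handler and lazy deoptimization information
// for a call instruction, based on the flags encoded in its opcode.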
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    bool caught = flags & CallDescriptor::kHasLocalCatchHandler;
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({caught, GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (flags & CallDescriptor::kNeedsNopAfterCall) {
    AddNopForSmiCodeInlining();
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetFrameStateDescriptor(instr, frame_state_offset);
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or they are immediates.
    // (The values should not live in register because registers are clobbered
    // by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsDoubleStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}


int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}


FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  InstructionSequence::StateId state_id =
      InstructionSequence::StateId::FromInt(i.InputInt32(frame_state_offset));
  return code()->GetFrameStateDescriptor(state_id);
}


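// Recursively emits translation opcodes for a single state value: captured
// objects are expanded field by field, duplicates refer back to a previously
// materialized object, and plain values are translated from their operand.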
void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, Translation* translation,
    InstructionOperandIterator* iter) {
  if (desc->IsNested()) {
    translation->BeginCapturedObject(static_cast<int>(desc->size()));
    for (size_t index = 0; index < desc->fields().size(); index++) {
      TranslateStateValueDescriptor(&desc->fields()[index], translation, iter);
    }
  } else if (desc->IsDuplicate()) {
    translation->DuplicateObject(static_cast<int>(desc->id()));
  } else {
    DCHECK(desc->IsPlain());
    AddTranslationForOperand(translation, iter->instruction(), iter->Advance(),
                             desc->type());
  }
}


void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  for (size_t index = 0; index < desc->GetSize(combine); index++) {
    switch (combine.kind()) {
      case OutputFrameStateCombine::kPushOutput: {
        DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
        size_t size_without_output =
            desc->GetSize(OutputFrameStateCombine::Ignore());
        // If the index is past the existing stack items in values_.
        if (index >= size_without_output) {
          // Materialize the result of the call instruction in this slot.
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - size_without_output),
              MachineType::AnyTagged());
          continue;
        }
        break;
      }
      case OutputFrameStateCombine::kPokeAt:
        // The result of the call should be placed at position
        // [index_from_top] in the stack (overwriting whatever was
        // previously there).
        size_t index_from_top =
            desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
        if (index >= index_from_top &&
            index < index_from_top + iter->instruction()->OutputCount()) {
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - index_from_top),
              MachineType::AnyTagged());
          iter->Advance();  // We do not use this input, but we need to
                            // advance, as the input got replaced.
          continue;
        }
        break;
    }
    StateValueDescriptor* value_desc = desc->GetStateValueDescriptor();
    TranslateStateValueDescriptor(&value_desc->fields()[index], translation,
                                  iter);
  }
}


void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kConstructStub:
      translation->BeginConstructStubFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
  }

  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}


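// Builds the full deoptimization translation for the frame state that starts
// at input {frame_state_offset} of {instr}, registers a new
// DeoptimizationState and returns its id.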
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  FrameStateDescriptor* descriptor =
      GetFrameStateDescriptor(instr, frame_state_offset);
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  InstructionOperandIterator iter(instr, frame_state_offset);
  BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
                                          state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset));

  return deoptimization_id;
}


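// Emits a single translation entry describing where the deoptimizer can find
// the value of {op} (stack slot, register or constant) and how to interpret
// it, based on its machine type.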
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleStackSlot()) {
    DCHECK(IsFloatingPoint(type.representation()));
    translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleRegister()) {
    DCHECK(IsFloatingPoint(type.representation()));
    InstructionOperandConverter converter(this, instr);
    translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
  } else if (op->IsImmediate()) {
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        DCHECK(type == MachineType::Int32() || type == MachineType::Uint32() ||
               type.representation() == MachineRepresentation::kBit);
        constant_object =
            isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK(type.representation() == MachineRepresentation::kTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  } else {
    CHECK(false);
  }
}


void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}


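// Computes the stack pointer adjustment (in slots) needed before a tail call:
// the slots occupied by the current frame, minus the return address slot on
// architectures that push it, adjusted by the change in stack parameters.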
int CodeGenerator::TailCallFrameStackSlotDelta(int stack_param_delta) {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int spill_slots = frame()->GetSpillSlotCount();
  bool has_frame = descriptor->IsJSFunctionCall() || spill_slots > 0;
  // Leave the PC on the stack on platforms that have that as part of their ABI
  int pc_slots = V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
  int sp_slot_delta =
      has_frame ? (frame()->GetTotalFrameSlotCount() - pc_slots) : 0;
  // Discard only slots that won't be used by new parameters.
  sp_slot_delta += stack_param_delta;
  return sp_slot_delta;
}


OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}


OutOfLineCode::~OutOfLineCode() {}

}  // namespace compiler
}  // namespace internal
}  // namespace v8