blob: c69e86e0a5367c4a1724b4ecc33fb66e9394d039 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch014dc512016-03-22 12:00:34 +00007#include "src/address-map.h"
Ben Murdochf91f0612016-11-29 16:50:11 +00008#include "src/base/adapters.h"
Ben Murdochf3b273f2017-01-17 12:11:28 +00009#include "src/compilation-info.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/compiler/code-generator-impl.h"
11#include "src/compiler/linkage.h"
12#include "src/compiler/pipeline.h"
Ben Murdoch014dc512016-03-22 12:00:34 +000013#include "src/frames-inl.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014
15namespace v8 {
16namespace internal {
17namespace compiler {
18
// A singly-linked list node describing one jump table to be emitted at the
// end of generated code: a label to bind at the table's position plus the
// array of branch target labels. Nodes are zone-allocated and chained via
// next() (see CodeGenerator::AddJumpTable / GenerateCode).
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  // Label marking the position of this table in the emitted code.
  Label* label() { return &label_; }
  // Next table in the chain (nullptr terminates the list).
  JumpTable* next() const { return next_; }
  // Array of target labels; not owned by this node.
  Label** targets() const { return targets_; }
  // Number of entries in targets().
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};
35
// Constructs a code generator for the given instruction sequence. Most
// members are zone-allocated in the instruction sequence's zone; the
// per-block labels are placement-new'd into a zone array below. The
// initializer-list order must match the member declaration order.
CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(nullptr),
      linkage_(linkage),
      code_(code),
      unwinding_info_writer_(zone()),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_exits_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1),
      source_position_table_builder_(code->zone(),
                                     info->SourcePositionRecordingMode()) {
  // NewArray<Label> only reserves storage; run the Label constructor on each
  // slot explicitly.
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  CreateFrameAccessState(frame);
}
66
Ben Murdochf3b273f2017-01-17 12:11:28 +000067Isolate* CodeGenerator::isolate() const { return info_->isolate(); }
68
// Finalizes the frame layout (architecture-specific FinishFrame) and then
// wraps the frame in a zone-allocated FrameAccessState. Called once from the
// constructor; FinishFrame must run before the access state captures the
// frame.
void CodeGenerator::CreateFrameAccessState(Frame* frame) {
  FinishFrame(frame);
  frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000073
// Drives the whole code-generation pipeline: assembles every instruction
// block (non-deferred first, then deferred), then out-of-line code, eager
// deopt exits, jump tables and the safepoint table, and finally packages the
// result into a Code object with source positions, handler table and
// deoptimization data attached. Returns an empty handle on assembly failure.
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(masm(), StackFrame::MANUAL);

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }
  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      int index = DefineDeoptimizationLiteral(inlined.shared_info);
      inlined.RegisterInlinedFunctionId(index);
    }
  }
  // Everything defined so far is an inlined-function literal; remember the
  // count so PopulateDeoptimizationData can report it.
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all unoptimized code objects of inlined
  // functions. This ensures unoptimized code is kept alive by optimized code.
  for (const CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
    }
  }

  unwinding_info_writer_.SetNumberOfInstructionBlocks(
      code()->InstructionBlockCount());

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (const InstructionBlock* block : code()->instruction_blocks()) {
      // First pass (deferred == 0) skips deferred blocks; second pass skips
      // non-deferred ones.
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Ensure lazy deopt doesn't patch handler entry points.
      if (block->IsHandler()) EnsureSpaceForLazyDeopt();
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      unwinding_info_writer_.BeginInstructionBlock(masm()->pc_offset(), block);
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(200);
        char* buffer_start = buffer.start();

        int next = SNPrintF(
            buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
            block->IsDeferred() ? " (deferred)" : "",
            block->needs_frame() ? "" : " (no frame)",
            block->must_construct_frame() ? " (construct frame)" : "",
            block->must_deconstruct_frame() ? " (deconstruct frame)" : "");

        buffer = buffer.SubVector(next, buffer.length());

        if (block->IsLoopHeader()) {
          next =
              SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        if (block->loop_header().IsValid()) {
          next =
              SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        SNPrintF(buffer, " --");
        masm()->RecordComment(buffer_start);
      }

      frame_access_state()->MarkHasFrame(block->needs_frame());

      masm()->bind(GetLabel(current_block_));
      if (block->must_construct_frame()) {
        AssembleConstructFrame();
        // We need to setup the root register after we assemble the prologue, to
        // avoid clobbering callee saved registers in case of C linkage and
        // using the roots.
        // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
        if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
          masm()->InitializeRootRegister();
        }
      }

      CodeGenResult result;
      if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
        // Frameless blocks cannot address the embedded constant pool; make
        // that explicit for the duration of this block's assembly.
        ConstantPoolUnavailableScope constant_pool_unavailable(masm());
        result = AssembleBlock(block);
      } else {
        result = AssembleBlock(block);
      }
      if (result != kSuccess) return Handle<Code>();
      unwinding_info_writer_.EndInstructionBlock(block);
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Assemble all eager deoptimization exits.
  for (DeoptimizationExit* exit : deoptimization_exits_) {
    masm()->bind(exit->label());
    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER,
                            exit->pos());
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());

  unwinding_info_writer_.Finish(masm()->pc_offset());

  Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
      masm(), unwinding_info_writer_.eh_frame_writer(), info, Handle<Object>());
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetTotalFrameSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
  Handle<ByteArray> source_positions =
      source_position_table_builder_.ToSourcePositionTable(
          isolate(), Handle<AbstractCode>::cast(result));
  result->set_source_position_table(*source_positions);

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  return result;
}
251
252
Ben Murdoch014dc512016-03-22 12:00:34 +0000253bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
254 return code()
255 ->InstructionBlockAt(current_block_)
256 ->ao_number()
257 .IsNext(code()->InstructionBlockAt(block)->ao_number());
Emily Bernier958fae72015-03-24 16:35:39 -0400258}
259
260
Ben Murdoch014dc512016-03-22 12:00:34 +0000261void CodeGenerator::RecordSafepoint(ReferenceMap* references,
262 Safepoint::Kind kind, int arguments,
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000263 Safepoint::DeoptMode deopt_mode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000264 Safepoint safepoint =
265 safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
Ben Murdoch014dc512016-03-22 12:00:34 +0000266 int stackSlotToSpillSlotDelta =
267 frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100268 for (const InstructionOperand& operand : references->reference_operands()) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000269 if (operand.IsStackSlot()) {
270 int index = LocationOperand::cast(operand).index();
271 DCHECK(index >= 0);
Ben Murdoch109988c2016-05-18 11:27:45 +0100272 // We might index values in the fixed part of the frame (i.e. the
273 // closure pointer or the context pointer); these are not spill slots
274 // and therefore don't work with the SafepointTable currently, but
275 // we also don't need to worry about them, since the GC has special
276 // knowledge about those fields anyway.
277 if (index < stackSlotToSpillSlotDelta) continue;
Ben Murdoch014dc512016-03-22 12:00:34 +0000278 safepoint.DefinePointerSlot(index, zone());
279 } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
280 Register reg = LocationOperand::cast(operand).GetRegister();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000281 safepoint.DefinePointerRegister(reg, zone());
282 }
283 }
284}
285
Ben Murdoch014dc512016-03-22 12:00:34 +0000286bool CodeGenerator::IsMaterializableFromRoot(
287 Handle<HeapObject> object, Heap::RootListIndex* index_return) {
288 const CallDescriptor* incoming_descriptor =
289 linkage()->GetIncomingDescriptor();
290 if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
291 RootIndexMap map(isolate());
292 int root_index = map.Lookup(*object);
293 if (root_index != RootIndexMap::kInvalidRootIndex) {
294 *index_return = static_cast<Heap::RootListIndex>(root_index);
295 return true;
296 }
297 }
298 return false;
299}
300
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100301CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
302 const InstructionBlock* block) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100303 for (int i = block->code_start(); i < block->code_end(); ++i) {
304 Instruction* instr = code()->InstructionAt(i);
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100305 CodeGenResult result = AssembleInstruction(instr, block);
306 if (result != kSuccess) return result;
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100307 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100308 return kSuccess;
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100309}
Ben Murdoch014dc512016-03-22 12:00:34 +0000310
Ben Murdochf91f0612016-11-29 16:50:11 +0000311bool CodeGenerator::IsValidPush(InstructionOperand source,
312 CodeGenerator::PushTypeFlags push_type) {
313 if (source.IsImmediate() &&
314 ((push_type & CodeGenerator::kImmediatePush) != 0)) {
315 return true;
316 }
317 if ((source.IsRegister() || source.IsStackSlot()) &&
318 ((push_type & CodeGenerator::kScalarPush) != 0)) {
319 return true;
320 }
321 if ((source.IsFloatRegister() || source.IsFloatStackSlot()) &&
322 ((push_type & CodeGenerator::kFloat32Push) != 0)) {
323 return true;
324 }
325 if ((source.IsDoubleRegister() || source.IsFloatStackSlot()) &&
326 ((push_type & CodeGenerator::kFloat64Push) != 0)) {
327 return true;
328 }
329 return false;
330}
331
// Scans the gap moves of |instr| and collects, into |pushes|, the trailing
// run of stack-slot-destination moves (indexed by destination slot) that can
// be implemented with push instructions instead of the general gap resolver.
// Clears |pushes| entirely whenever the optimization is unsafe.
void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
                                           PushTypeFlags push_type,
                                           ZoneVector<MoveOperands*>* pushes) {
  pushes->clear();
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; ++i) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
    if (parallel_move != nullptr) {
      for (auto move : *parallel_move) {
        InstructionOperand source = move->source();
        InstructionOperand destination = move->destination();
        // Slot 0 holds the return address on architectures that store it on
        // the stack, so pushes can only start at slot 1 there.
        int first_push_compatible_index =
            V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
        // If there are any moves from slots that will be overridden by pushes,
        // then the full gap resolver must be used since optimization with
        // pushes don't participate in the parallel move and might clobber
        // values needed for the gap resolve.
        if (source.IsStackSlot() &&
            LocationOperand::cast(source).index() >=
                first_push_compatible_index) {
          pushes->clear();
          return;
        }
        // TODO(danno): Right now, only consider moves from the FIRST gap for
        // pushes. Theoretically, we could extract pushes for both gaps (there
        // are cases where this happens), but the logic for that would also have
        // to check to make sure that non-memory inputs to the pushes from the
        // LAST gap don't get clobbered in the FIRST gap.
        if (i == Instruction::FIRST_GAP_POSITION) {
          if (destination.IsStackSlot() &&
              LocationOperand::cast(destination).index() >=
                  first_push_compatible_index) {
            int index = LocationOperand::cast(destination).index();
            if (IsValidPush(source, push_type)) {
              if (index >= static_cast<int>(pushes->size())) {
                pushes->resize(index + 1);
              }
              // Record the move at its destination slot index; unfilled slots
              // stay nullptr.
              (*pushes)[index] = move;
            }
          }
        }
      }
    }
  }

  // For now, only support a set of continuous pushes at the end of the list.
  size_t push_count_upper_bound = pushes->size();
  size_t push_begin = push_count_upper_bound;
  for (auto move : base::Reversed(*pushes)) {
    if (move == nullptr) break;
    push_begin--;
  }
  size_t push_count = pushes->size() - push_begin;
  // Shift the trailing contiguous run to the front and drop everything else.
  std::copy(pushes->begin() + push_begin,
            pushes->begin() + push_begin + push_count, pushes->begin());
  pushes->resize(push_count);
}
391
// Assembles one instruction: first the tail-call stack adjustment and gap
// moves that precede it, then the architecture-specific code, and finally any
// flags continuation (branch, conditional deopt, or boolean materialization)
// encoded in the instruction's opcode.
CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    Instruction* instr, const InstructionBlock* block) {
  int first_unused_stack_slot;
  bool adjust_stack =
      GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
  if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
  AssembleGaps(instr);
  if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
  // A block that must deconstruct its frame may only do so at its final
  // return or jump instruction.
  DCHECK_IMPLIES(
      block->must_deconstruct_frame(),
      instr != code()->InstructionAt(block->last_instruction_index()) ||
          instr->IsRet() || instr->IsJump());
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
  AssembleSourcePosition(instr);
  // Assemble architecture-specific code for the instruction.
  CodeGenResult result = AssembleArchInstruction(instr);
  if (result != kSuccess) return result;

  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  switch (mode) {
    case kFlags_branch: {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      // The branch targets are the last two inputs of the instruction.
      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

      if (true_rpo == false_rpo) {
        // redundant branch.
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return kSuccess;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // true block is next, can fall through if condition negated.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      break;
    }
    case kFlags_deoptimize: {
      // Assemble a conditional eager deoptimization after this instruction.
      InstructionOperandConverter i(this, instr);
      size_t frame_state_offset = MiscField::decode(instr->opcode());
      DeoptimizationExit* const exit =
          AddDeoptimizationExit(instr, frame_state_offset);
      Label continue_label;
      // Branch to the deopt exit when the condition holds, otherwise fall
      // through to continue_label.
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = exit->label();
      branch.false_label = &continue_label;
      branch.fallthru = true;
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      masm()->bind(&continue_label);
      break;
    }
    case kFlags_set: {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
      break;
    }
    case kFlags_none: {
      break;
    }
  }
  return kSuccess;
}
470
471
// Records the source position of |instr| in the source position table (and,
// with --code-comments, as an assembler comment), skipping instructions with
// no position or the same position as the previous one.
void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position = SourcePosition::Unknown();
  if (!code()->GetSourcePosition(instr, &source_position)) return;
  // Avoid emitting duplicate entries for consecutive instructions that share
  // a position.
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (!source_position.IsKnown()) return;
  source_position_table_builder_.AddPosition(masm()->pc_offset(),
                                             source_position, false);
  if (FLAG_code_comments) {
    CompilationInfo* info = this->info();
    // The inlining stack can only be printed when parse info is available.
    if (!info->parse_info()) return;
    std::ostringstream buffer;
    buffer << "-- " << source_position.InliningStack(info) << " --";
    masm()->RecordComment(StrDup(buffer.str().c_str()));
  }
}
488
Ben Murdochf91f0612016-11-29 16:50:11 +0000489bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
490 int* slot) {
491 if (instr->IsTailCall()) {
492 InstructionOperandConverter g(this, instr);
493 *slot = g.InputInt32(instr->InputCount() - 1);
494 return true;
495 } else {
496 return false;
497 }
498}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000499
Ben Murdoch014dc512016-03-22 12:00:34 +0000500void CodeGenerator::AssembleGaps(Instruction* instr) {
501 for (int i = Instruction::FIRST_GAP_POSITION;
502 i <= Instruction::LAST_GAP_POSITION; i++) {
503 Instruction::GapPosition inner_pos =
504 static_cast<Instruction::GapPosition>(i);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000505 ParallelMove* move = instr->GetParallelMove(inner_pos);
Ben Murdoch014dc512016-03-22 12:00:34 +0000506 if (move != nullptr) resolver()->Resolve(move);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000507 }
508}
509
Ben Murdochc8c1d9e2017-03-08 14:04:23 +0000510namespace {
511
512Handle<PodArray<InliningPosition>> CreateInliningPositions(
513 CompilationInfo* info) {
514 const CompilationInfo::InlinedFunctionList& inlined_functions =
515 info->inlined_functions();
516 if (inlined_functions.size() == 0) {
517 return Handle<PodArray<InliningPosition>>::cast(
518 info->isolate()->factory()->empty_byte_array());
519 }
520 Handle<PodArray<InliningPosition>> inl_positions =
521 PodArray<InliningPosition>::New(
522 info->isolate(), static_cast<int>(inlined_functions.size()), TENURED);
523 for (size_t i = 0; i < inlined_functions.size(); ++i) {
524 inl_positions->set(static_cast<int>(i), inlined_functions[i].position);
525 }
526 return inl_positions;
527}
528
529} // namespace
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000530
// Builds the DeoptimizationInputData for |code_object| from the states,
// translations and literals accumulated during assembly, and attaches it.
// Skipped entirely when there are no deopt points and the code is not OSR.
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    // No shared function info available; store the zero Smi as a sentinel.
    data->SetSharedFunctionInfo(Smi::kZero);
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    // Copying raw handles into the array requires deferred-handle
    // dereference to be explicitly allowed in this scope.
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  Handle<PodArray<InliningPosition>> inl_pos = CreateInliningPositions(info);
  data->SetInliningPositions(*inl_pos);

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    // Not OSR: record the sentinel values (BailoutId::None / -1).
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetAstId(i, deoptimization_state->bailout_id());
    CHECK(deoptimization_states_[i]);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::kZero);
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}
588
589
Ben Murdoch014dc512016-03-22 12:00:34 +0000590Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
591 jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
592 return jump_tables_->label();
593}
594
595
// Records the bookkeeping for a call site just emitted: a safepoint, the
// exception handler entry (if any), and — when a frame state is attached —
// the lazy-deopt translation(s) for this pc.
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    // The handler block is encoded as the last input of the instruction.
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or they are immediates.
    // (The values should not live in register because registers are clobbered
    // by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsFPStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}
643
644
645int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
646 int result = static_cast<int>(deoptimization_literals_.size());
647 for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
648 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
649 }
650 deoptimization_literals_.push_back(literal);
651 return result;
652}
653
Ben Murdochf91f0612016-11-29 16:50:11 +0000654DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000655 Instruction* instr, size_t frame_state_offset) {
656 InstructionOperandConverter i(this, instr);
Ben Murdochf91f0612016-11-29 16:50:11 +0000657 int const state_id = i.InputInt32(frame_state_offset);
658 return code()->GetDeoptimizationEntry(state_id);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000659}
660
Ben Murdochf91f0612016-11-29 16:50:11 +0000661DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
662 int deoptimization_id) const {
663 size_t const index = static_cast<size_t>(deoptimization_id);
664 DCHECK_LT(index, deoptimization_states_.size());
665 return deoptimization_states_[index]->reason();
666}
Emily Bernier958fae72015-03-24 16:35:39 -0400667
Ben Murdoch014dc512016-03-22 12:00:34 +0000668void CodeGenerator::TranslateStateValueDescriptor(
669 StateValueDescriptor* desc, Translation* translation,
670 InstructionOperandIterator* iter) {
671 if (desc->IsNested()) {
672 translation->BeginCapturedObject(static_cast<int>(desc->size()));
673 for (size_t index = 0; index < desc->fields().size(); index++) {
674 TranslateStateValueDescriptor(&desc->fields()[index], translation, iter);
Emily Bernier958fae72015-03-24 16:35:39 -0400675 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000676 } else if (desc->IsDuplicate()) {
677 translation->DuplicateObject(static_cast<int>(desc->id()));
678 } else {
679 DCHECK(desc->IsPlain());
680 AddTranslationForOperand(translation, iter->instruction(), iter->Advance(),
681 desc->type());
Emily Bernier958fae72015-03-24 16:35:39 -0400682 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000683}
684
685
// Emits translation records for all values of one frame state, merging in the
// call instruction's outputs according to |combine| (either pushed on top of
// the frame's values or poked over existing slots).
void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  for (size_t index = 0; index < desc->GetSize(combine); index++) {
    switch (combine.kind()) {
      case OutputFrameStateCombine::kPushOutput: {
        DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
        size_t size_without_output =
            desc->GetSize(OutputFrameStateCombine::Ignore());
        // If the index is past the existing stack items in values_.
        if (index >= size_without_output) {
          // Materialize the result of the call instruction in this slot.
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - size_without_output),
              MachineType::AnyTagged());
          continue;
        }
        // Not an output slot; fall through to the plain translation below.
        break;
      }
      case OutputFrameStateCombine::kPokeAt:
        // The result of the call should be placed at position
        // [index_from_top] in the stack (overwriting whatever was
        // previously there).
        size_t index_from_top =
            desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
        if (index >= index_from_top &&
            index < index_from_top + iter->instruction()->OutputCount()) {
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - index_from_top),
              MachineType::AnyTagged());
          iter->Advance();  // We do not use this input, but we need to
                            // advance, as the input got replaced.
          continue;
        }
        break;
    }
    // Default: translate the frame state's own value at this index.
    StateValueDescriptor* value_desc = desc->GetStateValueDescriptor();
    TranslateStateValueDescriptor(&value_desc->fields()[index], translation,
                                  iter);
  }
}
729
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000730
// Recursively builds the deoptimization translation for |descriptor| and all
// of its outer frame states, then emits the frame header matching the
// descriptor's type followed by its operand values.
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    // Outer frames never combine the call's output into their state.
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  // Determine the SharedFunctionInfo to record for this frame; fall back to
  // the one from the compilation info when the descriptor has none.
  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  // Emit the frame header appropriate for the frame state's type.
  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      // Height excludes the function and its parameters.
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      // Height is the locals count plus one extra slot; presumably for the
      // accumulator — TODO confirm against the interpreter frame layout.
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kTailCallerFunction:
      translation->BeginTailCallerFrame(shared_info_id);
      break;
    case FrameStateType::kConstructStub:
      translation->BeginConstructStubFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kGetterStub:
      translation->BeginGetterStubFrame(shared_info_id);
      break;
    case FrameStateType::kSetterStub:
      translation->BeginSetterStubFrame(shared_info_id);
      break;
  }

  // Finally emit this frame's values (possibly combined with call outputs).
  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}
786
787
788int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
789 size_t frame_state_offset,
790 OutputFrameStateCombine state_combine) {
Ben Murdochf91f0612016-11-29 16:50:11 +0000791 DeoptimizationEntry const& entry =
792 GetDeoptimizationEntry(instr, frame_state_offset);
793 FrameStateDescriptor* const descriptor = entry.descriptor();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000794 frame_state_offset++;
795
796 Translation translation(
797 &translations_, static_cast<int>(descriptor->GetFrameCount()),
798 static_cast<int>(descriptor->GetJSFrameCount()), zone());
Ben Murdoch014dc512016-03-22 12:00:34 +0000799 InstructionOperandIterator iter(instr, frame_state_offset);
800 BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
801 state_combine);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000802
803 int deoptimization_id = static_cast<int>(deoptimization_states_.size());
804
805 deoptimization_states_.push_back(new (zone()) DeoptimizationState(
Ben Murdochf91f0612016-11-29 16:50:11 +0000806 descriptor->bailout_id(), translation.index(), pc_offset,
807 entry.reason()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000808
809 return deoptimization_id;
810}
811
812
// Emits one translation record for |op|, dispatching on the operand's
// location kind (stack slot, FP stack slot, register, FP register, or
// immediate) and on the machine |type| of the value it holds.
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    // Pick the store variant matching the slot's value representation.
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else if (IsAnyTagged(type.representation())) {
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK(false);  // Unsupported representation for a stack slot.
    }
  } else if (op->IsFPStackSlot()) {
    // Floating-point stack slots are either float64 or float32.
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    // Same dispatch as for stack slots, but on register stores.
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if (IsAnyTagged(type.representation())) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);  // Unsupported representation for a register.
    }
  } else if (op->IsFPRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatRegister(converter.ToFloatRegister(op));
    }
  } else if (op->IsImmediate()) {
    // Immediates become deoptimization literals; convert the constant to a
    // heap object first.
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        if (type.representation() == MachineRepresentation::kTagged ||
            type.representation() == MachineRepresentation::kTaggedSigned) {
          // When pointers are 4 bytes, we can use int32 constants to represent
          // Smis.
          DCHECK_EQ(4, kPointerSize);
          constant_object =
              handle(reinterpret_cast<Smi*>(constant.ToInt32()), isolate());
          DCHECK(constant_object->IsSmi());
        } else if (type.representation() == MachineRepresentation::kBit) {
          // Bit constants encode booleans as 0/1.
          if (constant.ToInt32() == 0) {
            constant_object = isolate()->factory()->false_value();
          } else {
            DCHECK_EQ(1, constant.ToInt32());
            constant_object = isolate()->factory()->true_value();
          }
        } else {
          // TODO(jarin,bmeurer): We currently pass in raw pointers to the
          // JSFunction::entry here. We should really consider fixing this.
          DCHECK(type == MachineType::Int32() ||
                 type == MachineType::Uint32() ||
                 type.representation() == MachineRepresentation::kWord32 ||
                 type.representation() == MachineRepresentation::kNone);
          DCHECK(type.representation() != MachineRepresentation::kNone ||
                 constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);

          constant_object =
              isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        }
        break;
      case Constant::kInt64:
        // When pointers are 8 bytes, we can use int64 constants to represent
        // Smis.
        // TODO(jarin,bmeurer): We currently pass in raw pointers to the
        // JSFunction::entry here. We should really consider fixing this.
        DCHECK(type.representation() == MachineRepresentation::kWord64 ||
               type.representation() == MachineRepresentation::kTagged ||
               type.representation() == MachineRepresentation::kTaggedSigned);
        DCHECK_EQ(8, kPointerSize);
        constant_object =
            handle(reinterpret_cast<Smi*>(constant.ToInt64()), isolate());
        DCHECK(constant_object->IsSmi());
        break;
      case Constant::kFloat32:
        if (type.representation() == MachineRepresentation::kTaggedSigned) {
          DCHECK(IsSmiDouble(constant.ToFloat32()));
        } else {
          DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
                 CanBeTaggedPointer(type.representation()));
        }
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        if (type.representation() == MachineRepresentation::kTaggedSigned) {
          DCHECK(IsSmiDouble(constant.ToFloat64()));
        } else {
          DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
                 CanBeTaggedPointer(type.representation()));
        }
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK(CanBeTaggedPointer(type.representation()));
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);  // Other constant kinds are not valid here.
    }
    // The closure gets a dedicated record instead of a literal entry.
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  } else {
    CHECK(false);  // Operand kind not representable in a translation.
  }
}
944
945
946void CodeGenerator::MarkLazyDeoptSite() {
947 last_lazy_deopt_pc_ = masm()->pc_offset();
948}
949
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100950DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
951 Instruction* instr, size_t frame_state_offset) {
952 int const deoptimization_id = BuildTranslation(
953 instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
Ben Murdochf3b273f2017-01-17 12:11:28 +0000954 DeoptimizationExit* const exit = new (zone())
955 DeoptimizationExit(deoptimization_id, current_source_position_);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100956 deoptimization_exits_.push_back(exit);
957 return exit;
958}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000959
// Registers this out-of-line code fragment with the code generator by
// pushing it onto the front of the generator's intrusive ools_ list:
// next_ captures the old list head, then the body installs |this| as head.
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}
964
965
966OutOfLineCode::~OutOfLineCode() {}
967
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000968} // namespace compiler
969} // namespace internal
970} // namespace v8