blob: 03136a7c2c032327fec4c2e2a9d4024ede05054e [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2013 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch014dc512016-03-22 12:00:34 +00007#include "src/address-map.h"
Ben Murdochf91f0612016-11-29 16:50:11 +00008#include "src/base/adapters.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00009#include "src/compiler/code-generator-impl.h"
10#include "src/compiler/linkage.h"
11#include "src/compiler/pipeline.h"
Ben Murdoch014dc512016-03-22 12:00:34 +000012#include "src/frames-inl.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000013
14namespace v8 {
15namespace internal {
16namespace compiler {
17
// Zone-allocated node in an intrusive singly-linked list of jump tables that
// are emitted after all code blocks (see GenerateCode). Each node records the
// branch targets of one table plus the label bound at the table's emission
// site. Never destructed (ZoneObject).
class CodeGenerator::JumpTable final : public ZoneObject {
 public:
  // |next| is the previous head of the list; |targets| points at
  // |target_count| labels owned elsewhere (the table does not copy them).
  JumpTable(JumpTable* next, Label** targets, size_t target_count)
      : next_(next), targets_(targets), target_count_(target_count) {}

  // Label to jump through; bound when the table bytes are emitted.
  Label* label() { return &label_; }
  JumpTable* next() const { return next_; }
  Label** targets() const { return targets_; }
  size_t target_count() const { return target_count_; }

 private:
  Label label_;
  JumpTable* const next_;
  Label** const targets_;
  size_t const target_count_;
};
34
// Constructs a code generator for |code| under the given |linkage|.
// NOTE(review): the initializer order must match the member declaration order
// in code-generator.h; do not reorder.
CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_access_state_(nullptr),
      linkage_(linkage),
      code_(code),
      unwinding_info_writer_(zone()),
      info_(info),
      // One label per instruction block, bound as blocks are assembled.
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Unknown()),
      masm_(info->isolate(), nullptr, 0, CodeObjectRequired::kNo),
      resolver_(this),
      safepoints_(code->zone()),
      handlers_(code->zone()),
      deoptimization_exits_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      inlined_function_count_(0),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      jump_tables_(nullptr),
      ools_(nullptr),
      osr_pc_offset_(-1),
      source_position_table_builder_(code->zone(),
                                     info->SourcePositionRecordingMode()) {
  // NewArray returns raw storage; placement-new each Label to run its ctor.
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
  CreateFrameAccessState(frame);
}
65
// Finalizes the frame layout and then wraps |frame| in a zone-allocated
// FrameAccessState. FinishFrame must run first so the access state sees the
// completed layout.
void CodeGenerator::CreateFrameAccessState(Frame* frame) {
  FinishFrame(frame);
  frame_access_state_ = new (code()->zone()) FrameAccessState(frame);
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000070
// Drives the whole code-generation pipeline: assembles every instruction
// block (non-deferred first, then deferred), then out-of-line code, eager
// deopt exits, jump tables, safepoint/handler/deopt metadata, and finally
// materializes the Code object. Returns an empty handle on assembly failure.
Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Open a frame scope to indicate that there is a frame on the stack.  The
  // MANUAL indicates that the scope shouldn't actually generate code to set up
  // the frame (that is done in AssemblePrologue).
  FrameScope frame_scope(masm(), StackFrame::MANUAL);

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }
  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());

  // Define deoptimization literals for all inlined functions.
  DCHECK_EQ(0u, deoptimization_literals_.size());
  for (const CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.shared_info);
    }
  }
  // Literals defined so far are exactly the inlined SharedFunctionInfos.
  inlined_function_count_ = deoptimization_literals_.size();

  // Define deoptimization literals for all unoptimized code objects of inlined
  // functions. This ensures unoptimized code is kept alive by optimized code.
  for (const CompilationInfo::InlinedFunctionHolder& inlined :
       info->inlined_functions()) {
    if (!inlined.shared_info.is_identical_to(info->shared_info())) {
      DefineDeoptimizationLiteral(inlined.inlined_code_object_root);
    }
  }

  unwinding_info_writer_.SetNumberOfInstructionBlocks(
      code()->InstructionBlockCount());

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (const InstructionBlock* block : code()->instruction_blocks()) {
      // Pass 0 skips deferred blocks; pass 1 skips non-deferred ones.
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Ensure lazy deopt doesn't patch handler entry points.
      if (block->IsHandler()) EnsureSpaceForLazyDeopt();
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      unwinding_info_writer_.BeginInstructionBlock(masm()->pc_offset(), block);
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(200);
        char* buffer_start = buffer.start();

        int next = SNPrintF(
            buffer, "-- B%d start%s%s%s%s", block->rpo_number().ToInt(),
            block->IsDeferred() ? " (deferred)" : "",
            block->needs_frame() ? "" : " (no frame)",
            block->must_construct_frame() ? " (construct frame)" : "",
            block->must_deconstruct_frame() ? " (deconstruct frame)" : "");

        buffer = buffer.SubVector(next, buffer.length());

        if (block->IsLoopHeader()) {
          next =
              SNPrintF(buffer, " (loop up to %d)", block->loop_end().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        if (block->loop_header().IsValid()) {
          next =
              SNPrintF(buffer, " (in loop %d)", block->loop_header().ToInt());
          buffer = buffer.SubVector(next, buffer.length());
        }
        SNPrintF(buffer, " --");
        masm()->RecordComment(buffer_start);
      }

      frame_access_state()->MarkHasFrame(block->needs_frame());

      masm()->bind(GetLabel(current_block_));
      if (block->must_construct_frame()) {
        AssembleConstructFrame();
        // We need to setup the root register after we assemble the prologue, to
        // avoid clobbering callee saved registers in case of C linkage and
        // using the roots.
        // TODO(mtrofin): investigate how we can avoid doing this repeatedly.
        if (linkage()->GetIncomingDescriptor()->InitializeRootRegister()) {
          masm()->InitializeRootRegister();
        }
      }

      CodeGenResult result;
      if (FLAG_enable_embedded_constant_pool && !block->needs_frame()) {
        // Without a frame the constant pool is unreachable; forbid its use
        // while assembling this block.
        ConstantPoolUnavailableScope constant_pool_unavailable(masm());
        result = AssembleBlock(block);
      } else {
        result = AssembleBlock(block);
      }
      if (result != kSuccess) return Handle<Code>();
      unwinding_info_writer_.EndInstructionBlock(block);
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      if (ool->exit()->is_bound()) masm()->jmp(ool->exit());
    }
  }

  // Assemble all eager deoptimization exits.
  for (DeoptimizationExit* exit : deoptimization_exits_) {
    masm()->bind(exit->label());
    AssembleDeoptimizerCall(exit->deoptimization_id(), Deoptimizer::EAGER);
  }

  // Ensure there is space for lazy deoptimization in the code.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  FinishCode(masm());

  // Emit the jump tables.
  if (jump_tables_) {
    masm()->Align(kPointerSize);
    for (JumpTable* table = jump_tables_; table; table = table->next()) {
      masm()->bind(table->label());
      AssembleJumpTable(table->targets(), table->target_count());
    }
  }

  safepoints()->Emit(masm(), frame()->GetTotalFrameSlotCount());

  unwinding_info_writer_.Finish(masm()->pc_offset());

  Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
      masm(), unwinding_info_writer_.eh_frame_writer(), info, Handle<Object>());
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetTotalFrameSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());
  Handle<ByteArray> source_positions =
      source_position_table_builder_.ToSourcePositionTable(
          isolate(), Handle<AbstractCode>::cast(result));
  result->set_source_position_table(*source_positions);

  // Emit exception handler table.
  if (!handlers_.empty()) {
    Handle<HandlerTable> table =
        Handle<HandlerTable>::cast(isolate()->factory()->NewFixedArray(
            HandlerTable::LengthForReturn(static_cast<int>(handlers_.size())),
            TENURED));
    for (size_t i = 0; i < handlers_.size(); ++i) {
      table->SetReturnOffset(static_cast<int>(i), handlers_[i].pc_offset);
      table->SetReturnHandler(static_cast<int>(i), handlers_[i].handler->pos());
    }
    result->set_handler_table(*table);
  }

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (info->ShouldEnsureSpaceForLazyDeopt()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  return result;
}
246
247
Ben Murdoch014dc512016-03-22 12:00:34 +0000248bool CodeGenerator::IsNextInAssemblyOrder(RpoNumber block) const {
249 return code()
250 ->InstructionBlockAt(current_block_)
251 ->ao_number()
252 .IsNext(code()->InstructionBlockAt(block)->ao_number());
Emily Bernier958fae72015-03-24 16:35:39 -0400253}
254
255
// Records a safepoint at the current pc: every tagged (GC-visible) value in
// |references| that lives in a spill slot or (if requested) a register is
// registered with the safepoint table so the GC can find and update it.
void CodeGenerator::RecordSafepoint(ReferenceMap* references,
                                    Safepoint::Kind kind, int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  // Number of fixed (non-spill) slots at the base of the frame.
  int stackSlotToSpillSlotDelta =
      frame()->GetTotalFrameSlotCount() - frame()->GetSpillSlotCount();
  for (const InstructionOperand& operand : references->reference_operands()) {
    if (operand.IsStackSlot()) {
      int index = LocationOperand::cast(operand).index();
      DCHECK(index >= 0);
      // We might index values in the fixed part of the frame (i.e. the
      // closure pointer or the context pointer); these are not spill slots
      // and therefore don't work with the SafepointTable currently, but
      // we also don't need to worry about them, since the GC has special
      // knowledge about those fields anyway.
      if (index < stackSlotToSpillSlotDelta) continue;
      safepoint.DefinePointerSlot(index, zone());
    } else if (operand.IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = LocationOperand::cast(operand).GetRegister();
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}
280
Ben Murdoch014dc512016-03-22 12:00:34 +0000281bool CodeGenerator::IsMaterializableFromRoot(
282 Handle<HeapObject> object, Heap::RootListIndex* index_return) {
283 const CallDescriptor* incoming_descriptor =
284 linkage()->GetIncomingDescriptor();
285 if (incoming_descriptor->flags() & CallDescriptor::kCanUseRoots) {
286 RootIndexMap map(isolate());
287 int root_index = map.Lookup(*object);
288 if (root_index != RootIndexMap::kInvalidRootIndex) {
289 *index_return = static_cast<Heap::RootListIndex>(root_index);
290 return true;
291 }
292 }
293 return false;
294}
295
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100296CodeGenerator::CodeGenResult CodeGenerator::AssembleBlock(
297 const InstructionBlock* block) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100298 for (int i = block->code_start(); i < block->code_end(); ++i) {
299 Instruction* instr = code()->InstructionAt(i);
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100300 CodeGenResult result = AssembleInstruction(instr, block);
301 if (result != kSuccess) return result;
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100302 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100303 return kSuccess;
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100304}
Ben Murdoch014dc512016-03-22 12:00:34 +0000305
Ben Murdochf91f0612016-11-29 16:50:11 +0000306bool CodeGenerator::IsValidPush(InstructionOperand source,
307 CodeGenerator::PushTypeFlags push_type) {
308 if (source.IsImmediate() &&
309 ((push_type & CodeGenerator::kImmediatePush) != 0)) {
310 return true;
311 }
312 if ((source.IsRegister() || source.IsStackSlot()) &&
313 ((push_type & CodeGenerator::kScalarPush) != 0)) {
314 return true;
315 }
316 if ((source.IsFloatRegister() || source.IsFloatStackSlot()) &&
317 ((push_type & CodeGenerator::kFloat32Push) != 0)) {
318 return true;
319 }
320 if ((source.IsDoubleRegister() || source.IsFloatStackSlot()) &&
321 ((push_type & CodeGenerator::kFloat64Push) != 0)) {
322 return true;
323 }
324 return false;
325}
326
// Scans the gap moves of |instr| (a tail call site) and collects into
// |pushes| the contiguous run of stack-bound moves at the end of the move
// list that can be emitted as machine pushes instead of going through the
// gap resolver. Clears |pushes| entirely whenever the optimization is unsafe.
void CodeGenerator::GetPushCompatibleMoves(Instruction* instr,
                                           PushTypeFlags push_type,
                                           ZoneVector<MoveOperands*>* pushes) {
  pushes->clear();
  for (int i = Instruction::FIRST_GAP_POSITION;
       i <= Instruction::LAST_GAP_POSITION; ++i) {
    Instruction::GapPosition inner_pos =
        static_cast<Instruction::GapPosition>(i);
    ParallelMove* parallel_move = instr->GetParallelMove(inner_pos);
    if (parallel_move != nullptr) {
      for (auto move : *parallel_move) {
        InstructionOperand source = move->source();
        InstructionOperand destination = move->destination();
        // Slot 0 holds the return address on architectures that push it.
        int first_push_compatible_index =
            V8_TARGET_ARCH_STORES_RETURN_ADDRESS_ON_STACK ? 1 : 0;
        // If there are any moves from slots that will be overridden by pushes,
        // then the full gap resolver must be used since optimization with
        // pushes don't participate in the parallel move and might clobber
        // values needed for the gap resolve.
        if (source.IsStackSlot() &&
            LocationOperand::cast(source).index() >=
                first_push_compatible_index) {
          pushes->clear();
          return;
        }
        // TODO(danno): Right now, only consider moves from the FIRST gap for
        // pushes. Theoretically, we could extract pushes for both gaps (there
        // are cases where this happens), but the logic for that would also have
        // to check to make sure that non-memory inputs to the pushes from the
        // LAST gap don't get clobbered in the FIRST gap.
        if (i == Instruction::FIRST_GAP_POSITION) {
          if (destination.IsStackSlot() &&
              LocationOperand::cast(destination).index() >=
                  first_push_compatible_index) {
            int index = LocationOperand::cast(destination).index();
            if (IsValidPush(source, push_type)) {
              // Store the move keyed by its destination slot index.
              if (index >= static_cast<int>(pushes->size())) {
                pushes->resize(index + 1);
              }
              (*pushes)[index] = move;
            }
          }
        }
      }
    }
  }

  // For now, only support a set of continuous pushes at the end of the list.
  size_t push_count_upper_bound = pushes->size();
  size_t push_begin = push_count_upper_bound;
  // Walk backwards until the first gap (nullptr) in the slot-indexed vector.
  for (auto move : base::Reversed(*pushes)) {
    if (move == nullptr) break;
    push_begin--;
  }
  // Compact the trailing run down to the front of the vector.
  size_t push_count = pushes->size() - push_begin;
  std::copy(pushes->begin() + push_begin,
            pushes->begin() + push_begin + push_count, pushes->begin());
  pushes->resize(push_count);
}
386
// Assembles one instruction: resolves its gap moves (with tail-call stack
// adjustment around them when needed), records the source position, emits the
// architecture-specific code, and then emits any branch / conditional-deopt /
// boolean materialization encoded in the instruction's flags mode.
CodeGenerator::CodeGenResult CodeGenerator::AssembleInstruction(
    Instruction* instr, const InstructionBlock* block) {
  int first_unused_stack_slot;
  bool adjust_stack =
      GetSlotAboveSPBeforeTailCall(instr, &first_unused_stack_slot);
  if (adjust_stack) AssembleTailCallBeforeGap(instr, first_unused_stack_slot);
  AssembleGaps(instr);
  if (adjust_stack) AssembleTailCallAfterGap(instr, first_unused_stack_slot);
  // A frame-deconstructing block must end in a return or a jump.
  DCHECK_IMPLIES(
      block->must_deconstruct_frame(),
      instr != code()->InstructionAt(block->last_instruction_index()) ||
          instr->IsRet() || instr->IsJump());
  if (instr->IsJump() && block->must_deconstruct_frame()) {
    AssembleDeconstructFrame();
  }
  AssembleSourcePosition(instr);
  // Assemble architecture-specific code for the instruction.
  CodeGenResult result = AssembleArchInstruction(instr);
  if (result != kSuccess) return result;

  FlagsMode mode = FlagsModeField::decode(instr->opcode());
  FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
  switch (mode) {
    case kFlags_branch: {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      // The branch targets are the instruction's last two inputs.
      RpoNumber true_rpo = i.InputRpo(instr->InputCount() - 2);
      RpoNumber false_rpo = i.InputRpo(instr->InputCount() - 1);

      if (true_rpo == false_rpo) {
        // redundant branch.
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return kSuccess;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // true block is next, can fall through if condition negated.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      break;
    }
    case kFlags_deoptimize: {
      // Assemble a conditional eager deoptimization after this instruction.
      InstructionOperandConverter i(this, instr);
      size_t frame_state_offset = MiscField::decode(instr->opcode());
      DeoptimizationExit* const exit =
          AddDeoptimizationExit(instr, frame_state_offset);
      Label continue_label;
      // Branch to the deopt exit when the condition holds, else fall through.
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = exit->label();
      branch.false_label = &continue_label;
      branch.fallthru = true;
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
      masm()->bind(&continue_label);
      break;
    }
    case kFlags_set: {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
      break;
    }
    case kFlags_none: {
      break;
    }
  }
  return kSuccess;
}
465
466
// Records the source position of |instr| (if any, and if it changed since the
// last recorded one) in the source position table, and optionally emits a
// "file:line:column" assembler comment when --code-comments is on.
void CodeGenerator::AssembleSourcePosition(Instruction* instr) {
  SourcePosition source_position;
  if (!code()->GetSourcePosition(instr, &source_position)) return;
  // Skip duplicates of the position most recently emitted.
  if (source_position == current_source_position_) return;
  current_source_position_ = source_position;
  if (source_position.IsUnknown()) return;
  int code_pos = source_position.raw();
  source_position_table_builder_.AddPosition(masm()->pc_offset(), code_pos,
                                             false);
  if (FLAG_code_comments) {
    CompilationInfo* info = this->info();
    // No parse info (e.g. stub compilation) means no script to resolve
    // line/column against.
    if (!info->parse_info()) return;
    Vector<char> buffer = Vector<char>::New(256);
    int ln = Script::GetLineNumber(info->script(), code_pos);
    int cn = Script::GetColumnNumber(info->script(), code_pos);
    if (info->script()->name()->IsString()) {
      Handle<String> file(String::cast(info->script()->name()));
      base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                         file->ToCString().get(), ln, cn);
    } else {
      base::OS::SNPrintF(buffer.start(), buffer.length(),
                         "-- <unknown>:%d:%d --", ln, cn);
    }
    masm()->RecordComment(buffer.start());
  }
}
493
Ben Murdochf91f0612016-11-29 16:50:11 +0000494bool CodeGenerator::GetSlotAboveSPBeforeTailCall(Instruction* instr,
495 int* slot) {
496 if (instr->IsTailCall()) {
497 InstructionOperandConverter g(this, instr);
498 *slot = g.InputInt32(instr->InputCount() - 1);
499 return true;
500 } else {
501 return false;
502 }
503}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000504
Ben Murdoch014dc512016-03-22 12:00:34 +0000505void CodeGenerator::AssembleGaps(Instruction* instr) {
506 for (int i = Instruction::FIRST_GAP_POSITION;
507 i <= Instruction::LAST_GAP_POSITION; i++) {
508 Instruction::GapPosition inner_pos =
509 static_cast<Instruction::GapPosition>(i);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000510 ParallelMove* move = instr->GetParallelMove(inner_pos);
Ben Murdoch014dc512016-03-22 12:00:34 +0000511 if (move != nullptr) resolver()->Resolve(move);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000512 }
513}
514
515
// Builds the DeoptimizationInputData for the generated code — translations,
// literals, OSR info, and one entry per recorded deopt state — and attaches
// it to |code_object|. No-op when there are no deopt points and no OSR.
void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0 && !info->is_osr()) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(
      Smi::FromInt(static_cast<int>(inlined_function_count_)));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));

  if (info->has_shared_info()) {
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    // Smi zero marks "no shared function info" (e.g. stub compilation).
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  if (info->is_osr()) {
    DCHECK(osr_pc_offset_ >= 0);
    data->SetOsrAstId(Smi::FromInt(info_->osr_ast_id().ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(osr_pc_offset_));
  } else {
    // Not OSR: record sentinel values.
    BailoutId osr_ast_id = BailoutId::None();
    data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
    data->SetOsrPcOffset(Smi::FromInt(-1));
  }

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetAstId(i, deoptimization_state->bailout_id());
    CHECK(deoptimization_states_[i]);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}
570
571
// Registers a jump table with |target_count| branch targets by prepending a
// new node to the jump-table list; the tables themselves are emitted at the
// end of GenerateCode. Returns the label to jump through.
Label* CodeGenerator::AddJumpTable(Label** targets, size_t target_count) {
  jump_tables_ = new (zone()) JumpTable(jump_tables_, targets, target_count);
  return jump_tables_->label();
}
576
577
// Records metadata for a call site: a safepoint, the exception handler entry
// (if any), and — for calls that need a frame state — the lazy-deopt
// translation(s) keyed to this pc.
void CodeGenerator::RecordCallPosition(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->reference_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kHasExceptionHandler) {
    InstructionOperandConverter i(this, instr);
    // The handler block is encoded as the instruction's last input.
    RpoNumber handler_rpo = i.InputRpo(instr->InputCount() - 1);
    handlers_.push_back({GetLabel(handler_rpo), masm()->pc_offset()});
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, it starts at argument 1 (just after the
    // code address).
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetDeoptimizationEntry(instr, frame_state_offset).descriptor();
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or they are immediates.
    // (The values should not live in register because registers are clobbered
    // by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsFPStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}
625
626
627int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
628 int result = static_cast<int>(deoptimization_literals_.size());
629 for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
630 if (deoptimization_literals_[i].is_identical_to(literal)) return i;
631 }
632 deoptimization_literals_.push_back(literal);
633 return result;
634}
635
// Looks up the DeoptimizationEntry for |instr|: the instruction input at
// |frame_state_offset| holds the state id used to index the sequence's
// deoptimization entries.
DeoptimizationEntry const& CodeGenerator::GetDeoptimizationEntry(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  int const state_id = i.InputInt32(frame_state_offset);
  return code()->GetDeoptimizationEntry(state_id);
}
642
// Returns the recorded reason for the deoptimization state with the given id.
// |deoptimization_id| must index an existing entry in deoptimization_states_.
DeoptimizeReason CodeGenerator::GetDeoptimizationReason(
    int deoptimization_id) const {
  size_t const index = static_cast<size_t>(deoptimization_id);
  DCHECK_LT(index, deoptimization_states_.size());
  return deoptimization_states_[index]->reason();
}
Emily Bernier958fae72015-03-24 16:35:39 -0400649
// Emits translation commands for one state value descriptor:
// - nested descriptors begin a captured object and recurse over the fields,
// - duplicates reference a previously materialized object by id,
// - plain values consume the next frame state input operand.
// The operand iterator is advanced only when a plain value is translated
// (directly here, or at the leaves of the recursion).
void CodeGenerator::TranslateStateValueDescriptor(
    StateValueDescriptor* desc, Translation* translation,
    InstructionOperandIterator* iter) {
  if (desc->IsNested()) {
    // Captured object: open it with its field count, then translate each
    // field descriptor in order.
    translation->BeginCapturedObject(static_cast<int>(desc->size()));
    for (size_t index = 0; index < desc->fields().size(); index++) {
      TranslateStateValueDescriptor(&desc->fields()[index], translation, iter);
    }
  } else if (desc->IsDuplicate()) {
    // Object emitted earlier in this translation; refer to it by id.
    translation->DuplicateObject(static_cast<int>(desc->id()));
  } else {
    // Plain value: translate the next instruction input with the
    // descriptor's machine type.
    DCHECK(desc->IsPlain());
    AddTranslationForOperand(translation, iter->instruction(), iter->Advance(),
                             desc->type());
  }
}
666
667
// Translates all operands of the frame state described by |desc|, applying
// |combine| to splice the call instruction's outputs into the frame layout:
// kPushOutput materializes outputs in the slots past the pre-call size,
// kPokeAt overwrites existing slots near the top of the stack.
void CodeGenerator::TranslateFrameStateDescriptorOperands(
    FrameStateDescriptor* desc, InstructionOperandIterator* iter,
    OutputFrameStateCombine combine, Translation* translation) {
  for (size_t index = 0; index < desc->GetSize(combine); index++) {
    switch (combine.kind()) {
      case OutputFrameStateCombine::kPushOutput: {
        DCHECK(combine.GetPushCount() <= iter->instruction()->OutputCount());
        size_t size_without_output =
            desc->GetSize(OutputFrameStateCombine::Ignore());
        // If the index is past the existing stack items in values_.
        if (index >= size_without_output) {
          // Materialize the result of the call instruction in this slot.
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - size_without_output),
              MachineType::AnyTagged());
          continue;
        }
        break;
      }
      case OutputFrameStateCombine::kPokeAt:
        // The result of the call should be placed at position
        // [index_from_top] in the stack (overwriting whatever was
        // previously there).
        size_t index_from_top =
            desc->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
        if (index >= index_from_top &&
            index < index_from_top + iter->instruction()->OutputCount()) {
          AddTranslationForOperand(
              translation, iter->instruction(),
              iter->instruction()->OutputAt(index - index_from_top),
              MachineType::AnyTagged());
          iter->Advance();  // We do not use this input, but we need to
                            // advance, as the input got replaced.
          continue;
        }
        break;
    }
    // Slot not taken by a call output: translate the state value descriptor
    // at this index, consuming frame state inputs as needed.
    StateValueDescriptor* value_desc = desc->GetStateValueDescriptor();
    TranslateStateValueDescriptor(&value_desc->fields()[index], translation,
                                  iter);
  }
}
711
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000712
// Builds the translation for |descriptor|: recursively emits all outer
// frame states first, then this frame's begin-record (dispatched on the
// frame state type), then the frame's operands. Only the inner-most frame
// receives |state_combine|; outer frames ignore the call's outputs.
void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, InstructionOperandIterator* iter,
    Translation* translation, OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != nullptr) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), iter,
                                            translation,
                                            OutputFrameStateCombine::Ignore());
  }

  // Prefer the SharedFunctionInfo carried by the descriptor; otherwise fall
  // back to the one from the compilation info, if any.
  Handle<SharedFunctionInfo> shared_info;
  if (!descriptor->shared_info().ToHandle(&shared_info)) {
    if (!info()->has_shared_info()) {
      return;  // Stub with no SharedFunctionInfo.
    }
    shared_info = info()->shared_info();
  }
  // The shared info is referenced from the translation via the
  // deoptimization literal table.
  int shared_info_id = DefineDeoptimizationLiteral(shared_info);

  // Emit the frame header matching the frame state's type.
  switch (descriptor->type()) {
    case FrameStateType::kJavaScriptFunction:
      // Height excludes the receiver and parameters (1 + parameters_count).
      translation->BeginJSFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    (1 + descriptor->parameters_count())));
      break;
    case FrameStateType::kInterpretedFunction:
      translation->BeginInterpretedFrame(
          descriptor->bailout_id(), shared_info_id,
          static_cast<unsigned int>(descriptor->locals_count() + 1));
      break;
    case FrameStateType::kArgumentsAdaptor:
      translation->BeginArgumentsAdaptorFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kTailCallerFunction:
      translation->BeginTailCallerFrame(shared_info_id);
      break;
    case FrameStateType::kConstructStub:
      translation->BeginConstructStubFrame(
          shared_info_id,
          static_cast<unsigned int>(descriptor->parameters_count()));
      break;
    case FrameStateType::kGetterStub:
      translation->BeginGetterStubFrame(shared_info_id);
      break;
    case FrameStateType::kSetterStub:
      translation->BeginSetterStubFrame(shared_info_id);
      break;
  }

  // Finally, translate this frame's operands under |state_combine|.
  TranslateFrameStateDescriptorOperands(descriptor, iter, state_combine,
                                        translation);
}
768
769
// Builds a complete deoptimization translation for the frame state input of
// |instr| starting at |frame_state_offset|, records a DeoptimizationState
// for it, and returns that state's id. Callers pass |pc_offset| == -1 when
// the translation is not tied to a recorded pc (see AddDeoptimizationExit).
int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  DeoptimizationEntry const& entry =
      GetDeoptimizationEntry(instr, frame_state_offset);
  FrameStateDescriptor* const descriptor = entry.descriptor();
  // Skip the state-id input itself; the frame state values follow it.
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  InstructionOperandIterator iter(instr, frame_state_offset);
  BuildTranslationForFrameStateDescriptor(descriptor, &iter, &translation,
                                          state_combine);

  // The new state's id is its index in deoptimization_states_.
  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset,
      entry.reason()));

  return deoptimization_id;
}
793
794
// Appends one translation command for operand |op| of |instr|, dispatching
// on the operand's location kind (stack slot, FP stack slot, register,
// FP register, immediate) and the machine |type| of the stored value.
// Combinations with no encoding fail a CHECK.
void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    // General-purpose stack slot: pick the store variant by machine type.
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolStackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32StackSlot(LocationOperand::cast(op)->index());
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32StackSlot(LocationOperand::cast(op)->index());
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreStackSlot(LocationOperand::cast(op)->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsFPStackSlot()) {
    // Floating-point stack slot: only float32/float64 are valid here.
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleStackSlot(LocationOperand::cast(op)->index());
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatStackSlot(LocationOperand::cast(op)->index());
    }
  } else if (op->IsRegister()) {
    // General-purpose register: same type dispatch as for stack slots.
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kBit) {
      translation->StoreBoolRegister(converter.ToRegister(op));
    } else if (type == MachineType::Int8() || type == MachineType::Int16() ||
               type == MachineType::Int32()) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == MachineType::Uint8() || type == MachineType::Uint16() ||
               type == MachineType::Uint32()) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if (type.representation() == MachineRepresentation::kTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsFPRegister()) {
    // Floating-point register: only float32/float64 are valid here.
    InstructionOperandConverter converter(this, instr);
    if (type.representation() == MachineRepresentation::kFloat64) {
      translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
    } else {
      DCHECK_EQ(MachineRepresentation::kFloat32, type.representation());
      translation->StoreFloatRegister(converter.ToFloatRegister(op));
    }
  } else if (op->IsImmediate()) {
    // Immediate: materialize the constant as a heap object and reference it
    // through the deoptimization literal table.
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        if (type.representation() == MachineRepresentation::kTagged) {
          // When pointers are 4 bytes, we can use int32 constants to represent
          // Smis.
          DCHECK_EQ(4, kPointerSize);
          constant_object =
              handle(reinterpret_cast<Smi*>(constant.ToInt32()), isolate());
          DCHECK(constant_object->IsSmi());
        } else {
          DCHECK(type == MachineType::Int32() ||
                 type == MachineType::Uint32() ||
                 type.representation() == MachineRepresentation::kBit ||
                 type.representation() == MachineRepresentation::kNone);
          DCHECK(type.representation() != MachineRepresentation::kNone ||
                 constant.ToInt32() == FrameStateDescriptor::kImpossibleValue);

          constant_object =
              isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        }
        break;
      case Constant::kInt64:
        // When pointers are 8 bytes, we can use int64 constants to represent
        // Smis.
        DCHECK_EQ(type.representation(), MachineRepresentation::kTagged);
        DCHECK_EQ(8, kPointerSize);
        constant_object =
            handle(reinterpret_cast<Smi*>(constant.ToInt64()), isolate());
        DCHECK(constant_object->IsSmi());
        break;
      case Constant::kFloat32:
        DCHECK(type.representation() == MachineRepresentation::kFloat32 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat32());
        break;
      case Constant::kFloat64:
        DCHECK(type.representation() == MachineRepresentation::kFloat64 ||
               type.representation() == MachineRepresentation::kTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK(type.representation() == MachineRepresentation::kTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    // The closure itself gets a dedicated opcode; everything else becomes a
    // deoptimization literal.
    if (constant_object.is_identical_to(info()->closure())) {
      translation->StoreJSFrameFunction();
    } else {
      int literal_id = DefineDeoptimizationLiteral(constant_object);
      translation->StoreLiteral(literal_id);
    }
  } else {
    CHECK(false);
  }
}
904
905
// Remembers the current assembler pc offset as the most recent lazy
// deoptimization site.
void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}
909
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100910DeoptimizationExit* CodeGenerator::AddDeoptimizationExit(
911 Instruction* instr, size_t frame_state_offset) {
912 int const deoptimization_id = BuildTranslation(
913 instr, -1, frame_state_offset, OutputFrameStateCombine::Ignore());
914 DeoptimizationExit* const exit =
915 new (zone()) DeoptimizationExit(deoptimization_id);
916 deoptimization_exits_.push_back(exit);
917 return exit;
918}
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000919
// Constructs an out-of-line code fragment bound to |gen| and prepends it to
// the generator's singly-linked list of such fragments (gen->ools_).
OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : frame_(gen->frame()), masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}
924
925
926OutOfLineCode::~OutOfLineCode() {}
927
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000928} // namespace compiler
929} // namespace internal
930} // namespace v8