// Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/compiler/code-generator-impl.h"
#include "src/compiler/linkage.h"
#include "src/compiler/pipeline.h"

namespace v8 {
namespace internal {
namespace compiler {

CodeGenerator::CodeGenerator(Frame* frame, Linkage* linkage,
                             InstructionSequence* code, CompilationInfo* info)
    : frame_(frame),
      linkage_(linkage),
      code_(code),
      info_(info),
      labels_(zone()->NewArray<Label>(code->InstructionBlockCount())),
      current_block_(BasicBlock::RpoNumber::Invalid()),
      current_source_position_(SourcePosition::Invalid()),
      masm_(code->zone()->isolate(), NULL, 0),
      resolver_(this),
      safepoints_(code->zone()),
      deoptimization_states_(code->zone()),
      deoptimization_literals_(code->zone()),
      translations_(code->zone()),
      last_lazy_deopt_pc_(0),
      ools_(nullptr) {
  for (int i = 0; i < code->InstructionBlockCount(); ++i) {
    new (&labels_[i]) Label;
  }
}


Handle<Code> CodeGenerator::GenerateCode() {
  CompilationInfo* info = this->info();

  // Emit a code line info recording start event.
  PositionsRecorder* recorder = masm()->positions_recorder();
  LOG_CODE_EVENT(isolate(), CodeStartLinePosInfoRecordEvent(recorder));

  // Place function entry hook if requested to do so.
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    ProfileEntryHookStub::MaybeCallEntryHook(masm());
  }

  // Architecture-specific, linkage-specific prologue.
  info->set_prologue_offset(masm()->pc_offset());
  AssemblePrologue();

  // Assemble all non-deferred blocks, followed by deferred ones.
  for (int deferred = 0; deferred < 2; ++deferred) {
    for (auto const block : code()->instruction_blocks()) {
      if (block->IsDeferred() == (deferred == 0)) {
        continue;
      }
      // Align loop headers on 16-byte boundaries.
      if (block->IsLoopHeader()) masm()->Align(16);
      // Bind a label for a block.
      current_block_ = block->rpo_number();
      if (FLAG_code_comments) {
        // TODO(titzer): these code comments are a giant memory leak.
        Vector<char> buffer = Vector<char>::New(32);
        SNPrintF(buffer, "-- B%d start --", block->id().ToInt());
        masm()->RecordComment(buffer.start());
      }
      masm()->bind(GetLabel(current_block_));
      for (int i = block->code_start(); i < block->code_end(); ++i) {
        AssembleInstruction(code()->InstructionAt(i));
      }
    }
  }

  // Assemble all out-of-line code.
  if (ools_) {
    masm()->RecordComment("-- Out of line code --");
    for (OutOfLineCode* ool = ools_; ool; ool = ool->next()) {
      masm()->bind(ool->entry());
      ool->Generate();
      masm()->jmp(ool->exit());
    }
  }

  FinishCode(masm());

  // Ensure there is space for lazy deoptimization in the code.
  if (!info->IsStub()) {
    int target_offset = masm()->pc_offset() + Deoptimizer::patch_size();
    while (masm()->pc_offset() < target_offset) {
      masm()->nop();
    }
  }

  safepoints()->Emit(masm(), frame()->GetSpillSlotCount());

  // TODO(titzer): what are the right code flags here?
  Code::Kind kind = Code::STUB;
  if (linkage()->GetIncomingDescriptor()->IsJSFunctionCall()) {
    kind = Code::OPTIMIZED_FUNCTION;
  }
  Handle<Code> result = v8::internal::CodeGenerator::MakeCodeEpilogue(
      masm(), Code::ComputeFlags(kind), info);
  result->set_is_turbofanned(true);
  result->set_stack_slots(frame()->GetSpillSlotCount());
  result->set_safepoint_table_offset(safepoints()->GetCodeOffset());

  PopulateDeoptimizationData(result);

  // Ensure there is space for lazy deoptimization in the relocation info.
  if (!info->IsStub()) {
    Deoptimizer::EnsureRelocSpaceForLazyDeoptimization(result);
  }

  // Emit a code line info recording stop event.
  void* line_info = recorder->DetachJITHandlerData();
  LOG_CODE_EVENT(isolate(), CodeEndLinePosInfoRecordEvent(*result, line_info));

  return result;
}
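
// A rough sketch of how this generator is driven, for orientation only (the
// real call site lives in src/compiler/pipeline.cc; the setup of the frame,
// linkage and instruction sequence is elided here):
//
//   CodeGenerator generator(frame, linkage, sequence, info);
//   Handle<Code> code = generator.GenerateCode();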


// Returns true if {block} immediately follows the current block in assembly
// order, so that no jump is needed to reach it.
bool CodeGenerator::IsNextInAssemblyOrder(BasicBlock::RpoNumber block) const {
  return code()->InstructionBlockAt(current_block_)->ao_number().IsNext(
      code()->InstructionBlockAt(block)->ao_number());
}


void CodeGenerator::RecordSafepoint(PointerMap* pointers, Safepoint::Kind kind,
                                    int arguments,
                                    Safepoint::DeoptMode deopt_mode) {
  const ZoneList<InstructionOperand*>* operands =
      pointers->GetNormalizedOperands();
  Safepoint safepoint =
      safepoints()->DefineSafepoint(masm(), kind, arguments, deopt_mode);
  for (int i = 0; i < operands->length(); i++) {
    InstructionOperand* pointer = operands->at(i);
    if (pointer->IsStackSlot()) {
      safepoint.DefinePointerSlot(pointer->index(), zone());
    } else if (pointer->IsRegister() && (kind & Safepoint::kWithRegisters)) {
      Register reg = Register::FromAllocationIndex(pointer->index());
      safepoint.DefinePointerRegister(reg, zone());
    }
  }
}


void CodeGenerator::AssembleInstruction(Instruction* instr) {
  if (instr->IsGapMoves()) {
    // Handle parallel moves associated with the gap instruction.
    AssembleGap(GapInstruction::cast(instr));
  } else if (instr->IsSourcePosition()) {
    AssembleSourcePosition(SourcePositionInstruction::cast(instr));
  } else {
    // Assemble architecture-specific code for the instruction.
    AssembleArchInstruction(instr);

    FlagsMode mode = FlagsModeField::decode(instr->opcode());
    FlagsCondition condition = FlagsConditionField::decode(instr->opcode());
    if (mode == kFlags_branch) {
      // Assemble a branch after this instruction.
      InstructionOperandConverter i(this, instr);
      BasicBlock::RpoNumber true_rpo =
          i.InputRpo(static_cast<int>(instr->InputCount()) - 2);
      BasicBlock::RpoNumber false_rpo =
          i.InputRpo(static_cast<int>(instr->InputCount()) - 1);

      if (true_rpo == false_rpo) {
        // Redundant branch: both edges lead to the same block, so an
        // unconditional jump suffices (or nothing, if the block is next).
        if (!IsNextInAssemblyOrder(true_rpo)) {
          AssembleArchJump(true_rpo);
        }
        return;
      }
      if (IsNextInAssemblyOrder(true_rpo)) {
        // The true block is next in assembly order; negate the condition so
        // that execution can fall through to it.
        std::swap(true_rpo, false_rpo);
        condition = NegateFlagsCondition(condition);
      }
      BranchInfo branch;
      branch.condition = condition;
      branch.true_label = GetLabel(true_rpo);
      branch.false_label = GetLabel(false_rpo);
      branch.fallthru = IsNextInAssemblyOrder(false_rpo);
      // Assemble architecture-specific branch.
      AssembleArchBranch(instr, &branch);
    } else if (mode == kFlags_set) {
      // Assemble a boolean materialization after this instruction.
      AssembleArchBoolean(instr, condition);
    }
  }
}
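
// Illustrative example of the fall-through logic above (block numbers are
// invented): for a branch "if equal -> B3, else -> B4" where B3 immediately
// follows in assembly order, the condition is negated to "not equal" and the
// targets swapped, so a single "branch-if-not-equal B4" is emitted and
// execution falls through into B3.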


void CodeGenerator::AssembleSourcePosition(SourcePositionInstruction* instr) {
  SourcePosition source_position = instr->source_position();
  if (source_position == current_source_position_) return;
  DCHECK(!source_position.IsInvalid());
  if (!source_position.IsUnknown()) {
    int code_pos = source_position.raw();
    masm()->positions_recorder()->RecordPosition(source_position.raw());
    masm()->positions_recorder()->WriteRecordedPositions();
    if (FLAG_code_comments) {
      Vector<char> buffer = Vector<char>::New(256);
      CompilationInfo* info = this->info();
      int ln = Script::GetLineNumber(info->script(), code_pos);
      int cn = Script::GetColumnNumber(info->script(), code_pos);
      if (info->script()->name()->IsString()) {
        Handle<String> file(String::cast(info->script()->name()));
        base::OS::SNPrintF(buffer.start(), buffer.length(), "-- %s:%d:%d --",
                           file->ToCString().get(), ln, cn);
      } else {
        base::OS::SNPrintF(buffer.start(), buffer.length(),
                           "-- <unknown>:%d:%d --", ln, cn);
      }
      masm()->RecordComment(buffer.start());
    }
  }
  current_source_position_ = source_position;
}


void CodeGenerator::AssembleGap(GapInstruction* instr) {
  for (int i = GapInstruction::FIRST_INNER_POSITION;
       i <= GapInstruction::LAST_INNER_POSITION; i++) {
    GapInstruction::InnerPosition inner_pos =
        static_cast<GapInstruction::InnerPosition>(i);
    ParallelMove* move = instr->GetParallelMove(inner_pos);
    if (move != NULL) resolver()->Resolve(move);
  }
}


void CodeGenerator::PopulateDeoptimizationData(Handle<Code> code_object) {
  CompilationInfo* info = this->info();
  int deopt_count = static_cast<int>(deoptimization_states_.size());
  if (deopt_count == 0) return;
  Handle<DeoptimizationInputData> data =
      DeoptimizationInputData::New(isolate(), deopt_count, TENURED);

  Handle<ByteArray> translation_array =
      translations_.CreateByteArray(isolate()->factory());

  data->SetTranslationByteArray(*translation_array);
  data->SetInlinedFunctionCount(Smi::FromInt(0));
  data->SetOptimizationId(Smi::FromInt(info->optimization_id()));
  // TODO(jarin) The following code was copied over from Lithium, not sure
  // whether the scope or the IsOptimizing condition are really needed.
  if (info->IsOptimizing()) {
    // Reference to shared function info does not change between phases.
    AllowDeferredHandleDereference allow_handle_dereference;
    data->SetSharedFunctionInfo(*info->shared_info());
  } else {
    data->SetSharedFunctionInfo(Smi::FromInt(0));
  }

  Handle<FixedArray> literals = isolate()->factory()->NewFixedArray(
      static_cast<int>(deoptimization_literals_.size()), TENURED);
  {
    AllowDeferredHandleDereference copy_handles;
    for (unsigned i = 0; i < deoptimization_literals_.size(); i++) {
      literals->set(i, *deoptimization_literals_[i]);
    }
    data->SetLiteralArray(*literals);
  }

  // No OSR in Turbofan yet...
  BailoutId osr_ast_id = BailoutId::None();
  data->SetOsrAstId(Smi::FromInt(osr_ast_id.ToInt()));
  data->SetOsrPcOffset(Smi::FromInt(-1));

  // Populate deoptimization entries.
  for (int i = 0; i < deopt_count; i++) {
    DeoptimizationState* deoptimization_state = deoptimization_states_[i];
    data->SetAstId(i, deoptimization_state->bailout_id());
    CHECK_NE(NULL, deoptimization_states_[i]);
    data->SetTranslationIndex(
        i, Smi::FromInt(deoptimization_states_[i]->translation_id()));
    data->SetArgumentsStackHeight(i, Smi::FromInt(0));
    data->SetPc(i, Smi::FromInt(deoptimization_state->pc_offset()));
  }

  code_object->set_deoptimization_data(*data);
}


void CodeGenerator::AddSafepointAndDeopt(Instruction* instr) {
  CallDescriptor::Flags flags(MiscField::decode(instr->opcode()));

  bool needs_frame_state = (flags & CallDescriptor::kNeedsFrameState);

  RecordSafepoint(
      instr->pointer_map(), Safepoint::kSimple, 0,
      needs_frame_state ? Safepoint::kLazyDeopt : Safepoint::kNoLazyDeopt);

  if (flags & CallDescriptor::kNeedsNopAfterCall) {
    AddNopForSmiCodeInlining();
  }

  if (needs_frame_state) {
    MarkLazyDeoptSite();
    // If the frame state is present, the deoptimization info starts at
    // argument 1 (just after the code address).
    InstructionOperandConverter converter(this, instr);
    size_t frame_state_offset = 1;
    FrameStateDescriptor* descriptor =
        GetFrameStateDescriptor(instr, frame_state_offset);
    int pc_offset = masm()->pc_offset();
    int deopt_state_id = BuildTranslation(instr, pc_offset, frame_state_offset,
                                          descriptor->state_combine());
    // If the pre-call frame state differs from the post-call one, produce the
    // pre-call frame state, too.
    // TODO(jarin) We might want to avoid building the pre-call frame state
    // because it is only used to get locals and arguments (by the debugger and
    // f.arguments), and those are the same in the pre-call and post-call
    // states.
    if (!descriptor->state_combine().IsOutputIgnored()) {
      deopt_state_id = BuildTranslation(instr, -1, frame_state_offset,
                                        OutputFrameStateCombine::Ignore());
    }
#if DEBUG
    // Make sure all the values live in stack slots or are immediates.
    // (The values should not live in registers because registers are
    // clobbered by calls.)
    for (size_t i = 0; i < descriptor->GetSize(); i++) {
      InstructionOperand* op = instr->InputAt(frame_state_offset + 1 + i);
      CHECK(op->IsStackSlot() || op->IsDoubleStackSlot() || op->IsImmediate());
    }
#endif
    safepoints()->RecordLazyDeoptimizationIndex(deopt_state_id);
  }
}


int CodeGenerator::DefineDeoptimizationLiteral(Handle<Object> literal) {
  int result = static_cast<int>(deoptimization_literals_.size());
  for (unsigned i = 0; i < deoptimization_literals_.size(); ++i) {
    if (deoptimization_literals_[i].is_identical_to(literal)) return i;
  }
  deoptimization_literals_.push_back(literal);
  return result;
}
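
// Note: the lookup above is a linear scan that deduplicates literals, so
// defining the same handle twice yields the original index; e.g. two deopt
// points referencing the same JSFunction share one literal slot.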


FrameStateDescriptor* CodeGenerator::GetFrameStateDescriptor(
    Instruction* instr, size_t frame_state_offset) {
  InstructionOperandConverter i(this, instr);
  InstructionSequence::StateId state_id = InstructionSequence::StateId::FromInt(
      i.InputInt32(static_cast<int>(frame_state_offset)));
  return code()->GetFrameStateDescriptor(state_id);
}

struct OperandAndType {
  OperandAndType(InstructionOperand* operand, MachineType type)
      : operand_(operand), type_(type) {}

  InstructionOperand* operand_;
  MachineType type_;
};

static OperandAndType TypedOperandForFrameState(
    FrameStateDescriptor* descriptor, Instruction* instr,
    size_t frame_state_offset, size_t index, OutputFrameStateCombine combine) {
  DCHECK(index < descriptor->GetSize(combine));
  switch (combine.kind()) {
    case OutputFrameStateCombine::kPushOutput: {
      DCHECK(combine.GetPushCount() <= instr->OutputCount());
      size_t size_without_output =
          descriptor->GetSize(OutputFrameStateCombine::Ignore());
      // If the index is past the existing stack items, return the output.
      if (index >= size_without_output) {
        return OperandAndType(instr->OutputAt(index - size_without_output),
                              kMachAnyTagged);
      }
      break;
    }
    case OutputFrameStateCombine::kPokeAt:
      size_t index_from_top =
          descriptor->GetSize(combine) - 1 - combine.GetOffsetToPokeAt();
      if (index >= index_from_top &&
          index < index_from_top + instr->OutputCount()) {
        return OperandAndType(instr->OutputAt(index - index_from_top),
                              kMachAnyTagged);
      }
      break;
  }
  return OperandAndType(instr->InputAt(frame_state_offset + index),
                        descriptor->GetType(index));
}
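
// Worked example for the index mapping above (numbers invented for
// illustration): with a kPushOutput combine, a descriptor of size 5 without
// the output and one instruction output, index 5 maps to OutputAt(0) while
// indices 0..4 fall through to the frame state inputs. With kPokeAt at
// offset 0 and a descriptor of size 6, index_from_top is 5, so index 5 maps
// to OutputAt(0) and all lower indices again come from the inputs.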


void CodeGenerator::BuildTranslationForFrameStateDescriptor(
    FrameStateDescriptor* descriptor, Instruction* instr,
    Translation* translation, size_t frame_state_offset,
    OutputFrameStateCombine state_combine) {
  // Outer-most state must be added to translation first.
  if (descriptor->outer_state() != NULL) {
    BuildTranslationForFrameStateDescriptor(descriptor->outer_state(), instr,
                                            translation, frame_state_offset,
                                            OutputFrameStateCombine::Ignore());
    // Skip over the inputs that belong to the outer states.
    frame_state_offset += descriptor->outer_state()->GetTotalSize();
  }

  int id = Translation::kSelfLiteralId;
  if (!descriptor->jsfunction().is_null()) {
    id = DefineDeoptimizationLiteral(
        Handle<Object>::cast(descriptor->jsfunction().ToHandleChecked()));
  }

  switch (descriptor->type()) {
    case JS_FRAME:
      translation->BeginJSFrame(
          descriptor->bailout_id(), id,
          static_cast<unsigned int>(descriptor->GetSize(state_combine) -
                                    descriptor->parameters_count()));
      break;
    case ARGUMENTS_ADAPTOR:
      translation->BeginArgumentsAdaptorFrame(
          id, static_cast<unsigned int>(descriptor->parameters_count()));
      break;
  }

  for (size_t i = 0; i < descriptor->GetSize(state_combine); i++) {
    OperandAndType op = TypedOperandForFrameState(
        descriptor, instr, frame_state_offset, i, state_combine);
    AddTranslationForOperand(translation, instr, op.operand_, op.type_);
  }
}


int CodeGenerator::BuildTranslation(Instruction* instr, int pc_offset,
                                    size_t frame_state_offset,
                                    OutputFrameStateCombine state_combine) {
  FrameStateDescriptor* descriptor =
      GetFrameStateDescriptor(instr, frame_state_offset);
  frame_state_offset++;

  Translation translation(
      &translations_, static_cast<int>(descriptor->GetFrameCount()),
      static_cast<int>(descriptor->GetJSFrameCount()), zone());
  BuildTranslationForFrameStateDescriptor(descriptor, instr, &translation,
                                          frame_state_offset, state_combine);

  int deoptimization_id = static_cast<int>(deoptimization_states_.size());

  deoptimization_states_.push_back(new (zone()) DeoptimizationState(
      descriptor->bailout_id(), translation.index(), pc_offset));

  return deoptimization_id;
}


void CodeGenerator::AddTranslationForOperand(Translation* translation,
                                             Instruction* instr,
                                             InstructionOperand* op,
                                             MachineType type) {
  if (op->IsStackSlot()) {
    if (type == kMachBool || type == kMachInt32 || type == kMachInt8 ||
        type == kMachInt16) {
      translation->StoreInt32StackSlot(op->index());
    } else if (type == kMachUint32 || type == kMachUint16 ||
               type == kMachUint8) {
      translation->StoreUint32StackSlot(op->index());
    } else if ((type & kRepMask) == kRepTagged) {
      translation->StoreStackSlot(op->index());
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleStackSlot()) {
    DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
    translation->StoreDoubleStackSlot(op->index());
  } else if (op->IsRegister()) {
    InstructionOperandConverter converter(this, instr);
    if (type == kMachBool || type == kMachInt32 || type == kMachInt8 ||
        type == kMachInt16) {
      translation->StoreInt32Register(converter.ToRegister(op));
    } else if (type == kMachUint32 || type == kMachUint16 ||
               type == kMachUint8) {
      translation->StoreUint32Register(converter.ToRegister(op));
    } else if ((type & kRepMask) == kRepTagged) {
      translation->StoreRegister(converter.ToRegister(op));
    } else {
      CHECK(false);
    }
  } else if (op->IsDoubleRegister()) {
    DCHECK((type & (kRepFloat32 | kRepFloat64)) != 0);
    InstructionOperandConverter converter(this, instr);
    translation->StoreDoubleRegister(converter.ToDoubleRegister(op));
  } else if (op->IsImmediate()) {
    InstructionOperandConverter converter(this, instr);
    Constant constant = converter.ToConstant(op);
    Handle<Object> constant_object;
    switch (constant.type()) {
      case Constant::kInt32:
        DCHECK(type == kMachInt32 || type == kMachUint32);
        constant_object =
            isolate()->factory()->NewNumberFromInt(constant.ToInt32());
        break;
      case Constant::kFloat64:
        DCHECK(type == kMachFloat64 || type == kMachAnyTagged);
        constant_object = isolate()->factory()->NewNumber(constant.ToFloat64());
        break;
      case Constant::kHeapObject:
        DCHECK((type & kRepMask) == kRepTagged);
        constant_object = constant.ToHeapObject();
        break;
      default:
        CHECK(false);
    }
    int literal_id = DefineDeoptimizationLiteral(constant_object);
    translation->StoreLiteral(literal_id);
  } else {
    CHECK(false);
  }
}
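
// Summary of the dispatch above, for quick reference: stack slots and
// registers holding signed small types go through StoreInt32StackSlot /
// StoreInt32Register, unsigned ones through the Uint32 variants, tagged
// values through StoreStackSlot / StoreRegister, float values through the
// Double variants, and immediates are materialized as heap numbers (or kept
// as heap objects) and recorded as deoptimization literals.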


void CodeGenerator::MarkLazyDeoptSite() {
  last_lazy_deopt_pc_ = masm()->pc_offset();
}

#if !V8_TURBOFAN_BACKEND

void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleArchBranch(Instruction* instr,
                                       BranchInfo* branch) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleArchJump(BasicBlock::RpoNumber target) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleDeoptimizerCall(int deoptimization_id) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssemblePrologue() { UNIMPLEMENTED(); }


void CodeGenerator::AssembleReturn() { UNIMPLEMENTED(); }


void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  UNIMPLEMENTED();
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  UNIMPLEMENTED();
}


void CodeGenerator::AddNopForSmiCodeInlining() { UNIMPLEMENTED(); }

#endif  // !V8_TURBOFAN_BACKEND


OutOfLineCode::OutOfLineCode(CodeGenerator* gen)
    : masm_(gen->masm()), next_(gen->ools_) {
  gen->ools_ = this;
}


OutOfLineCode::~OutOfLineCode() {}
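
// A minimal sketch of how the per-architecture back ends use this class
// (OutOfLineLoadNaN is a hypothetical subclass name; the real subclasses
// live in the architecture-specific code generators):
//
//   class OutOfLineLoadNaN final : public OutOfLineCode {
//    public:
//     OutOfLineLoadNaN(CodeGenerator* gen, DoubleRegister result)
//         : OutOfLineCode(gen), result_(result) {}
//     void Generate() final { /* load a NaN into result_ */ }
//    private:
//     DoubleRegister result_;
//   };
//
// Constructing an instance links it into the generator's list; GenerateCode()
// later binds entry(), calls Generate(), and jumps back to exit().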

}  // namespace compiler
}  // namespace internal
}  // namespace v8