blob: ffb8f9fa8d0f7e4a733e7e3894385625323fe578 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/instruction-selector.h"
6
Emily Bernierd0a1eb72015-03-24 16:35:39 -04007#include "src/compiler/graph.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/compiler/instruction-selector-impl.h"
9#include "src/compiler/node-matchers.h"
10#include "src/compiler/node-properties-inl.h"
11#include "src/compiler/pipeline.h"
12
13namespace v8 {
14namespace internal {
15namespace compiler {
16
// Constructs an instruction selector for one graph/schedule pair.  The
// per-node side tables (node_map_, defined_, used_) are sized up front from
// graph->NodeCount() so node ids can be used as direct indices.
InstructionSelector::InstructionSelector(Zone* local_zone, Graph* graph,
                                         Linkage* linkage,
                                         InstructionSequence* sequence,
                                         Schedule* schedule,
                                         SourcePositionTable* source_positions,
                                         Features features)
    : zone_(local_zone),
      linkage_(linkage),
      sequence_(sequence),
      source_positions_(source_positions),
      features_(features),
      schedule_(schedule),
      // kNodeUnmapped marks nodes that have no virtual register yet; see
      // GetVirtualRegister() for the lazy allocation.
      node_map_(graph->NodeCount(), kNodeUnmapped, zone()),
      current_block_(NULL),
      instructions_(zone()),
      defined_(graph->NodeCount(), false, zone()),
      used_(graph->NodeCount(), false, zone()) {}
Ben Murdochb8a8cc12014-11-26 15:28:44 +000034
35
// Top-level driver: marks loop-header phi inputs live, selects instructions
// for every block (in reverse RPO), then copies the selected instructions
// into the InstructionSequence in forward order.
void InstructionSelector::SelectInstructions() {
  // Mark the inputs of all phis in loop headers as used.
  BasicBlockVector* blocks = schedule()->rpo_order();
  for (BasicBlockVectorIter i = blocks->begin(); i != blocks->end(); ++i) {
    BasicBlock* block = *i;
    if (!block->IsLoopHeader()) continue;
    // Loop headers must have at least two predecessors (back edge + entry).
    DCHECK_NE(0, static_cast<int>(block->PredecessorCount()));
    DCHECK_NE(1, static_cast<int>(block->PredecessorCount()));
    for (BasicBlock::const_iterator j = block->begin(); j != block->end();
         ++j) {
      Node* phi = *j;
      if (phi->opcode() != IrOpcode::kPhi) continue;

      // Mark all inputs as used.
      for (Node* const k : phi->inputs()) {
        MarkAsUsed(k);
      }
    }
  }

  // Visit each basic block in post order.
  for (BasicBlockVectorRIter i = blocks->rbegin(); i != blocks->rend(); ++i) {
    VisitBlock(*i);
  }

  // Schedule the selected instructions.
  for (BasicBlockVectorIter i = blocks->begin(); i != blocks->end(); ++i) {
    BasicBlock* block = *i;
    InstructionBlock* instruction_block =
        sequence()->InstructionBlockAt(block->GetRpoNumber());
    size_t end = instruction_block->code_end();
    size_t start = instruction_block->code_start();
    sequence()->StartBlock(block->GetRpoNumber());
    // Note: code_start > code_end here because VisitBlock() records the
    // block's instructions in reverse; walking start down to end restores
    // forward order.
    while (start-- > end) {
      sequence()->AddInstruction(instructions_[start]);
    }
    sequence()->EndBlock(block->GetRpoNumber());
  }
}
75
76
77Instruction* InstructionSelector::Emit(InstructionCode opcode,
78 InstructionOperand* output,
79 size_t temp_count,
80 InstructionOperand** temps) {
81 size_t output_count = output == NULL ? 0 : 1;
82 return Emit(opcode, output_count, &output, 0, NULL, temp_count, temps);
83}
84
85
86Instruction* InstructionSelector::Emit(InstructionCode opcode,
87 InstructionOperand* output,
88 InstructionOperand* a, size_t temp_count,
89 InstructionOperand** temps) {
90 size_t output_count = output == NULL ? 0 : 1;
91 return Emit(opcode, output_count, &output, 1, &a, temp_count, temps);
92}
93
94
95Instruction* InstructionSelector::Emit(InstructionCode opcode,
96 InstructionOperand* output,
97 InstructionOperand* a,
98 InstructionOperand* b, size_t temp_count,
99 InstructionOperand** temps) {
100 size_t output_count = output == NULL ? 0 : 1;
101 InstructionOperand* inputs[] = {a, b};
102 size_t input_count = arraysize(inputs);
103 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
104 temps);
105}
106
107
108Instruction* InstructionSelector::Emit(InstructionCode opcode,
109 InstructionOperand* output,
110 InstructionOperand* a,
111 InstructionOperand* b,
112 InstructionOperand* c, size_t temp_count,
113 InstructionOperand** temps) {
114 size_t output_count = output == NULL ? 0 : 1;
115 InstructionOperand* inputs[] = {a, b, c};
116 size_t input_count = arraysize(inputs);
117 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
118 temps);
119}
120
121
122Instruction* InstructionSelector::Emit(
123 InstructionCode opcode, InstructionOperand* output, InstructionOperand* a,
124 InstructionOperand* b, InstructionOperand* c, InstructionOperand* d,
125 size_t temp_count, InstructionOperand** temps) {
126 size_t output_count = output == NULL ? 0 : 1;
127 InstructionOperand* inputs[] = {a, b, c, d};
128 size_t input_count = arraysize(inputs);
129 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
130 temps);
131}
132
133
134Instruction* InstructionSelector::Emit(
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400135 InstructionCode opcode, InstructionOperand* output, InstructionOperand* a,
136 InstructionOperand* b, InstructionOperand* c, InstructionOperand* d,
137 InstructionOperand* e, size_t temp_count, InstructionOperand** temps) {
138 size_t output_count = output == NULL ? 0 : 1;
139 InstructionOperand* inputs[] = {a, b, c, d, e};
140 size_t input_count = arraysize(inputs);
141 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
142 temps);
143}
144
145
146Instruction* InstructionSelector::Emit(
147 InstructionCode opcode, InstructionOperand* output, InstructionOperand* a,
148 InstructionOperand* b, InstructionOperand* c, InstructionOperand* d,
149 InstructionOperand* e, InstructionOperand* f, size_t temp_count,
150 InstructionOperand** temps) {
151 size_t output_count = output == NULL ? 0 : 1;
152 InstructionOperand* inputs[] = {a, b, c, d, e, f};
153 size_t input_count = arraysize(inputs);
154 return Emit(opcode, output_count, &output, input_count, inputs, temp_count,
155 temps);
156}
157
158
159Instruction* InstructionSelector::Emit(
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000160 InstructionCode opcode, size_t output_count, InstructionOperand** outputs,
161 size_t input_count, InstructionOperand** inputs, size_t temp_count,
162 InstructionOperand** temps) {
163 Instruction* instr =
164 Instruction::New(instruction_zone(), opcode, output_count, outputs,
165 input_count, inputs, temp_count, temps);
166 return Emit(instr);
167}
168
169
170Instruction* InstructionSelector::Emit(Instruction* instr) {
171 instructions_.push_back(instr);
172 return instr;
173}
174
175
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000176bool InstructionSelector::CanCover(Node* user, Node* node) const {
177 return node->OwnedBy(user) &&
178 schedule()->block(node) == schedule()->block(user);
179}
180
181
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400182int InstructionSelector::GetVirtualRegister(const Node* node) {
183 if (node_map_[node->id()] == kNodeUnmapped) {
184 node_map_[node->id()] = sequence()->NextVirtualRegister();
185 }
186 return node_map_[node->id()];
187}
188
189
190int InstructionSelector::GetMappedVirtualRegister(const Node* node) const {
191 return node_map_[node->id()];
192}
193
194
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000195bool InstructionSelector::IsDefined(Node* node) const {
196 DCHECK_NOT_NULL(node);
197 NodeId id = node->id();
198 DCHECK(id >= 0);
199 DCHECK(id < static_cast<NodeId>(defined_.size()));
200 return defined_[id];
201}
202
203
204void InstructionSelector::MarkAsDefined(Node* node) {
205 DCHECK_NOT_NULL(node);
206 NodeId id = node->id();
207 DCHECK(id >= 0);
208 DCHECK(id < static_cast<NodeId>(defined_.size()));
209 defined_[id] = true;
210}
211
212
213bool InstructionSelector::IsUsed(Node* node) const {
214 if (!node->op()->HasProperty(Operator::kEliminatable)) return true;
215 NodeId id = node->id();
216 DCHECK(id >= 0);
217 DCHECK(id < static_cast<NodeId>(used_.size()));
218 return used_[id];
219}
220
221
222void InstructionSelector::MarkAsUsed(Node* node) {
223 DCHECK_NOT_NULL(node);
224 NodeId id = node->id();
225 DCHECK(id >= 0);
226 DCHECK(id < static_cast<NodeId>(used_.size()));
227 used_[id] = true;
228}
229
230
231bool InstructionSelector::IsDouble(const Node* node) const {
232 DCHECK_NOT_NULL(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400233 int virtual_register = GetMappedVirtualRegister(node);
234 if (virtual_register == kNodeUnmapped) return false;
235 return sequence()->IsDouble(virtual_register);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000236}
237
238
239void InstructionSelector::MarkAsDouble(Node* node) {
240 DCHECK_NOT_NULL(node);
241 DCHECK(!IsReference(node));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400242 sequence()->MarkAsDouble(GetVirtualRegister(node));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000243}
244
245
246bool InstructionSelector::IsReference(const Node* node) const {
247 DCHECK_NOT_NULL(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400248 int virtual_register = GetMappedVirtualRegister(node);
249 if (virtual_register == kNodeUnmapped) return false;
250 return sequence()->IsReference(virtual_register);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000251}
252
253
254void InstructionSelector::MarkAsReference(Node* node) {
255 DCHECK_NOT_NULL(node);
256 DCHECK(!IsDouble(node));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400257 sequence()->MarkAsReference(GetVirtualRegister(node));
258}
259
260
261void InstructionSelector::MarkAsRepresentation(MachineType rep,
262 InstructionOperand* op) {
263 UnallocatedOperand* unalloc = UnallocatedOperand::cast(op);
264 switch (RepresentationOf(rep)) {
265 case kRepFloat32:
266 case kRepFloat64:
267 sequence()->MarkAsDouble(unalloc->virtual_register());
268 break;
269 case kRepTagged:
270 sequence()->MarkAsReference(unalloc->virtual_register());
271 break;
272 default:
273 break;
274 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000275}
276
277
278void InstructionSelector::MarkAsRepresentation(MachineType rep, Node* node) {
279 DCHECK_NOT_NULL(node);
280 switch (RepresentationOf(rep)) {
281 case kRepFloat32:
282 case kRepFloat64:
283 MarkAsDouble(node);
284 break;
285 case kRepTagged:
286 MarkAsReference(node);
287 break;
288 default:
289 break;
290 }
291}
292
293
294// TODO(bmeurer): Get rid of the CallBuffer business and make
295// InstructionSelector::VisitCall platform independent instead.
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
//
// Scratch container used while lowering a call: collects the call's output
// nodes/operands, the instruction arguments, and the arguments that must be
// pushed on the stack.  All vectors live in {zone}.
CallBuffer::CallBuffer(Zone* zone, const CallDescriptor* d,
                       FrameStateDescriptor* frame_desc)
    : descriptor(d),
      frame_state_descriptor(frame_desc),
      output_nodes(zone),
      outputs(zone),
      instruction_args(zone),
      pushed_nodes(zone) {
  // Reserve the expected capacities up front to avoid zone reallocations.
  output_nodes.reserve(d->ReturnCount());
  outputs.reserve(d->ReturnCount());
  pushed_nodes.reserve(input_count());
  instruction_args.reserve(input_count() + frame_state_value_count());
}
309
310
311// TODO(bmeurer): Get rid of the CallBuffer business and make
312// InstructionSelector::VisitCall platform independent instead.
// Fills {buffer} for the given {call} node: collects live outputs, the callee
// operand, optional frame-state data, and splits the remaining value inputs
// into stack-pushed arguments vs. direct instruction arguments.
// {call_code_immediate}/{call_address_immediate} allow the target to be
// encoded as an immediate instead of a register when the callee is a
// suitable constant.
void InstructionSelector::InitializeCallBuffer(Node* call, CallBuffer* buffer,
                                               bool call_code_immediate,
                                               bool call_address_immediate) {
  OperandGenerator g(this);
  DCHECK_EQ(call->op()->ValueOutputCount(),
            static_cast<int>(buffer->descriptor->ReturnCount()));
  DCHECK_EQ(
      call->op()->ValueInputCount(),
      static_cast<int>(buffer->input_count() + buffer->frame_state_count()));

  if (buffer->descriptor->ReturnCount() > 0) {
    // Collect the projections that represent multiple outputs from this call.
    if (buffer->descriptor->ReturnCount() == 1) {
      buffer->output_nodes.push_back(call);
    } else {
      buffer->output_nodes.resize(buffer->descriptor->ReturnCount(), NULL);
      call->CollectProjections(&buffer->output_nodes);
    }

    // Filter out the outputs that aren't live because no projection uses them.
    size_t outputs_needed_by_framestate =
        buffer->frame_state_descriptor == NULL
            ? 0
            : buffer->frame_state_descriptor->state_combine()
                  .ConsumedOutputCount();
    for (size_t i = 0; i < buffer->output_nodes.size(); i++) {
      bool output_is_live =
          buffer->output_nodes[i] != NULL || i < outputs_needed_by_framestate;
      if (output_is_live) {
        MachineType type =
            buffer->descriptor->GetReturnType(static_cast<int>(i));
        LinkageLocation location =
            buffer->descriptor->GetReturnLocation(static_cast<int>(i));

        // An output consumed only by the frame state has no node; use a
        // temp at the return location instead of defining a value.
        Node* output = buffer->output_nodes[i];
        InstructionOperand* op =
            output == NULL ? g.TempLocation(location, type)
                           : g.DefineAsLocation(output, location, type);
        MarkAsRepresentation(type, op);

        buffer->outputs.push_back(op);
      }
    }
  }

  // The first argument is always the callee code.
  Node* callee = call->InputAt(0);
  switch (buffer->descriptor->kind()) {
    case CallDescriptor::kCallCodeObject:
      buffer->instruction_args.push_back(
          (call_code_immediate && callee->opcode() == IrOpcode::kHeapConstant)
              ? g.UseImmediate(callee)
              : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallAddress:
      buffer->instruction_args.push_back(
          (call_address_immediate &&
           (callee->opcode() == IrOpcode::kInt32Constant ||
            callee->opcode() == IrOpcode::kInt64Constant))
              ? g.UseImmediate(callee)
              : g.UseRegister(callee));
      break;
    case CallDescriptor::kCallJSFunction:
      buffer->instruction_args.push_back(
          g.UseLocation(callee, buffer->descriptor->GetInputLocation(0),
                        buffer->descriptor->GetInputType(0)));
      break;
  }
  DCHECK_EQ(1, static_cast<int>(buffer->instruction_args.size()));

  // If the call needs a frame state, we insert the state information as
  // follows (n is the number of value inputs to the frame state):
  // arg 1 : deoptimization id.
  // arg 2 - arg (n + 1) : value inputs to the frame state.
  if (buffer->frame_state_descriptor != NULL) {
    InstructionSequence::StateId state_id =
        sequence()->AddFrameStateDescriptor(buffer->frame_state_descriptor);
    buffer->instruction_args.push_back(g.TempImmediate(state_id.ToInt()));

    Node* frame_state =
        call->InputAt(static_cast<int>(buffer->descriptor->InputCount()));
    AddFrameStateInputs(frame_state, &buffer->instruction_args,
                        buffer->frame_state_descriptor);
  }
  DCHECK(1 + buffer->frame_state_value_count() ==
         buffer->instruction_args.size());

  size_t input_count = static_cast<size_t>(buffer->input_count());

  // Split the arguments into pushed_nodes and instruction_args. Pushed
  // arguments require an explicit push instruction before the call and do
  // not appear as arguments to the call. Everything else ends up
  // as an InstructionOperand argument to the call.
  auto iter(call->inputs().begin());
  int pushed_count = 0;
  for (size_t index = 0; index < input_count; ++iter, ++index) {
    DCHECK(iter != call->inputs().end());
    DCHECK((*iter)->op()->opcode() != IrOpcode::kFrameState);
    if (index == 0) continue;  // The first argument (callee) is already done.
    InstructionOperand* op =
        g.UseLocation(*iter, buffer->descriptor->GetInputLocation(index),
                      buffer->descriptor->GetInputType(index));
    if (UnallocatedOperand::cast(op)->HasFixedSlotPolicy()) {
      // Fixed-slot arguments are pushed; slot indices are encoded as
      // negative numbers, hence the -index - 1 translation.
      int stack_index = -UnallocatedOperand::cast(op)->fixed_slot_index() - 1;
      if (static_cast<size_t>(stack_index) >= buffer->pushed_nodes.size()) {
        buffer->pushed_nodes.resize(stack_index + 1, NULL);
      }
      DCHECK_EQ(NULL, buffer->pushed_nodes[stack_index]);
      buffer->pushed_nodes[stack_index] = *iter;
      pushed_count++;
    } else {
      buffer->instruction_args.push_back(op);
    }
  }
  CHECK_EQ(pushed_count, static_cast<int>(buffer->pushed_nodes.size()));
  DCHECK(static_cast<size_t>(input_count) ==
         (buffer->instruction_args.size() + buffer->pushed_nodes.size() -
          buffer->frame_state_value_count()));
}
432
433
// Selects instructions for a single basic block.  Instructions are appended
// to instructions_ in reverse order (each node's and the control's chunk is
// reversed after emission); SelectInstructions() later walks them backwards
// to restore forward order.
void InstructionSelector::VisitBlock(BasicBlock* block) {
  DCHECK_EQ(NULL, current_block_);
  current_block_ = block;
  int current_block_end = static_cast<int>(instructions_.size());

  // Generate code for the block control "top down", but schedule the code
  // "bottom up".
  VisitControl(block);
  std::reverse(instructions_.begin() + current_block_end, instructions_.end());

  // Visit code in reverse control flow order, because architecture-specific
  // matching may cover more than one node at a time.
  for (BasicBlock::reverse_iterator i = block->rbegin(); i != block->rend();
       ++i) {
    Node* node = *i;
    // Skip nodes that are unused or already defined.
    if (!IsUsed(node) || IsDefined(node)) continue;
    // Generate code for this node "top down", but schedule the code "bottom
    // up".
    size_t current_node_end = instructions_.size();
    VisitNode(node);
    std::reverse(instructions_.begin() + current_node_end, instructions_.end());
  }

  // We're done with the block.
  // Note: code_start > code_end because the buffer grew while holding the
  // block's instructions in reverse.
  InstructionBlock* instruction_block =
      sequence()->InstructionBlockAt(block->GetRpoNumber());
  instruction_block->set_code_start(static_cast<int>(instructions_.size()));
  instruction_block->set_code_end(current_block_end);

  current_block_ = NULL;
}
466
467
468static inline void CheckNoPhis(const BasicBlock* block) {
469#ifdef DEBUG
470 // Branch targets should not have phis.
471 for (BasicBlock::const_iterator i = block->begin(); i != block->end(); ++i) {
472 const Node* node = *i;
473 CHECK_NE(IrOpcode::kPhi, node->opcode());
474 }
475#endif
476}
477
478
// Lowers the control-flow exit of {block} (goto/branch/return/throw) by
// dispatching on the block's control kind.
void InstructionSelector::VisitControl(BasicBlock* block) {
  Node* input = block->control_input();
  switch (block->control()) {
    case BasicBlock::kGoto:
      return VisitGoto(block->SuccessorAt(0));
    case BasicBlock::kBranch: {
      DCHECK_EQ(IrOpcode::kBranch, input->opcode());
      BasicBlock* tbranch = block->SuccessorAt(0);
      BasicBlock* fbranch = block->SuccessorAt(1);
      // SSA deconstruction requires targets of branches not to have phis.
      // Edge split form guarantees this property, but is more strict.
      CheckNoPhis(tbranch);
      CheckNoPhis(fbranch);
      // Degenerate branch with identical targets: emit a plain jump.
      if (tbranch == fbranch) return VisitGoto(tbranch);
      return VisitBranch(input, tbranch, fbranch);
    }
    case BasicBlock::kReturn: {
      // If the result itself is a return, return its input.
      Node* value = (input != NULL && input->opcode() == IrOpcode::kReturn)
                        ? input->InputAt(0)
                        : input;
      return VisitReturn(value);
    }
    case BasicBlock::kThrow:
      return VisitThrow(input);
    case BasicBlock::kNone: {
      // TODO(titzer): exit block doesn't have control.
      DCHECK(input == NULL);
      break;
    }
    default:
      UNREACHABLE();
      break;
  }
}
514
515
// Returns the machine type produced by {node}, derived purely from its
// opcode (plus the operator parameter for parameters, phis and loads).
// Graph artifacts and value-less operators map to kMachNone; unknown
// opcodes are a fatal error.
MachineType InstructionSelector::GetMachineType(Node* node) {
  DCHECK_NOT_NULL(schedule()->block(node));  // should only use scheduled nodes.
  switch (node->opcode()) {
    case IrOpcode::kStart:
    case IrOpcode::kLoop:
    case IrOpcode::kEnd:
    case IrOpcode::kBranch:
    case IrOpcode::kIfTrue:
    case IrOpcode::kIfFalse:
    case IrOpcode::kEffectPhi:
    case IrOpcode::kMerge:
    case IrOpcode::kTerminate:
      // No code needed for these graph artifacts.
      return kMachNone;
    case IrOpcode::kFinish:
      return kMachAnyTagged;
    case IrOpcode::kParameter:
      return linkage()->GetParameterType(OpParameter<int>(node));
    case IrOpcode::kPhi:
      return OpParameter<MachineType>(node);
    case IrOpcode::kProjection:
      // TODO(jarin) Really project from outputs.
      return kMachAnyTagged;
    case IrOpcode::kInt32Constant:
      return kMachInt32;
    case IrOpcode::kInt64Constant:
      return kMachInt64;
    case IrOpcode::kExternalConstant:
      return kMachPtr;
    case IrOpcode::kFloat64Constant:
      return kMachFloat64;
    case IrOpcode::kHeapConstant:
    case IrOpcode::kNumberConstant:
      return kMachAnyTagged;
    case IrOpcode::kCall:
      return kMachAnyTagged;
    case IrOpcode::kFrameState:
    case IrOpcode::kStateValues:
      return kMachNone;
    case IrOpcode::kLoad:
      return OpParameter<LoadRepresentation>(node);
    case IrOpcode::kStore:
      return kMachNone;
    case IrOpcode::kCheckedLoad:
      return OpParameter<MachineType>(node);
    case IrOpcode::kCheckedStore:
      return kMachNone;
    // 32-bit word operations produce int32; comparisons produce bool.
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
      return kMachInt32;
    case IrOpcode::kWord32Equal:
      return kMachBool;
    case IrOpcode::kWord64And:
    case IrOpcode::kWord64Or:
    case IrOpcode::kWord64Xor:
    case IrOpcode::kWord64Shl:
    case IrOpcode::kWord64Shr:
    case IrOpcode::kWord64Sar:
    case IrOpcode::kWord64Ror:
      return kMachInt64;
    case IrOpcode::kWord64Equal:
      return kMachBool;
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32AddWithOverflow:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32SubWithOverflow:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32Mod:
      return kMachInt32;
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
      return kMachBool;
    case IrOpcode::kInt64Add:
    case IrOpcode::kInt64Sub:
    case IrOpcode::kInt64Mul:
    case IrOpcode::kInt64Div:
    case IrOpcode::kInt64Mod:
      return kMachInt64;
    case IrOpcode::kInt64LessThan:
    case IrOpcode::kInt64LessThanOrEqual:
      return kMachBool;
    // Conversions return the type they convert to.
    case IrOpcode::kChangeFloat32ToFloat64:
    case IrOpcode::kChangeInt32ToFloat64:
    case IrOpcode::kChangeUint32ToFloat64:
      return kMachFloat64;
    case IrOpcode::kChangeFloat64ToInt32:
      return kMachInt32;
    case IrOpcode::kChangeFloat64ToUint32:
      return kMachUint32;
    case IrOpcode::kChangeInt32ToInt64:
      return kMachInt64;
    case IrOpcode::kChangeUint32ToUint64:
      return kMachUint64;
    case IrOpcode::kTruncateFloat64ToFloat32:
      return kMachFloat32;
    case IrOpcode::kTruncateFloat64ToInt32:
    case IrOpcode::kTruncateInt64ToInt32:
      return kMachInt32;
    case IrOpcode::kFloat64Add:
    case IrOpcode::kFloat64Sub:
    case IrOpcode::kFloat64Mul:
    case IrOpcode::kFloat64Div:
    case IrOpcode::kFloat64Mod:
    case IrOpcode::kFloat64Sqrt:
    case IrOpcode::kFloat64Floor:
    case IrOpcode::kFloat64Ceil:
    case IrOpcode::kFloat64RoundTruncate:
    case IrOpcode::kFloat64RoundTiesAway:
      return kMachFloat64;
    case IrOpcode::kFloat64Equal:
    case IrOpcode::kFloat64LessThan:
    case IrOpcode::kFloat64LessThanOrEqual:
      return kMachBool;
    default:
      V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d",
               node->opcode(), node->op()->mnemonic(), node->id());
  }
  return kMachNone;
}
643
644
// Main per-node dispatch: records the node's source position (always for
// calls, otherwise behind --turbo_source_positions), marks its double/
// reference representation where needed, and forwards to the matching
// architecture-specific Visit* routine.  Unknown opcodes are fatal.
void InstructionSelector::VisitNode(Node* node) {
  DCHECK_NOT_NULL(schedule()->block(node));  // should only use scheduled nodes.
  SourcePosition source_position = source_positions_->GetSourcePosition(node);
  if (!source_position.IsUnknown()) {
    DCHECK(!source_position.IsInvalid());
    if (FLAG_turbo_source_positions || node->opcode() == IrOpcode::kCall) {
      Emit(SourcePositionInstruction::New(instruction_zone(), source_position));
    }
  }
  switch (node->opcode()) {
    case IrOpcode::kStart:
    case IrOpcode::kLoop:
    case IrOpcode::kEnd:
    case IrOpcode::kBranch:
    case IrOpcode::kIfTrue:
    case IrOpcode::kIfFalse:
    case IrOpcode::kEffectPhi:
    case IrOpcode::kMerge:
      // No code needed for these graph artifacts.
      return;
    case IrOpcode::kFinish:
      return MarkAsReference(node), VisitFinish(node);
    case IrOpcode::kParameter: {
      MachineType type = linkage()->GetParameterType(OpParameter<int>(node));
      MarkAsRepresentation(type, node);
      return VisitParameter(node);
    }
    case IrOpcode::kPhi: {
      MachineType type = OpParameter<MachineType>(node);
      MarkAsRepresentation(type, node);
      return VisitPhi(node);
    }
    case IrOpcode::kProjection:
      return VisitProjection(node);
    case IrOpcode::kInt32Constant:
    case IrOpcode::kInt64Constant:
    case IrOpcode::kExternalConstant:
      return VisitConstant(node);
    case IrOpcode::kFloat32Constant:
      return MarkAsDouble(node), VisitConstant(node);
    case IrOpcode::kFloat64Constant:
      return MarkAsDouble(node), VisitConstant(node);
    case IrOpcode::kHeapConstant:
    case IrOpcode::kNumberConstant:
      // TODO(turbofan): only mark non-smis as references.
      return MarkAsReference(node), VisitConstant(node);
    case IrOpcode::kCall:
      return VisitCall(node);
    case IrOpcode::kFrameState:
    case IrOpcode::kStateValues:
      return;
    case IrOpcode::kLoad: {
      LoadRepresentation rep = OpParameter<LoadRepresentation>(node);
      MarkAsRepresentation(rep, node);
      return VisitLoad(node);
    }
    case IrOpcode::kStore:
      return VisitStore(node);
    case IrOpcode::kWord32And:
      return VisitWord32And(node);
    case IrOpcode::kWord32Or:
      return VisitWord32Or(node);
    case IrOpcode::kWord32Xor:
      return VisitWord32Xor(node);
    case IrOpcode::kWord32Shl:
      return VisitWord32Shl(node);
    case IrOpcode::kWord32Shr:
      return VisitWord32Shr(node);
    case IrOpcode::kWord32Sar:
      return VisitWord32Sar(node);
    case IrOpcode::kWord32Ror:
      return VisitWord32Ror(node);
    case IrOpcode::kWord32Equal:
      return VisitWord32Equal(node);
    case IrOpcode::kWord64And:
      return VisitWord64And(node);
    case IrOpcode::kWord64Or:
      return VisitWord64Or(node);
    case IrOpcode::kWord64Xor:
      return VisitWord64Xor(node);
    case IrOpcode::kWord64Shl:
      return VisitWord64Shl(node);
    case IrOpcode::kWord64Shr:
      return VisitWord64Shr(node);
    case IrOpcode::kWord64Sar:
      return VisitWord64Sar(node);
    case IrOpcode::kWord64Ror:
      return VisitWord64Ror(node);
    case IrOpcode::kWord64Equal:
      return VisitWord64Equal(node);
    case IrOpcode::kInt32Add:
      return VisitInt32Add(node);
    case IrOpcode::kInt32AddWithOverflow:
      return VisitInt32AddWithOverflow(node);
    case IrOpcode::kInt32Sub:
      return VisitInt32Sub(node);
    case IrOpcode::kInt32SubWithOverflow:
      return VisitInt32SubWithOverflow(node);
    case IrOpcode::kInt32Mul:
      return VisitInt32Mul(node);
    case IrOpcode::kInt32MulHigh:
      return VisitInt32MulHigh(node);
    case IrOpcode::kInt32Div:
      return VisitInt32Div(node);
    case IrOpcode::kInt32Mod:
      return VisitInt32Mod(node);
    case IrOpcode::kInt32LessThan:
      return VisitInt32LessThan(node);
    case IrOpcode::kInt32LessThanOrEqual:
      return VisitInt32LessThanOrEqual(node);
    case IrOpcode::kUint32Div:
      return VisitUint32Div(node);
    case IrOpcode::kUint32LessThan:
      return VisitUint32LessThan(node);
    case IrOpcode::kUint32LessThanOrEqual:
      return VisitUint32LessThanOrEqual(node);
    case IrOpcode::kUint32Mod:
      return VisitUint32Mod(node);
    case IrOpcode::kUint32MulHigh:
      return VisitUint32MulHigh(node);
    case IrOpcode::kInt64Add:
      return VisitInt64Add(node);
    case IrOpcode::kInt64Sub:
      return VisitInt64Sub(node);
    case IrOpcode::kInt64Mul:
      return VisitInt64Mul(node);
    case IrOpcode::kInt64Div:
      return VisitInt64Div(node);
    case IrOpcode::kInt64Mod:
      return VisitInt64Mod(node);
    case IrOpcode::kInt64LessThan:
      return VisitInt64LessThan(node);
    case IrOpcode::kInt64LessThanOrEqual:
      return VisitInt64LessThanOrEqual(node);
    case IrOpcode::kUint64Div:
      return VisitUint64Div(node);
    case IrOpcode::kUint64LessThan:
      return VisitUint64LessThan(node);
    case IrOpcode::kUint64Mod:
      return VisitUint64Mod(node);
    // Conversions producing doubles mark the result register as double.
    case IrOpcode::kChangeFloat32ToFloat64:
      return MarkAsDouble(node), VisitChangeFloat32ToFloat64(node);
    case IrOpcode::kChangeInt32ToFloat64:
      return MarkAsDouble(node), VisitChangeInt32ToFloat64(node);
    case IrOpcode::kChangeUint32ToFloat64:
      return MarkAsDouble(node), VisitChangeUint32ToFloat64(node);
    case IrOpcode::kChangeFloat64ToInt32:
      return VisitChangeFloat64ToInt32(node);
    case IrOpcode::kChangeFloat64ToUint32:
      return VisitChangeFloat64ToUint32(node);
    case IrOpcode::kChangeInt32ToInt64:
      return VisitChangeInt32ToInt64(node);
    case IrOpcode::kChangeUint32ToUint64:
      return VisitChangeUint32ToUint64(node);
    case IrOpcode::kTruncateFloat64ToFloat32:
      return MarkAsDouble(node), VisitTruncateFloat64ToFloat32(node);
    case IrOpcode::kTruncateFloat64ToInt32:
      return VisitTruncateFloat64ToInt32(node);
    case IrOpcode::kTruncateInt64ToInt32:
      return VisitTruncateInt64ToInt32(node);
    case IrOpcode::kFloat64Add:
      return MarkAsDouble(node), VisitFloat64Add(node);
    case IrOpcode::kFloat64Sub:
      return MarkAsDouble(node), VisitFloat64Sub(node);
    case IrOpcode::kFloat64Mul:
      return MarkAsDouble(node), VisitFloat64Mul(node);
    case IrOpcode::kFloat64Div:
      return MarkAsDouble(node), VisitFloat64Div(node);
    case IrOpcode::kFloat64Mod:
      return MarkAsDouble(node), VisitFloat64Mod(node);
    case IrOpcode::kFloat64Sqrt:
      return MarkAsDouble(node), VisitFloat64Sqrt(node);
    case IrOpcode::kFloat64Equal:
      return VisitFloat64Equal(node);
    case IrOpcode::kFloat64LessThan:
      return VisitFloat64LessThan(node);
    case IrOpcode::kFloat64LessThanOrEqual:
      return VisitFloat64LessThanOrEqual(node);
    case IrOpcode::kFloat64Floor:
      return MarkAsDouble(node), VisitFloat64Floor(node);
    case IrOpcode::kFloat64Ceil:
      return MarkAsDouble(node), VisitFloat64Ceil(node);
    case IrOpcode::kFloat64RoundTruncate:
      return MarkAsDouble(node), VisitFloat64RoundTruncate(node);
    case IrOpcode::kFloat64RoundTiesAway:
      return MarkAsDouble(node), VisitFloat64RoundTiesAway(node);
    case IrOpcode::kLoadStackPointer:
      return VisitLoadStackPointer(node);
    case IrOpcode::kCheckedLoad: {
      MachineType rep = OpParameter<MachineType>(node);
      MarkAsRepresentation(rep, node);
      return VisitCheckedLoad(node);
    }
    case IrOpcode::kCheckedStore:
      return VisitCheckedStore(node);
    default:
      V8_Fatal(__FILE__, __LINE__, "Unexpected operator #%d:%s @ node #%d",
               node->opcode(), node->op()->mnemonic(), node->id());
      break;
  }
}
846
847
848#if V8_TURBOFAN_BACKEND
849
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000850void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
851 OperandGenerator g(this);
852 Emit(kArchTruncateDoubleToI, g.DefineAsRegister(node),
853 g.UseRegister(node->InputAt(0)));
854}
855
856
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400857void InstructionSelector::VisitLoadStackPointer(Node* node) {
858 OperandGenerator g(this);
859 Emit(kArchStackPointer, g.DefineAsRegister(node));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000860}
861
862#endif // V8_TURBOFAN_BACKEND
863
// 32 bit targets do not implement the following 64-bit instructions.
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400865#if V8_TARGET_ARCH_32_BIT && !V8_TARGET_ARCH_X64 && V8_TURBOFAN_BACKEND
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000866
// On 32-bit targets the 64-bit word, arithmetic, comparison, and conversion
// operators are not lowered by the instruction selector; reaching any of
// these visitors aborts via UNIMPLEMENTED().
void InstructionSelector::VisitWord64And(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Or(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Xor(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Shl(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Shr(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Sar(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Ror(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitWord64Equal(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64Add(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64Sub(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64Mul(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64Div(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64LessThan(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  UNIMPLEMENTED();
}


void InstructionSelector::VisitUint64Div(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitInt64Mod(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitUint64LessThan(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitUint64Mod(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  UNIMPLEMENTED();
}


void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  UNIMPLEMENTED();
}


void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  UNIMPLEMENTED();
}
936
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400937#endif // V8_TARGET_ARCH_32_BIT && !V8_TARGET_ARCH_X64 && V8_TURBOFAN_BACKEND
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000938
939
940void InstructionSelector::VisitFinish(Node* node) {
941 OperandGenerator g(this);
942 Node* value = node->InputAt(0);
943 Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
944}
945
946
947void InstructionSelector::VisitParameter(Node* node) {
948 OperandGenerator g(this);
949 int index = OpParameter<int>(node);
950 Emit(kArchNop,
951 g.DefineAsLocation(node, linkage()->GetParameterLocation(index),
952 linkage()->GetParameterType(index)));
953}
954
955
956void InstructionSelector::VisitPhi(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400957 const int input_count = node->op()->ValueInputCount();
958 PhiInstruction* phi = new (instruction_zone())
959 PhiInstruction(instruction_zone(), GetVirtualRegister(node),
960 static_cast<size_t>(input_count));
961 sequence()->InstructionBlockAt(current_block_->GetRpoNumber())->AddPhi(phi);
962 for (int i = 0; i < input_count; ++i) {
963 Node* const input = node->InputAt(i);
964 MarkAsUsed(input);
965 phi->Extend(instruction_zone(), GetVirtualRegister(input));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000966 }
967}
968
969
970void InstructionSelector::VisitProjection(Node* node) {
971 OperandGenerator g(this);
972 Node* value = node->InputAt(0);
973 switch (value->opcode()) {
974 case IrOpcode::kInt32AddWithOverflow:
975 case IrOpcode::kInt32SubWithOverflow:
976 if (OpParameter<size_t>(node) == 0) {
977 Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
978 } else {
979 DCHECK(OpParameter<size_t>(node) == 1u);
980 MarkAsUsed(value);
981 }
982 break;
983 default:
984 break;
985 }
986}
987
988
989void InstructionSelector::VisitConstant(Node* node) {
990 // We must emit a NOP here because every live range needs a defining
991 // instruction in the register allocator.
992 OperandGenerator g(this);
993 Emit(kArchNop, g.DefineAsConstant(node));
994}
995
996
997void InstructionSelector::VisitGoto(BasicBlock* target) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400998 // jump to the next block.
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000999 OperandGenerator g(this);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001000 Emit(kArchJmp, NULL, g.Label(target))->MarkAsControl();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001001}
1002
1003
1004void InstructionSelector::VisitReturn(Node* value) {
1005 OperandGenerator g(this);
1006 if (value != NULL) {
1007 Emit(kArchRet, NULL, g.UseLocation(value, linkage()->GetReturnLocation(),
1008 linkage()->GetReturnType()));
1009 } else {
1010 Emit(kArchRet, NULL);
1011 }
1012}
1013
1014
// Throw is not yet lowered by the instruction selector.
void InstructionSelector::VisitThrow(Node* value) {
  UNIMPLEMENTED();  // TODO(titzer)
}
1018
1019
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001020void InstructionSelector::FillTypeVectorFromStateValues(
1021 ZoneVector<MachineType>* types, Node* state_values) {
1022 DCHECK(state_values->opcode() == IrOpcode::kStateValues);
1023 int count = state_values->InputCount();
1024 types->reserve(static_cast<size_t>(count));
1025 for (int i = 0; i < count; i++) {
1026 types->push_back(GetMachineType(state_values->InputAt(i)));
1027 }
1028}
1029
1030
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001031FrameStateDescriptor* InstructionSelector::GetFrameStateDescriptor(
1032 Node* state) {
1033 DCHECK(state->opcode() == IrOpcode::kFrameState);
1034 DCHECK_EQ(5, state->InputCount());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001035 DCHECK_EQ(IrOpcode::kStateValues, state->InputAt(0)->opcode());
1036 DCHECK_EQ(IrOpcode::kStateValues, state->InputAt(1)->opcode());
1037 DCHECK_EQ(IrOpcode::kStateValues, state->InputAt(2)->opcode());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001038 FrameStateCallInfo state_info = OpParameter<FrameStateCallInfo>(state);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001039
1040 int parameters = state->InputAt(0)->InputCount();
1041 int locals = state->InputAt(1)->InputCount();
1042 int stack = state->InputAt(2)->InputCount();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001043
1044 FrameStateDescriptor* outer_state = NULL;
1045 Node* outer_node = state->InputAt(4);
1046 if (outer_node->opcode() == IrOpcode::kFrameState) {
1047 outer_state = GetFrameStateDescriptor(outer_node);
1048 }
1049
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001050 return new (instruction_zone()) FrameStateDescriptor(
1051 instruction_zone(), state_info, parameters, locals, stack, outer_state);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001052}
1053
1054
1055static InstructionOperand* UseOrImmediate(OperandGenerator* g, Node* input) {
1056 switch (input->opcode()) {
1057 case IrOpcode::kInt32Constant:
1058 case IrOpcode::kNumberConstant:
1059 case IrOpcode::kFloat64Constant:
1060 case IrOpcode::kHeapConstant:
1061 return g->UseImmediate(input);
1062 default:
1063 return g->UseUnique(input);
1064 }
1065}
1066
1067
1068void InstructionSelector::AddFrameStateInputs(
1069 Node* state, InstructionOperandVector* inputs,
1070 FrameStateDescriptor* descriptor) {
1071 DCHECK_EQ(IrOpcode::kFrameState, state->op()->opcode());
1072
1073 if (descriptor->outer_state() != NULL) {
1074 AddFrameStateInputs(state->InputAt(4), inputs, descriptor->outer_state());
1075 }
1076
1077 Node* parameters = state->InputAt(0);
1078 Node* locals = state->InputAt(1);
1079 Node* stack = state->InputAt(2);
1080 Node* context = state->InputAt(3);
1081
1082 DCHECK_EQ(IrOpcode::kStateValues, parameters->op()->opcode());
1083 DCHECK_EQ(IrOpcode::kStateValues, locals->op()->opcode());
1084 DCHECK_EQ(IrOpcode::kStateValues, stack->op()->opcode());
1085
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001086 DCHECK_EQ(static_cast<int>(descriptor->parameters_count()),
1087 parameters->InputCount());
1088 DCHECK_EQ(static_cast<int>(descriptor->locals_count()), locals->InputCount());
1089 DCHECK_EQ(static_cast<int>(descriptor->stack_count()), stack->InputCount());
1090
1091 ZoneVector<MachineType> types(instruction_zone());
1092 types.reserve(descriptor->GetSize());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001093
1094 OperandGenerator g(this);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001095 size_t value_index = 0;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001096 for (int i = 0; i < static_cast<int>(descriptor->parameters_count()); i++) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001097 Node* input_node = parameters->InputAt(i);
1098 inputs->push_back(UseOrImmediate(&g, input_node));
1099 descriptor->SetType(value_index++, GetMachineType(input_node));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001100 }
1101 if (descriptor->HasContext()) {
1102 inputs->push_back(UseOrImmediate(&g, context));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001103 descriptor->SetType(value_index++, kMachAnyTagged);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001104 }
1105 for (int i = 0; i < static_cast<int>(descriptor->locals_count()); i++) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001106 Node* input_node = locals->InputAt(i);
1107 inputs->push_back(UseOrImmediate(&g, input_node));
1108 descriptor->SetType(value_index++, GetMachineType(input_node));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001109 }
1110 for (int i = 0; i < static_cast<int>(descriptor->stack_count()); i++) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001111 Node* input_node = stack->InputAt(i);
1112 inputs->push_back(UseOrImmediate(&g, input_node));
1113 descriptor->SetType(value_index++, GetMachineType(input_node));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001114 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001115 DCHECK(value_index == descriptor->GetSize());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001116}
1117
1118
1119#if !V8_TURBOFAN_BACKEND
1120
// Fallback stubs used when no TurboFan backend is compiled in: every
// machine-operator visitor expanded from MACHINE_OP_LIST simply aborts.
#define DECLARE_UNIMPLEMENTED_SELECTOR(x) \
  void InstructionSelector::Visit##x(Node* node) { UNIMPLEMENTED(); }
MACHINE_OP_LIST(DECLARE_UNIMPLEMENTED_SELECTOR)
#undef DECLARE_UNIMPLEMENTED_SELECTOR


void InstructionSelector::VisitCall(Node* node) { UNIMPLEMENTED(); }


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  UNIMPLEMENTED();
}


// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  // Without a backend, no optional machine operators are supported.
  return MachineOperatorBuilder::Flag::kNoFlags;
}
1141
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001142#endif // !V8_TURBOFAN_BACKEND
1143
1144} // namespace compiler
1145} // namespace internal
1146} // namespace v8