// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_
#define V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_

#include "src/compiler/instruction.h"
#include "src/compiler/instruction-selector.h"
#include "src/compiler/linkage.h"

namespace v8 {
namespace internal {
namespace compiler {

// A helper class for the instruction selector that simplifies construction of
// Operands. This class implements a base for architecture-specific helpers.
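//
// A hedged usage sketch (the kSampleArchAdd opcode below is illustrative and
// not defined in this file): an architecture-specific selector typically
// writes something like
//
//   void InstructionSelector::VisitInt32Add(Node* node) {
//     OperandGenerator g(this);
//     Emit(kSampleArchAdd, g.DefineAsRegister(node),
//          g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
//   }
//
// The Define* helpers describe the instruction's outputs and the Use* helpers
// its inputs, all as unallocated operands that the register allocator
// resolves later.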
class OperandGenerator {
 public:
  explicit OperandGenerator(InstructionSelector* selector)
      : selector_(selector) {}

  InstructionOperand* DefineAsRegister(Node* node) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER));
  }

  InstructionOperand* DefineSameAsFirst(Node* result) {
    return Define(result, new (zone())
                  UnallocatedOperand(UnallocatedOperand::SAME_AS_FIRST_INPUT));
  }

  InstructionOperand* DefineAsFixed(Node* node, Register reg) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                     Register::ToAllocationIndex(reg)));
  }

  InstructionOperand* DefineAsFixed(Node* node, DoubleRegister reg) {
    return Define(node, new (zone())
                  UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                     DoubleRegister::ToAllocationIndex(reg)));
  }

  InstructionOperand* DefineAsConstant(Node* node) {
    selector()->MarkAsDefined(node);
    sequence()->AddConstant(node->id(), ToConstant(node));
    return ConstantOperand::Create(node->id(), zone());
  }

  InstructionOperand* DefineAsLocation(Node* node, LinkageLocation location,
                                       MachineType type) {
    return Define(node, ToUnallocatedOperand(location, type));
  }

  InstructionOperand* Use(Node* node) {
    return Use(node,
               new (zone()) UnallocatedOperand(
                   UnallocatedOperand::ANY, UnallocatedOperand::USED_AT_START));
  }

  InstructionOperand* UseRegister(Node* node) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                  UnallocatedOperand::USED_AT_START));
  }

  // Use register or operand for the node. If a register is chosen, it won't
  // alias any temporary or output registers.
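  // (The guarantee comes from the operand not being marked USED_AT_START, so
  // its live range is treated as spanning the whole instruction; the same
  // applies to UseUniqueRegister below.)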
  InstructionOperand* UseUnique(Node* node) {
    return Use(node, new (zone()) UnallocatedOperand(UnallocatedOperand::ANY));
  }

  // Use a unique register for the node that does not alias any temporary or
  // output registers.
  InstructionOperand* UseUniqueRegister(Node* node) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER));
  }

  InstructionOperand* UseFixed(Node* node, Register reg) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                  Register::ToAllocationIndex(reg)));
  }

  InstructionOperand* UseFixed(Node* node, DoubleRegister reg) {
    return Use(node, new (zone())
               UnallocatedOperand(UnallocatedOperand::FIXED_DOUBLE_REGISTER,
                                  DoubleRegister::ToAllocationIndex(reg)));
  }

  InstructionOperand* UseImmediate(Node* node) {
    int index = sequence()->AddImmediate(ToConstant(node));
    return ImmediateOperand::Create(index, zone());
  }

  InstructionOperand* UseLocation(Node* node, LinkageLocation location,
                                  MachineType type) {
    return Use(node, ToUnallocatedOperand(location, type));
  }

  InstructionOperand* TempRegister() {
    UnallocatedOperand* op =
        new (zone()) UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        UnallocatedOperand::USED_AT_START);
    op->set_virtual_register(sequence()->NextVirtualRegister());
    return op;
  }

  InstructionOperand* TempDoubleRegister() {
    UnallocatedOperand* op =
        new (zone()) UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER,
                                        UnallocatedOperand::USED_AT_START);
    op->set_virtual_register(sequence()->NextVirtualRegister());
    sequence()->MarkAsDouble(op->virtual_register());
    return op;
  }

  InstructionOperand* TempRegister(Register reg) {
    return new (zone()) UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                           Register::ToAllocationIndex(reg));
  }

  InstructionOperand* TempImmediate(int32_t imm) {
    int index = sequence()->AddImmediate(Constant(imm));
    return ImmediateOperand::Create(index, zone());
  }

  InstructionOperand* Label(BasicBlock* block) {
    // TODO(bmeurer): We misuse ImmediateOperand here.
    return TempImmediate(block->id());
  }

 protected:
  Graph* graph() const { return selector()->graph(); }
  InstructionSelector* selector() const { return selector_; }
  InstructionSequence* sequence() const { return selector()->sequence(); }
  Isolate* isolate() const { return zone()->isolate(); }
  Zone* zone() const { return selector()->instruction_zone(); }

 private:
  static Constant ToConstant(const Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return Constant(OpParameter<int32_t>(node));
      case IrOpcode::kInt64Constant:
        return Constant(OpParameter<int64_t>(node));
      case IrOpcode::kNumberConstant:
      case IrOpcode::kFloat64Constant:
        return Constant(OpParameter<double>(node));
      case IrOpcode::kExternalConstant:
        return Constant(OpParameter<ExternalReference>(node));
      case IrOpcode::kHeapConstant:
        return Constant(OpParameter<Unique<HeapObject> >(node).handle());
      default:
        break;
    }
    UNREACHABLE();
    return Constant(static_cast<int32_t>(0));
  }

  UnallocatedOperand* Define(Node* node, UnallocatedOperand* operand) {
    DCHECK_NOT_NULL(node);
    DCHECK_NOT_NULL(operand);
    operand->set_virtual_register(node->id());
    selector()->MarkAsDefined(node);
    return operand;
  }

  UnallocatedOperand* Use(Node* node, UnallocatedOperand* operand) {
    DCHECK_NOT_NULL(node);
    DCHECK_NOT_NULL(operand);
    operand->set_virtual_register(node->id());
    selector()->MarkAsUsed(node);
    return operand;
  }

  UnallocatedOperand* ToUnallocatedOperand(LinkageLocation location,
                                           MachineType type) {
    if (location.location_ == LinkageLocation::ANY_REGISTER) {
      return new (zone())
          UnallocatedOperand(UnallocatedOperand::MUST_HAVE_REGISTER);
    }
    if (location.location_ < 0) {
      return new (zone()) UnallocatedOperand(UnallocatedOperand::FIXED_SLOT,
                                             location.location_);
    }
    if (RepresentationOf(type) == kRepFloat64) {
      return new (zone()) UnallocatedOperand(
          UnallocatedOperand::FIXED_DOUBLE_REGISTER, location.location_);
    }
    return new (zone()) UnallocatedOperand(UnallocatedOperand::FIXED_REGISTER,
                                           location.location_);
  }

  InstructionSelector* selector_;
};


// The flags continuation is a way to combine a branch or a materialization
// of a boolean value with an instruction that sets the flags register.
// The whole instruction is treated as a unit by the register allocator, and
// thus no spills or moves can be introduced between the flags-setting
// instruction and the branch or set it should be combined with.
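//
// A hedged usage sketch (everything outside this header is illustrative): to
// fold a comparison into a conditional branch, a backend builds
//
//   FlagsContinuation cont(kSignedLessThan, true_block, false_block);
//
// and emits the compare with an opcode that was passed through
// cont.Encode(opcode), so the branch targets travel as inputs of that single
// instruction. The (condition, result) constructor is used the same way to
// materialize a boolean value instead of branching.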
class FlagsContinuation FINAL {
 public:
  FlagsContinuation() : mode_(kFlags_none) {}

  // Creates a new flags continuation from the given condition and true/false
  // blocks.
  FlagsContinuation(FlagsCondition condition, BasicBlock* true_block,
                    BasicBlock* false_block)
      : mode_(kFlags_branch),
        condition_(condition),
        true_block_(true_block),
        false_block_(false_block) {
    DCHECK_NOT_NULL(true_block);
    DCHECK_NOT_NULL(false_block);
  }

  // Creates a new flags continuation from the given condition and result node.
  FlagsContinuation(FlagsCondition condition, Node* result)
      : mode_(kFlags_set), condition_(condition), result_(result) {
    DCHECK_NOT_NULL(result);
  }

  bool IsNone() const { return mode_ == kFlags_none; }
  bool IsBranch() const { return mode_ == kFlags_branch; }
  bool IsSet() const { return mode_ == kFlags_set; }
  FlagsCondition condition() const {
    DCHECK(!IsNone());
    return condition_;
  }
  Node* result() const {
    DCHECK(IsSet());
    return result_;
  }
  BasicBlock* true_block() const {
    DCHECK(IsBranch());
    return true_block_;
  }
  BasicBlock* false_block() const {
    DCHECK(IsBranch());
    return false_block_;
  }

  void Negate() {
    DCHECK(!IsNone());
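    // Relies on FlagsCondition laying out each condition next to its negation
    // (e.g. kEqual/kNotEqual), so toggling the low bit negates the condition.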
    condition_ = static_cast<FlagsCondition>(condition_ ^ 1);
  }

  void Commute() {
    DCHECK(!IsNone());
    switch (condition_) {
      case kEqual:
      case kNotEqual:
      case kOverflow:
      case kNotOverflow:
        return;
      case kSignedLessThan:
        condition_ = kSignedGreaterThan;
        return;
      case kSignedGreaterThanOrEqual:
        condition_ = kSignedLessThanOrEqual;
        return;
      case kSignedLessThanOrEqual:
        condition_ = kSignedGreaterThanOrEqual;
        return;
      case kSignedGreaterThan:
        condition_ = kSignedLessThan;
        return;
      case kUnsignedLessThan:
        condition_ = kUnsignedGreaterThan;
        return;
      case kUnsignedGreaterThanOrEqual:
        condition_ = kUnsignedLessThanOrEqual;
        return;
      case kUnsignedLessThanOrEqual:
        condition_ = kUnsignedGreaterThanOrEqual;
        return;
      case kUnsignedGreaterThan:
        condition_ = kUnsignedLessThan;
        return;
      case kUnorderedEqual:
      case kUnorderedNotEqual:
        return;
      case kUnorderedLessThan:
        condition_ = kUnorderedGreaterThan;
        return;
      case kUnorderedGreaterThanOrEqual:
        condition_ = kUnorderedLessThanOrEqual;
        return;
      case kUnorderedLessThanOrEqual:
        condition_ = kUnorderedGreaterThanOrEqual;
        return;
      case kUnorderedGreaterThan:
        condition_ = kUnorderedLessThan;
        return;
    }
    UNREACHABLE();
  }

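  // Replaces the current condition with |condition|; if the old condition was
  // kEqual (typically because the caller was testing a flag-producing node
  // against zero), the new condition is negated to preserve the meaning.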
  void OverwriteAndNegateIfEqual(FlagsCondition condition) {
    bool negate = condition_ == kEqual;
    condition_ = condition;
    if (negate) Negate();
  }

  void SwapBlocks() { std::swap(true_block_, false_block_); }

  // Encodes this flags continuation into the given opcode.
  InstructionCode Encode(InstructionCode opcode) {
    opcode |= FlagsModeField::encode(mode_);
    if (mode_ != kFlags_none) {
      opcode |= FlagsConditionField::encode(condition_);
    }
    return opcode;
  }

 private:
  FlagsMode mode_;
  FlagsCondition condition_;
  Node* result_;             // Only valid if mode_ == kFlags_set.
  BasicBlock* true_block_;   // Only valid if mode_ == kFlags_branch.
  BasicBlock* false_block_;  // Only valid if mode_ == kFlags_branch.
};


// An internal helper class for generating the operands to calls.
// TODO(bmeurer): Get rid of the CallBuffer business and make
// InstructionSelector::VisitCall platform independent instead.
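// A rough, non-authoritative sketch of how the fields below relate (the
// actual population happens in the call-lowering code in
// instruction-selector.cc): output_nodes/outputs describe the call's return
// values, instruction_args collects the operands that become inputs of the
// generated call instruction (callee, frame state and register arguments),
// and pushed_nodes holds arguments passed on the stack, which are emitted as
// separate pushes before the call.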
struct CallBuffer {
  CallBuffer(Zone* zone, CallDescriptor* descriptor,
             FrameStateDescriptor* frame_state);

  CallDescriptor* descriptor;
  FrameStateDescriptor* frame_state_descriptor;
  NodeVector output_nodes;
  InstructionOperandVector outputs;
  InstructionOperandVector instruction_args;
  NodeVector pushed_nodes;

  size_t input_count() const { return descriptor->InputCount(); }

  size_t frame_state_count() const { return descriptor->FrameStateCount(); }

  size_t frame_state_value_count() const {
    return (frame_state_descriptor == NULL)
               ? 0
               : (frame_state_descriptor->GetTotalSize() +
                  1);  // Include deopt id.
  }
};

}  // namespace compiler
}  // namespace internal
}  // namespace v8

#endif  // V8_COMPILER_INSTRUCTION_SELECTOR_IMPL_H_