// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <algorithm>

#include "src/base/adapters.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"

namespace v8 {
namespace internal {
namespace compiler {

// Adds X64-specific methods for generating operands.
class X64OperandGenerator final : public OperandGenerator {
 public:
  explicit X64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  bool CanBeImmediate(Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
      case IrOpcode::kRelocatableInt32Constant:
        return true;
      case IrOpcode::kInt64Constant: {
        const int64_t value = OpParameter<int64_t>(node);
        return value == static_cast<int64_t>(static_cast<int32_t>(value));
      }
      case IrOpcode::kNumberConstant: {
        const double value = OpParameter<double>(node);
        return bit_cast<int64_t>(value) == 0;
      }
      default:
        return false;
    }
  }

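  // Returns true if the load |input| can be folded into |node| as a memory
  // operand: the load must be covered by |node|, be at the same effect level,
  // and its representation must match the operand width |opcode| expects.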
  bool CanBeMemoryOperand(InstructionCode opcode, Node* node, Node* input,
                          int effect_level) {
    if (input->opcode() != IrOpcode::kLoad ||
        !selector()->CanCover(node, input)) {
      return false;
    }
    if (effect_level != selector()->GetEffectLevel(input)) {
      return false;
    }
    MachineRepresentation rep =
        LoadRepresentationOf(input->op()).representation();
    switch (opcode) {
      case kX64Cmp:
      case kX64Test:
        return rep == MachineRepresentation::kWord64 ||
               rep == MachineRepresentation::kTagged;
      case kX64Cmp32:
      case kX64Test32:
        return rep == MachineRepresentation::kWord32;
      case kX64Cmp16:
      case kX64Test16:
        return rep == MachineRepresentation::kWord16;
      case kX64Cmp8:
      case kX64Test8:
        return rep == MachineRepresentation::kWord8;
      default:
        break;
    }
    return false;
  }

  AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
                                             Node* base, Node* displacement,
                                             InstructionOperand inputs[],
                                             size_t* input_count) {
    AddressingMode mode = kMode_MRI;
    if (base != nullptr) {
      inputs[(*input_count)++] = UseRegister(base);
      if (index != nullptr) {
        DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != nullptr) {
          inputs[(*input_count)++] = UseImmediate(displacement);
          static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
                                                       kMode_MR4I, kMode_MR8I};
          mode = kMRnI_modes[scale_exponent];
        } else {
          static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
                                                      kMode_MR4, kMode_MR8};
          mode = kMRn_modes[scale_exponent];
        }
      } else {
        if (displacement == nullptr) {
          mode = kMode_MR;
        } else {
          inputs[(*input_count)++] = UseImmediate(displacement);
          mode = kMode_MRI;
        }
      }
    } else {
      DCHECK_NOT_NULL(index);
      DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
      inputs[(*input_count)++] = UseRegister(index);
      if (displacement != nullptr) {
        inputs[(*input_count)++] = UseImmediate(displacement);
        static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
                                                    kMode_M4I, kMode_M8I};
        mode = kMnI_modes[scale_exponent];
      } else {
        static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
                                                   kMode_M4, kMode_M8};
        mode = kMn_modes[scale_exponent];
        if (mode == kMode_MR1) {
          // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
          inputs[(*input_count)++] = UseRegister(index);
        }
      }
    }
    return mode;
  }

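  // Decomposes the address computed by |operand| into base, index, scale and
  // displacement via BaseWithIndexAndDisplacement64Matcher and emits the
  // corresponding inputs; if the displacement cannot be encoded as an
  // immediate, falls back to a plain [base + index] (MR1) operand.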
  AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
                                                  InstructionOperand inputs[],
                                                  size_t* input_count) {
    BaseWithIndexAndDisplacement64Matcher m(operand, true);
    DCHECK(m.matches());
    if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
      return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(),
                                         m.displacement(), inputs, input_count);
    } else {
      inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
      inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
      return kMode_MR1;
    }
  }

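  // A node that is not live past this use makes a better left operand for a
  // "define same as first" instruction, since overwriting it clobbers nothing
  // that is still needed.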
  bool CanBeBetterLeftOperand(Node* node) const {
    return !selector()->IsLive(node);
  }
};


void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  X64OperandGenerator g(this);

  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kX64Movss;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kX64Movsd;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
      break;
    case MachineRepresentation::kWord32:
      opcode = kX64Movl;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kX64Movq;
      break;
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
  size_t input_count = 0;
  AddressingMode mode =
      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
  InstructionCode code = opcode | AddressingModeField::encode(mode);
  Emit(code, 1, outputs, input_count, inputs);
}


void InstructionSelector::VisitStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kX64Movss;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kX64Movsd;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kX64Movb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kX64Movw;
        break;
      case MachineRepresentation::kWord32:
        opcode = kX64Movl;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kX64Movq;
        break;
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    InstructionOperand value_operand =
        g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
    inputs[input_count++] = value_operand;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}

void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length));
      return;
    }
  }
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.TempImmediate(0), length_operand);
}


void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand value_operand =
      g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.NoOutput(), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length),
           value_operand);
      return;
    }
  }
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.TempImmediate(0), length_operand, value_operand);
}

// Shared routine for multiple binary operations.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  if (left == right) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov rax, [rbp-0x10]
    //   add rax, [rbp-0x10]
    //   jo label
    InstructionOperand const input = g.UseRegister(left);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (g.CanBeImmediate(right)) {
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    if (node->op()->HasProperty(Operator::kCommutative) &&
        g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.Use(right);
  }

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineSameAsFirst(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}


// Shared routine for multiple binary operations.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}


void InstructionSelector::VisitWord32And(Node* node) {
  X64OperandGenerator g(this);
  Uint32BinopMatcher m(node);
  if (m.right().Is(0xff)) {
    Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
  } else if (m.right().Is(0xffff)) {
    Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
  } else {
    VisitBinop(this, node, kX64And32);
  }
}


void InstructionSelector::VisitWord64And(Node* node) {
  VisitBinop(this, node, kX64And);
}


void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kX64Or32);
}


void InstructionSelector::VisitWord64Or(Node* node) {
  VisitBinop(this, node, kX64Or);
}


void InstructionSelector::VisitWord32Xor(Node* node) {
  X64OperandGenerator g(this);
  Uint32BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop(this, node, kX64Xor32);
  }
}


void InstructionSelector::VisitWord64Xor(Node* node) {
  X64OperandGenerator g(this);
  Uint64BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop(this, node, kX64Xor);
  }
}


namespace {

// Shared routine for multiple 32-bit shift operations.
// TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
void VisitWord32Shift(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, rcx));
  }
}


// Shared routine for multiple 64-bit shift operations.
// TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
void VisitWord64Shift(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    if (m.right().IsWord64And()) {
      Int64BinopMatcher mright(right);
      if (mright.right().Is(0x3F)) {
        right = mright.left().node();
      }
    }
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, rcx));
  }
}

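// Emits a lea-style instruction that computes
// index * 2^scale + base + displacement into |result|, reusing the shared
// memory-operand generation logic to select the addressing mode.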
void EmitLea(InstructionSelector* selector, InstructionCode opcode,
             Node* result, Node* index, int scale, Node* base,
             Node* displacement) {
  X64OperandGenerator g(selector);

  InstructionOperand inputs[4];
  size_t input_count = 0;
  AddressingMode mode = g.GenerateMemoryOperandInputs(
      index, scale, base, displacement, inputs, &input_count);

  DCHECK_NE(0u, input_count);
  DCHECK_GE(arraysize(inputs), input_count);

  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(result);

  opcode = AddressingModeField::encode(mode) | opcode;

  selector->Emit(opcode, 1, outputs, input_count, inputs);
}

}  // namespace


void InstructionSelector::VisitWord32Shl(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr);
    return;
  }
  VisitWord32Shift(this, node, kX64Shl32);
}


void InstructionSelector::VisitWord64Shl(Node* node) {
  X64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
      m.right().IsInRange(32, 63)) {
    // There's no need to sign/zero-extend to 64-bit if we shift out the upper
    // 32 bits anyway.
    Emit(kX64Shl, g.DefineSameAsFirst(node),
         g.UseRegister(m.left().node()->InputAt(0)),
         g.UseImmediate(m.right().node()));
    return;
  }
  VisitWord64Shift(this, node, kX64Shl);
}


void InstructionSelector::VisitWord32Shr(Node* node) {
  VisitWord32Shift(this, node, kX64Shr32);
}


void InstructionSelector::VisitWord64Shr(Node* node) {
  VisitWord64Shift(this, node, kX64Shr);
}


void InstructionSelector::VisitWord32Sar(Node* node) {
  X64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    }
  }
  VisitWord32Shift(this, node, kX64Sar32);
}


void InstructionSelector::VisitWord64Sar(Node* node) {
  X64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (CanCover(m.node(), m.left().node()) && m.left().IsLoad() &&
      m.right().Is(32)) {
    // Just load and sign-extend the interesting 4 bytes instead. This happens,
    // for example, when we're loading and untagging SMIs.
    BaseWithIndexAndDisplacement64Matcher mleft(m.left().node(), true);
    if (mleft.matches() && (mleft.displacement() == nullptr ||
                            g.CanBeImmediate(mleft.displacement()))) {
      size_t input_count = 0;
      InstructionOperand inputs[3];
      AddressingMode mode = g.GetEffectiveAddressMemoryOperand(
          m.left().node(), inputs, &input_count);
      if (mleft.displacement() == nullptr) {
        // Make sure that the addressing mode indicates the presence of an
        // immediate displacement. It seems that we never use M1 and M2, but we
        // handle them here anyways.
        switch (mode) {
          case kMode_MR:
            mode = kMode_MRI;
            break;
          case kMode_MR1:
            mode = kMode_MR1I;
            break;
          case kMode_MR2:
            mode = kMode_MR2I;
            break;
          case kMode_MR4:
            mode = kMode_MR4I;
            break;
          case kMode_MR8:
            mode = kMode_MR8I;
            break;
          case kMode_M1:
            mode = kMode_M1I;
            break;
          case kMode_M2:
            mode = kMode_M2I;
            break;
          case kMode_M4:
            mode = kMode_M4I;
            break;
          case kMode_M8:
            mode = kMode_M8I;
            break;
          case kMode_None:
          case kMode_MRI:
          case kMode_MR1I:
          case kMode_MR2I:
          case kMode_MR4I:
          case kMode_MR8I:
          case kMode_M1I:
          case kMode_M2I:
          case kMode_M4I:
          case kMode_M8I:
            UNREACHABLE();
        }
        inputs[input_count++] = ImmediateOperand(ImmediateOperand::INLINE, 4);
      } else {
        ImmediateOperand* op = ImmediateOperand::cast(&inputs[input_count - 1]);
        int32_t displacement = sequence()->GetImmediate(op).ToInt32();
        *op = ImmediateOperand(ImmediateOperand::INLINE, displacement + 4);
      }
      InstructionOperand outputs[] = {g.DefineAsRegister(node)};
      InstructionCode code = kX64Movsxlq | AddressingModeField::encode(mode);
      Emit(code, 1, outputs, input_count, inputs);
      return;
    }
  }
  VisitWord64Shift(this, node, kX64Sar);
}

void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitWord32Shift(this, node, kX64Ror32);
}


void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitWord64Shift(this, node, kX64Ror);
}


void InstructionSelector::VisitWord64Clz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitWord32Clz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Lzcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitWord64Ctz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Tzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitWord32Ctz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Tzcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }


void InstructionSelector::VisitWord32Popcnt(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Popcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitWord64Popcnt(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitInt32Add(Node* node) {
  X64OperandGenerator g(this);

  // Try to match the Add to a leal pattern
  BaseWithIndexAndDisplacement32Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
            m.displacement());
    return;
  }

  // No leal pattern match, use addl
  VisitBinop(this, node, kX64Add32);
}


void InstructionSelector::VisitInt64Add(Node* node) {
  VisitBinop(this, node, kX64Add);
}


void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Add, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Add, &cont);
}


void InstructionSelector::VisitInt32Sub(Node* node) {
  X64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
  } else {
    if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
      // Turn subtractions of constant values into immediate "leal" instructions
      // by negating the value.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(-m.right().Value()));
      return;
    }
    VisitBinop(this, node, kX64Sub32);
  }
}


void InstructionSelector::VisitInt64Sub(Node* node) {
  X64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
  } else {
    VisitBinop(this, node, kX64Sub);
  }
}


void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Sub, &cont);
}


namespace {

void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
                   g.UseImmediate(right));
  } else {
    if (g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.Use(right));
  }
}

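// Multiply-high on x64 uses the one-operand multiply form, which reads rax
// and writes the high word to rdx; hence the fixed register constraints below
// and rax listed as a clobbered temp.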
void VisitMulHigh(InstructionSelector* selector, Node* node,
                  ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (selector->IsLive(left) && !selector->IsLive(right)) {
    std::swap(left, right);
  }
  InstructionOperand temps[] = {g.TempRegister(rax)};
  // TODO(turbofan): We use UseUniqueRegister here to improve register
  // allocation.
  selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
                 g.UseUniqueRegister(right), arraysize(temps), temps);
}

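// Integer division on x64 takes its dividend in rdx:rax and produces the
// quotient in rax and the remainder in rdx, so VisitDiv fixes the result to
// rax (with rdx as a clobbered temp) and VisitMod fixes it to rdx (with rax
// as a clobbered temp).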
void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rdx)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}


void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rdx), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}

}  // namespace


void InstructionSelector::VisitInt32Mul(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr);
    return;
  }
  VisitMul(this, node, kX64Imul32);
}


void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitMul(this, node, kX64Imul);
}


void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64ImulHigh32);
}


void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kX64Idiv32);
}


void InstructionSelector::VisitInt64Div(Node* node) {
  VisitDiv(this, node, kX64Idiv);
}


void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kX64Udiv32);
}


void InstructionSelector::VisitUint64Div(Node* node) {
  VisitDiv(this, node, kX64Udiv);
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kX64Idiv32);
}


void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitMod(this, node, kX64Idiv);
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kX64Udiv32);
}


void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitMod(this, node, kX64Udiv);
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64UmulHigh32);
}

void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEUint32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ToUint32 | MiscField::encode(1), g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ToUint32 | MiscField::encode(0), g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat32ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat32ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
}


void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
}


void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
}


void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
}


void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh: {
      // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
      // zero-extension is a no-op.
      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      return;
    }
    default:
      break;
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}

namespace {

void VisitRO(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void VisitRR(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}


void VisitFloatBinop(InstructionSelector* selector, Node* node,
                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
  InstructionOperand operand1 = g.Use(node->InputAt(1));
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1);
  }
}


void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
                    ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
  }
}

}  // namespace


void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRO(this, node, kSSEFloat64ToFloat32);
}

void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
  VisitRR(this, node, kArchTruncateDoubleToI);
}

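// When the truncated value is a covered 64-bit right shift by 32, the
// truncation is folded into the shift itself; otherwise a movl performs the
// truncation (and implicitly clears the upper 32 bits).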
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord64Sar:
      case IrOpcode::kWord64Shr: {
        Int64BinopMatcher m(value);
        if (m.right().Is(32)) {
          Emit(kX64Shr, g.DefineSameAsFirst(node),
               g.UseRegister(m.left().node()), g.TempImmediate(32));
          return;
        }
        break;
      }
      default:
        break;
    }
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}

void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
  VisitRO(this, node, kSSEFloat64ToInt32);
}

void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt32ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEUint32ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}


void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}


void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastFI, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastDL, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastIF, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastLD, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}

void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
}


void InstructionSelector::VisitFloat32Sub(Node* node) {
  X64OperandGenerator g(this);
  Float32BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    VisitFloatUnop(this, node, m.right().node(), kAVXFloat32Neg,
                   kSSEFloat32Neg);
    return;
  }
  VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
}

void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
}


void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
}


void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Max, kSSEFloat32Max);
}


void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Min, kSSEFloat32Min);
}


void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
}


void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRO(this, node, kSSEFloat32Sqrt);
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
}


void InstructionSelector::VisitFloat64Sub(Node* node) {
  X64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          Emit(kSSEFloat64Round | MiscField::encode(kRoundUp),
               g.DefineAsRegister(node), g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    VisitFloatUnop(this, node, m.right().node(), kAVXFloat64Neg,
                   kSSEFloat64Neg);
    return;
  }
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}

void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}

void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
}


void InstructionSelector::VisitFloat64Mod(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
       temps);
}


void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Max, kSSEFloat64Max);
}


void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Min, kSSEFloat64Min);
}


void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
}

void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRO(this, node, kSSEFloat64Sqrt);
}

Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001363void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1364 VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundDown));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001365}
1366
1367
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001368void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1369 VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundDown));
1370}
1371
1372
1373void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1374 VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundUp));
1375}
1376
1377
1378void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1379 VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundUp));
1380}
1381
1382
1383void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1384 VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundToZero));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001385}
1386
1387
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001388void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001389 VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundToZero));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001390}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001392
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundToNearest));
}


void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundToNearest));
}

void InstructionSelector::VisitFloat32Neg(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat64Neg(Node* node) { UNREACHABLE(); }

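// The Ieee754 unop/binop helpers lower math functions to out-of-line calls,
// so the operands and the result are pinned to fixed registers (xmm0/xmm1,
// which appear to match the argument registers of the underlying C ABI) and
// the instruction is marked as a call so the register allocator treats
// caller-saved registers as clobbered.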
void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
                                                   InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0),
       g.UseFixed(node->InputAt(1), xmm1))
      ->MarkAsCall();
}

void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
                                                  InstructionCode opcode) {
  X64OperandGenerator g(this);
  Emit(opcode, g.DefineAsFixed(node, xmm0), g.UseFixed(node->InputAt(0), xmm0))
      ->MarkAsCall();
}

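// Argument preparation differs by call kind: C calls reserve the outgoing
// frame up front (kArchPrepareCallCFunction) and then "poke" each argument
// into its slot, while other calls push arguments in reverse order. As an
// illustrative sketch (not actual output), a C call with two stack arguments
// would select roughly:
//   kArchPrepareCallCFunction {param_count: 2}
//   kX64Poke {slot: 0}, arg0
//   kX64Poke {slot: 1}, arg1
// In the push path, Atom CPUs and floating-point values force the operand
// into a register first; see the TODO below about stack-to-stack double
// moves.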
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // TODO(titzer): X64Push cannot handle stack->stack double moves
      // because there is no way to encode fixed double slots.
      InstructionOperand value =
          g.CanBeImmediate(input.node())
              ? g.UseImmediate(input.node())
              : IsSupported(ATOM) ||
                        sequence()->IsFP(GetVirtualRegister(input.node()))
                    ? g.UseRegister(input.node())
                    : g.Use(input.node());
      Emit(kX64Push, g.NoOutput(), value);
    }
  }
}


bool InstructionSelector::IsTailCallAddressImmediate() { return true; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }

namespace {

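// Emits a compare whose left operand is a load folded directly into the
// instruction as a memory operand (e.g. something along the lines of
// `cmpl [rax+0xc], rbx`), instead of a separate load followed by a register
// compare. The inputs array is filled with the address operands produced by
// GetEffectiveAddressMemoryOperand, then the right operand, then, for
// branches, the true/false block labels.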
void VisitCompareWithMemoryOperand(InstructionSelector* selector,
                                   InstructionCode opcode, Node* left,
                                   InstructionOperand right,
                                   FlagsContinuation* cont) {
  DCHECK(left->opcode() == IrOpcode::kLoad);
  X64OperandGenerator g(selector);
  size_t input_count = 0;
  InstructionOperand inputs[6];
  AddressingMode addressing_mode =
      g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
  opcode |= AddressingModeField::encode(addressing_mode);
  opcode = cont->Encode(opcode);
  inputs[input_count++] = right;

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, 0, nullptr, input_count, inputs,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    InstructionOperand output = g.DefineAsRegister(cont->result());
    selector->Emit(opcode, 1, &output, input_count, inputs);
  }
}

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}


// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X64OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}

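// Example of the narrowing below: if both inputs of a 32-bit compare are
// kWord8 loads with the same representation, kX64Cmp32 becomes kX64Cmp8,
// which lets those byte loads be folded into the compare as memory operands
// (CanBeMemoryOperand requires the operand width to match the opcode).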
// Tries to match the size of the given opcode to that of the operands, if
// possible.
InstructionCode TryNarrowOpcodeSize(InstructionCode opcode, Node* left,
                                    Node* right) {
  if (opcode != kX64Cmp32 && opcode != kX64Test32) {
    return opcode;
  }
  // Currently, if one of the two operands is not a Load, we don't know what
  // its machine representation is, so we bail out.
  // TODO(epertoso): We can probably get some size information out of
  // immediates and phi nodes.
  if (left->opcode() != IrOpcode::kLoad || right->opcode() != IrOpcode::kLoad) {
    return opcode;
  }
  // If the load representations don't match, both operands will be
  // zero/sign-extended to 32 bits.
  LoadRepresentation left_representation = LoadRepresentationOf(left->op());
  if (left_representation != LoadRepresentationOf(right->op())) {
    return opcode;
  }
  switch (left_representation.representation()) {
    case MachineRepresentation::kBit:
    case MachineRepresentation::kWord8:
      return opcode == kX64Cmp32 ? kX64Cmp8 : kX64Test8;
    case MachineRepresentation::kWord16:
      return opcode == kX64Cmp32 ? kX64Cmp16 : kX64Test16;
    default:
      return opcode;
  }
}

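// A note on the effect-level check used below: a load can only be folded into
// the compare if no side-effecting operation (e.g. a store) lies between the
// load and the instruction that consumes the flags, and comparing effect
// levels is how the selector rules that out. For branches the flags are
// consumed at the end of the block, so the effect level of the block's
// control input is used instead of the compare node's own level.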
// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  opcode = TryNarrowOpcodeSize(opcode, left, right);

  // If one of the two inputs is an immediate, make sure it's on the right, or
  // if one of the two inputs is a memory operand, make sure it's on the left.
  int effect_level = selector->GetEffectLevel(node);
  if (cont->IsBranch()) {
    effect_level = selector->GetEffectLevel(
        cont->true_block()->PredecessorAt(0)->control_input());
  }

  if ((!g.CanBeImmediate(right) && g.CanBeImmediate(left)) ||
      (g.CanBeMemoryOperand(opcode, node, right, effect_level) &&
       !g.CanBeMemoryOperand(opcode, node, left, effect_level))) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  // Match memory operands on left side of comparison.
  if (g.CanBeMemoryOperand(opcode, node, left, effect_level)) {
    return VisitCompareWithMemoryOperand(selector, opcode, left,
                                         g.UseRegister(right), cont);
  }

  if (g.CanBeBetterLeftOperand(right)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}

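// A 64-bit comparison of the form Compare(Load(js_stack_limit), StackPointer)
// is the stack-overflow check; it is matched here and lowered to the
// dedicated kX64StackCheck instruction, which presumably compares rsp against
// the stack-limit cell directly rather than materializing both values in
// registers first.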
// Shared routine for 64-bit word comparison operations.
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX64StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else if (cont->IsDeoptimize()) {
        selector->EmitDeoptimize(opcode, 0, nullptr, 0, nullptr,
                                 cont->frame_state());
      } else {
        DCHECK(cont->IsSet());
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX64Cmp, cont);
}


// Shared routine for comparison with zero.
void VisitCompareZero(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
}


// Shared routine for multiple float32 compare operations (inputs commuted).
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);
  InstructionCode const opcode =
      selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
  VisitCompare(selector, opcode, right, left, cont, false);
}


// Shared routine for multiple float64 compare operations (inputs commuted).
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);
  InstructionCode const opcode =
      selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
  VisitCompare(selector, opcode, right, left, cont, false);
}

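// Walks the chain of nodes covered by a branch (or deoptimize/flag-set) on a
// word value: `x == 0` tests are peeled off by negating the continuation, and
// the flag-setting operation itself (compares, subs, ands, or the overflow
// projection of an <Operation>WithOverflow node) is fused where possible.
// Only when nothing can be fused does control fall through to the explicit
// compare against zero at the bottom.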
// Shared routine for word comparison against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kWord64Equal: {
        cont->OverwriteAndNegateIfEqual(kEqual);
        Int64BinopMatcher m(value);
        if (m.right().Is(0)) {
          // Try to combine the branch with a comparison.
          Node* const user = m.node();
          Node* const value = m.left().node();
          if (selector->CanCover(user, value)) {
            switch (value->opcode()) {
              case IrOpcode::kInt64Sub:
                return VisitWord64Compare(selector, value, cont);
              case IrOpcode::kWord64And:
                return VisitWordCompare(selector, value, kX64Test, cont);
              default:
                break;
            }
          }
          return VisitCompareZero(selector, value, kX64Cmp, cont);
        }
        return VisitWord64Compare(selector, value, cont);
      }
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation>) is either nullptr, which means there's no use of
          // the actual value, or was already defined, which means it is
          // scheduled *AFTER* this branch.
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Add32, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Sub32, cont);
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Add, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX64Sub, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, kX64Cmp32, cont);
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kX64Test32, cont);
      case IrOpcode::kWord64And:
        return VisitWordCompare(selector, value, kX64Test, cont);
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare; emit a compare against 0.
  VisitCompareZero(selector, value, kX64Cmp32, cont);
}

}  // namespace

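// A branch tests whether its input is non-zero, hence the kNotEqual
// continuation against an implicit zero. VisitDeoptimizeIf/Unless reuse the
// same combining machinery with a deoptimization continuation instead of
// basic-block labels.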
void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWordCompareZero(this, node, node->InputAt(0), &cont);
}

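// The table-vs-lookup decision below compares space + 3 * time for the two
// strategies. Worked example with illustrative numbers: for case_count = 10
// and value_range = 12, the table costs (4 + 12) + 3 * 3 = 25 while the
// lookup chain costs (3 + 2 * 10) + 3 * 10 = 53, so a jump table is chosen,
// provided there are more than 4 cases and -min_value does not overflow.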
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 4 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = g.TempRegister();
    if (sw.min_value) {
      // The leal automatically zero-extends, so the result is a valid
      // 64-bit index.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
           value_operand, g.TempImmediate(-sw.min_value));
    } else {
      // Zero-extend, because we use it as a 64-bit index into the jump table.
      Emit(kX64Movl, index_operand, value_operand);
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}


void InstructionSelector::VisitWord32Equal(Node* const node) {
  Node* user = node;
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(user);
  if (m.right().Is(0)) {
    Node* value = m.left().node();

    // Try to combine with comparisons against 0 by simply inverting the
    // branch.
    while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
      Int32BinopMatcher m(value);
      if (m.right().Is(0)) {
        user = value;
        value = m.left().node();
        cont.Negate();
      } else {
        break;
      }
    }

    // Try to combine the branch with a comparison.
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt32Sub:
          return VisitWordCompare(this, value, kX64Cmp32, &cont);
        case IrOpcode::kWord32And:
          return VisitWordCompare(this, value, kX64Test32, &cont);
        default:
          break;
      }
    }
    return VisitCompareZero(this, value, kX64Cmp32, &cont);
  }
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, kX64Cmp32, &cont);
}


void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    // Try to combine the equality check with a comparison.
    Node* const user = m.node();
    Node* const value = m.left().node();
    if (CanCover(user, value)) {
      switch (value->opcode()) {
        case IrOpcode::kInt64Sub:
          return VisitWord64Compare(this, value, &cont);
        case IrOpcode::kWord64And:
          return VisitWordCompare(this, value, kX64Test, &cont);
        default:
          break;
      }
    }
  }
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Add32, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Add32, &cont);
}


void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub32, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Sub32, &cont);
}


void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnorderedEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedGreaterThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ExtractLowWord32, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ExtractHighWord32, g.DefineAsRegister(node),
       g.Use(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  X64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Float64Matcher mleft(left);
  if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
    Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
    return;
  }
  Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.Use(right));
}


void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  X64OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.Use(right));
}

void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64SilenceNaN, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)));
}

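// On x64, an ordinary aligned mov load is sufficient for an atomic load of up
// to word size: the architecture does not reorder loads with other loads, and
// the matching atomic stores below use an implicitly locked xchg, so no extra
// fence is needed here. Atomic loads therefore reuse the regular load
// selection; the DCHECK documents the supported representations.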
void InstructionSelector::VisitAtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  DCHECK(load_rep.representation() == MachineRepresentation::kWord8 ||
         load_rep.representation() == MachineRepresentation::kWord16 ||
         load_rep.representation() == MachineRepresentation::kWord32);
  USE(load_rep);
  VisitLoad(node);
}

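// Atomic stores are selected as xchg rather than mov: on x86 an xchg with a
// memory operand has an implicit lock prefix, which provides the
// sequential-consistency barrier for the store without a separate mfence.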
void InstructionSelector::VisitAtomicStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kX64Xchgb;
      break;
    case MachineRepresentation::kWord16:
      opcode = kX64Xchgw;
      break;
    case MachineRepresentation::kWord32:
      opcode = kX64Xchgl;
      break;
    default:
      UNREACHABLE();
      return;
  }
  AddressingMode addressing_mode;
  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(base);
  if (g.CanBeImmediate(index)) {
    inputs[input_count++] = g.UseImmediate(index);
    addressing_mode = kMode_MRI;
  } else {
    inputs[input_count++] = g.UseUniqueRegister(index);
    addressing_mode = kMode_MR1;
  }
  inputs[input_count++] = g.UseUniqueRegister(value);
  InstructionCode code = opcode | AddressingModeField::encode(addressing_mode);
  Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
       inputs);
}

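// Capability flags reported to the machine operator builder. The popcount and
// rounding operators are only advertised when the CPU supports POPCNT and
// SSE4.1 respectively, since roundss/roundsd are SSE4.1 instructions.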
// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  MachineOperatorBuilder::Flags flags =
      MachineOperatorBuilder::kFloat32Max |
      MachineOperatorBuilder::kFloat32Min |
      MachineOperatorBuilder::kFloat64Max |
      MachineOperatorBuilder::kFloat64Min |
      MachineOperatorBuilder::kWord32ShiftIsSafe |
      MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz;
  if (CpuFeatures::IsSupported(POPCNT)) {
    flags |= MachineOperatorBuilder::kWord32Popcnt |
             MachineOperatorBuilder::kWord64Popcnt;
  }
  if (CpuFeatures::IsSupported(SSE4_1)) {
    flags |= MachineOperatorBuilder::kFloat32RoundDown |
             MachineOperatorBuilder::kFloat64RoundDown |
             MachineOperatorBuilder::kFloat32RoundUp |
             MachineOperatorBuilder::kFloat64RoundUp |
             MachineOperatorBuilder::kFloat32RoundTruncate |
             MachineOperatorBuilder::kFloat64RoundTruncate |
             MachineOperatorBuilder::kFloat32RoundTiesEven |
             MachineOperatorBuilder::kFloat64RoundTiesEven;
  }
  return flags;
}

// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8