blob: d3a2a8e7533672bd4fb8a988b10a710944279594 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include <algorithm>
6
7#include "src/base/adapters.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/compiler/instruction-selector-impl.h"
9#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/compiler/node-properties.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000011
12namespace v8 {
13namespace internal {
14namespace compiler {
15
// Adds X64-specific methods for generating operands.
class X64OperandGenerator final : public OperandGenerator {
 public:
  explicit X64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Returns true if |node| is a constant that fits a 32-bit immediate
  // operand field.
  bool CanBeImmediate(Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return true;
      // 64-bit constants qualify only if they sign-extend from 32 bits.
      case IrOpcode::kInt64Constant: {
        const int64_t value = OpParameter<int64_t>(node);
        return value == static_cast<int64_t>(static_cast<int32_t>(value));
      }
      // Only a number whose bit pattern is all zeros (i.e. +0.0) can be
      // used as an immediate.
      case IrOpcode::kNumberConstant: {
        const double value = OpParameter<double>(node);
        return bit_cast<int64_t>(value) == 0;
      }
      default:
        return false;
    }
  }

  // Appends the operands for a memory access of the form
  // [base + index * 2^scale_exponent + displacement] to |inputs| (bumping
  // *input_count) and returns the matching addressing mode. Any of base,
  // index and displacement may be null and is then omitted.
  AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
                                             Node* base, Node* displacement,
                                             InstructionOperand inputs[],
                                             size_t* input_count) {
    AddressingMode mode = kMode_MRI;
    if (base != nullptr) {
      inputs[(*input_count)++] = UseRegister(base);
      if (index != nullptr) {
        DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != nullptr) {
          inputs[(*input_count)++] = UseImmediate(displacement);
          // Mode tables are indexed by scale_exponent (0..3).
          static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
                                                       kMode_MR4I, kMode_MR8I};
          mode = kMRnI_modes[scale_exponent];
        } else {
          static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
                                                      kMode_MR4, kMode_MR8};
          mode = kMRn_modes[scale_exponent];
        }
      } else {
        // Base only, with or without an immediate displacement.
        if (displacement == nullptr) {
          mode = kMode_MR;
        } else {
          inputs[(*input_count)++] = UseImmediate(displacement);
          mode = kMode_MRI;
        }
      }
    } else {
      // No base register: the (possibly scaled) index carries the address.
      DCHECK_NOT_NULL(index);
      DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
      inputs[(*input_count)++] = UseRegister(index);
      if (displacement != nullptr) {
        inputs[(*input_count)++] = UseImmediate(displacement);
        static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
                                                    kMode_M4I, kMode_M8I};
        mode = kMnI_modes[scale_exponent];
      } else {
        static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
                                                   kMode_M4, kMode_M8};
        mode = kMn_modes[scale_exponent];
        if (mode == kMode_MR1) {
          // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
          inputs[(*input_count)++] = UseRegister(index);
        }
      }
    }
    return mode;
  }

  // Decomposes |operand| (a load/store address computation) into addressing
  // mode inputs. If the matched displacement does not fit an immediate,
  // falls back to plain [reg + reg] on the node's first two inputs.
  AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
                                                  InstructionOperand inputs[],
                                                  size_t* input_count) {
    BaseWithIndexAndDisplacement64Matcher m(operand, true);
    DCHECK(m.matches());
    if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
      return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(),
                                         m.displacement(), inputs, input_count);
    } else {
      inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
      inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
      return kMode_MR1;
    }
  }

  // A node with no remaining uses can be clobbered, which makes it the
  // better left (destination) operand for two-address instructions.
  bool CanBeBetterLeftOperand(Node* node) const {
    return !selector()->IsLive(node);
  }
};
108
109
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  X64OperandGenerator g(this);

  // Map the machine representation to the matching x64 mov variant;
  // sub-word integer loads pick the sign- or zero-extending form.
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kX64Movss;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kX64Movsd;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
      break;
    case MachineRepresentation::kWord32:
      opcode = kX64Movl;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kX64Movq;
      break;
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  // The result register, then the (up to 3) address operands computed from
  // the effective address of the load.
  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(node);
  InstructionOperand inputs[3];
  size_t input_count = 0;
  AddressingMode mode =
      g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
  InstructionCode code = opcode | AddressingModeField::encode(mode);
  Emit(code, 1, outputs, input_count, inputs);
}
151
152
void InstructionSelector::VisitStore(Node* node) {
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    // Stores needing a write barrier go through the generic
    // kArchStoreWithWriteBarrier instruction; only tagged values can
    // require a barrier.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    // For a map write a shared register is acceptable; otherwise the value
    // must be in a unique register for the barrier code.
    inputs[input_count++] = (write_barrier_kind == kMapWriteBarrier)
                                ? g.UseRegister(value)
                                : g.UseUniqueRegister(value);
    // Translate the barrier kind into the RecordWriteMode encoded in the
    // instruction's MiscField.
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    // Plain store: pick the mov variant for the stored representation.
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kX64Movss;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kX64Movsd;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kX64Movb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kX64Movw;
        break;
      case MachineRepresentation::kWord32:
        opcode = kX64Movl;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kX64Movq;
        break;
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    // Address operands first, then the stored value (immediate if it fits).
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    InstructionOperand value_operand =
        g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
    inputs[input_count++] = value_operand;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}
241
242
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  // Select the checked-load opcode for the loaded representation.
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // If the offset is (index + constant) with a non-negative constant no
  // larger than the constant length, fold the constant into the
  // instruction's immediate operand.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length));
      return;
    }
  }
  // Generic form: offset in a register, zero immediate displacement.
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.TempImmediate(0), length_operand);
}
293
294
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  // Select the checked-store opcode for the stored representation.
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:      // Fall through.
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kTagged:   // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand value_operand =
      g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
  // Same (index + constant) folding as VisitCheckedLoad: put the constant
  // into the immediate operand when the static bound allows it.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.NoOutput(), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length),
           value_operand);
      return;
    }
  }
  // Generic form: offset in a register, zero immediate displacement.
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.TempImmediate(0), length_operand, value_operand);
}
349
350
// Shared routine for multiple binary operations.
// Emits |opcode| for |node|, combining it with the flags continuation
// |cont| (branch or materialized boolean). The first input doubles as the
// destination, matching the two-address x64 instruction forms.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  if (left == right) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov rax, [rbp-0x10]
    //   add rax, [rbp-0x10]
    //   jo label
    InstructionOperand const input = g.UseRegister(left);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (g.CanBeImmediate(right)) {
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    // For commutative operators, prefer putting the operand that dies here
    // on the (clobbered) left side.
    if (node->op()->HasProperty(Operator::kCommutative) &&
        g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.Use(right);
  }

  // A branch continuation consumes its target labels as extra inputs.
  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineSameAsFirst(node);
  // A set continuation materializes the flag into an extra output register.
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}
405
406
407// Shared routine for multiple binary operations.
408static void VisitBinop(InstructionSelector* selector, Node* node,
409 InstructionCode opcode) {
410 FlagsContinuation cont;
411 VisitBinop(selector, node, opcode, &cont);
412}
413
414
415void InstructionSelector::VisitWord32And(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000416 X64OperandGenerator g(this);
417 Uint32BinopMatcher m(node);
418 if (m.right().Is(0xff)) {
419 Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
420 } else if (m.right().Is(0xffff)) {
421 Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
422 } else {
423 VisitBinop(this, node, kX64And32);
424 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000425}
426
427
// 64-bit bitwise AND lowers directly to the shared binop path.
void InstructionSelector::VisitWord64And(Node* node) {
  VisitBinop(this, node, kX64And);
}
431
432
// 32-bit bitwise OR lowers directly to the shared binop path.
void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kX64Or32);
}
436
437
// 64-bit bitwise OR lowers directly to the shared binop path.
void InstructionSelector::VisitWord64Or(Node* node) {
  VisitBinop(this, node, kX64Or);
}
441
442
443void InstructionSelector::VisitWord32Xor(Node* node) {
444 X64OperandGenerator g(this);
445 Uint32BinopMatcher m(node);
446 if (m.right().Is(-1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400447 Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000448 } else {
449 VisitBinop(this, node, kX64Xor32);
450 }
451}
452
453
454void InstructionSelector::VisitWord64Xor(Node* node) {
455 X64OperandGenerator g(this);
456 Uint64BinopMatcher m(node);
457 if (m.right().Is(-1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400458 Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000459 } else {
460 VisitBinop(this, node, kX64Xor);
461 }
462}
463
464
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400465namespace {
466
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000467// Shared routine for multiple 32-bit shift operations.
468// TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400469void VisitWord32Shift(InstructionSelector* selector, Node* node,
470 ArchOpcode opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000471 X64OperandGenerator g(selector);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400472 Int32BinopMatcher m(node);
473 Node* left = m.left().node();
474 Node* right = m.right().node();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000475
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000476 if (g.CanBeImmediate(right)) {
477 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
478 g.UseImmediate(right));
479 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000480 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
481 g.UseFixed(right, rcx));
482 }
483}
484
485
486// Shared routine for multiple 64-bit shift operations.
487// TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400488void VisitWord64Shift(InstructionSelector* selector, Node* node,
489 ArchOpcode opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000490 X64OperandGenerator g(selector);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400491 Int64BinopMatcher m(node);
492 Node* left = m.left().node();
493 Node* right = m.right().node();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000494
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000495 if (g.CanBeImmediate(right)) {
496 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
497 g.UseImmediate(right));
498 } else {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000499 if (m.right().IsWord64And()) {
500 Int64BinopMatcher mright(right);
501 if (mright.right().Is(0x3F)) {
502 right = mright.left().node();
503 }
504 }
505 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
506 g.UseFixed(right, rcx));
507 }
508}
509
510
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400511void EmitLea(InstructionSelector* selector, InstructionCode opcode,
512 Node* result, Node* index, int scale, Node* base,
513 Node* displacement) {
514 X64OperandGenerator g(selector);
515
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000516 InstructionOperand inputs[4];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400517 size_t input_count = 0;
518 AddressingMode mode = g.GenerateMemoryOperandInputs(
519 index, scale, base, displacement, inputs, &input_count);
520
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000521 DCHECK_NE(0u, input_count);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400522 DCHECK_GE(arraysize(inputs), input_count);
523
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000524 InstructionOperand outputs[1];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400525 outputs[0] = g.DefineAsRegister(result);
526
527 opcode = AddressingModeField::encode(mode) | opcode;
528
529 selector->Emit(opcode, 1, outputs, input_count, inputs);
530}
531
532} // namespace
533
534
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000535void InstructionSelector::VisitWord32Shl(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400536 Int32ScaleMatcher m(node, true);
537 if (m.matches()) {
538 Node* index = node->InputAt(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000539 Node* base = m.power_of_two_plus_one() ? index : nullptr;
540 EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400541 return;
542 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000543 VisitWord32Shift(this, node, kX64Shl32);
544}
545
546
547void InstructionSelector::VisitWord64Shl(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400548 X64OperandGenerator g(this);
549 Int64BinopMatcher m(node);
550 if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
551 m.right().IsInRange(32, 63)) {
552 // There's no need to sign/zero-extend to 64-bit if we shift out the upper
553 // 32 bits anyway.
554 Emit(kX64Shl, g.DefineSameAsFirst(node),
555 g.UseRegister(m.left().node()->InputAt(0)),
556 g.UseImmediate(m.right().node()));
557 return;
558 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000559 VisitWord64Shift(this, node, kX64Shl);
560}
561
562
// 32-bit logical right shift via the shared shift lowering.
void InstructionSelector::VisitWord32Shr(Node* node) {
  VisitWord32Shift(this, node, kX64Shr32);
}
566
567
// 64-bit logical right shift via the shared shift lowering.
void InstructionSelector::VisitWord64Shr(Node* node) {
  VisitWord64Shift(this, node, kX64Shr);
}
571
572
573void InstructionSelector::VisitWord32Sar(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400574 X64OperandGenerator g(this);
575 Int32BinopMatcher m(node);
576 if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
577 Int32BinopMatcher mleft(m.left().node());
578 if (mleft.right().Is(16) && m.right().Is(16)) {
579 Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
580 return;
581 } else if (mleft.right().Is(24) && m.right().Is(24)) {
582 Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
583 return;
584 }
585 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000586 VisitWord32Shift(this, node, kX64Sar32);
587}
588
589
// 64-bit arithmetic right shift via the shared shift lowering.
void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitWord64Shift(this, node, kX64Sar);
}
593
594
// 32-bit rotate right via the shared shift lowering.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitWord32Shift(this, node, kX64Ror32);
}
598
599
// 64-bit rotate right via the shared shift lowering.
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitWord64Shift(this, node, kX64Ror);
}
603
604
// Count leading zeros, 64-bit: single lzcnt instruction.
void InstructionSelector::VisitWord64Clz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
609
610
// Count leading zeros, 32-bit: single lzcnt instruction.
void InstructionSelector::VisitWord32Clz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Lzcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
615
616
// Count trailing zeros, 64-bit: single tzcnt instruction.
void InstructionSelector::VisitWord64Ctz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Tzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
621
622
// Count trailing zeros, 32-bit: single tzcnt instruction.
void InstructionSelector::VisitWord32Ctz(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Tzcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
627
628
// Bit reversal is not selectable on x64; this opcode must not reach here.
void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
630
631
// Bit reversal is not selectable on x64; this opcode must not reach here.
void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
633
634
// Population count, 32-bit: single popcnt instruction.
void InstructionSelector::VisitWord32Popcnt(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Popcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
639
640
// Population count, 64-bit: single popcnt instruction.
void InstructionSelector::VisitWord64Popcnt(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
645
646
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000647void InstructionSelector::VisitInt32Add(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400648 X64OperandGenerator g(this);
649
650 // Try to match the Add to a leal pattern
651 BaseWithIndexAndDisplacement32Matcher m(node);
652 if (m.matches() &&
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000653 (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400654 EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
655 m.displacement());
656 return;
657 }
658
659 // No leal pattern match, use addl
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000660 VisitBinop(this, node, kX64Add32);
661}
662
663
// 64-bit addition lowers directly to the shared binop path.
void InstructionSelector::VisitInt64Add(Node* node) {
  VisitBinop(this, node, kX64Add);
}
667
668
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000669void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
670 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
671 FlagsContinuation cont(kOverflow, ovf);
672 VisitBinop(this, node, kX64Add, &cont);
673 }
674 FlagsContinuation cont;
675 VisitBinop(this, node, kX64Add, &cont);
676}
677
678
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000679void InstructionSelector::VisitInt32Sub(Node* node) {
680 X64OperandGenerator g(this);
681 Int32BinopMatcher m(node);
682 if (m.left().Is(0)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400683 Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000684 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400685 if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
686 // Turn subtractions of constant values into immediate "leal" instructions
687 // by negating the value.
688 Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
689 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
690 g.TempImmediate(-m.right().Value()));
691 return;
692 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000693 VisitBinop(this, node, kX64Sub32);
694 }
695}
696
697
698void InstructionSelector::VisitInt64Sub(Node* node) {
699 X64OperandGenerator g(this);
700 Int64BinopMatcher m(node);
701 if (m.left().Is(0)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400702 Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000703 } else {
704 VisitBinop(this, node, kX64Sub);
705 }
706}
707
708
void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  // If the overflow projection is used, combine the subtraction with a
  // continuation on the overflow flag.
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Sub, &cont);
}
717
718
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400719namespace {
720
721void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000722 X64OperandGenerator g(selector);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400723 Int32BinopMatcher m(node);
724 Node* left = m.left().node();
725 Node* right = m.right().node();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000726 if (g.CanBeImmediate(right)) {
727 selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
728 g.UseImmediate(right));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000729 } else {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400730 if (g.CanBeBetterLeftOperand(right)) {
731 std::swap(left, right);
732 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000733 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
734 g.Use(right));
735 }
736}
737
738
// Shared lowering for multiplications that produce the high half of the
// product: result is fixed in rdx, one input fixed in rax, rax clobbered.
void VisitMulHigh(InstructionSelector* selector, Node* node,
                  ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  // Prefer putting the operand that dies here into the clobbered rax slot.
  if (selector->IsLive(left) && !selector->IsLive(right)) {
    std::swap(left, right);
  }
  InstructionOperand temps[] = {g.TempRegister(rax)};
  // TODO(turbofan): We use UseUniqueRegister here to improve register
  // allocation.
  selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
                 g.UseUniqueRegister(right), arraysize(temps), temps);
}
753
754
755void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
756 X64OperandGenerator g(selector);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000757 InstructionOperand temps[] = {g.TempRegister(rdx)};
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400758 selector->Emit(
759 opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
760 g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
761}
762
763
764void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
765 X64OperandGenerator g(selector);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100766 InstructionOperand temps[] = {g.TempRegister(rax)};
767 selector->Emit(
768 opcode, g.DefineAsFixed(node, rdx), g.UseFixed(node->InputAt(0), rax),
769 g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400770}
771
772} // namespace
773
774
// Lowers 32-bit multiplication. Multiplications by 2/4/8 (optionally +1) are
// strength-reduced to a scaled lea; everything else uses imul via VisitMul.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    // power_of_two_plus_one means x * (2^k + 1), i.e. lea base + index*2^k
    // with base == index.
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr);
    return;
  }
  VisitMul(this, node, kX64Imul32);
}
785
786
// 64-bit multiply: shared VisitMul lowering with the 64-bit imul opcode.
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitMul(this, node, kX64Imul);
}
790
791
// Signed 32-bit multiply-high: high 32 bits of the 64-bit product.
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64ImulHigh32);
}
795
796
// Signed 32-bit division via idiv.
void InstructionSelector::VisitInt32Div(Node* node) {
  VisitDiv(this, node, kX64Idiv32);
}
800
801
// Signed 64-bit division via idiv.
void InstructionSelector::VisitInt64Div(Node* node) {
  VisitDiv(this, node, kX64Idiv);
}
805
806
// Unsigned 32-bit division via div.
void InstructionSelector::VisitUint32Div(Node* node) {
  VisitDiv(this, node, kX64Udiv32);
}
810
811
// Unsigned 64-bit division via div.
void InstructionSelector::VisitUint64Div(Node* node) {
  VisitDiv(this, node, kX64Udiv);
}
815
816
// Signed 32-bit modulus: same idiv instruction, remainder taken from rdx.
void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitMod(this, node, kX64Idiv32);
}
820
821
// Signed 64-bit modulus via idiv.
void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitMod(this, node, kX64Idiv);
}
825
826
// Unsigned 32-bit modulus via div.
void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitMod(this, node, kX64Udiv32);
}
830
831
// Unsigned 64-bit modulus via div.
void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitMod(this, node, kX64Udiv);
}
835
836
// Unsigned 32-bit multiply-high: high 32 bits of the 64-bit product.
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitMulHigh(this, node, kX64UmulHigh32);
}
840
841
// float32 -> float64 widening conversion (cvtss2sd).
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
846
847
// int32 -> float64 conversion (cvtsi2sd); always exact.
void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
852
853
// uint32 -> float64 conversion; always exact.
void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEUint32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
858
859
// float64 -> int32 change conversion.
void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
864
865
// float64 -> uint32 change conversion.
void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
870
871
// float32 -> int32 truncation (round toward zero).
void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat32ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
876
877
// float32 -> uint32 truncation (round toward zero).
void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEFloat32ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
882
883
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000884void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
885 X64OperandGenerator g(this);
886 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
887 InstructionOperand outputs[2];
888 size_t output_count = 0;
889 outputs[output_count++] = g.DefineAsRegister(node);
890
891 Node* success_output = NodeProperties::FindProjection(node, 1);
892 if (success_output) {
893 outputs[output_count++] = g.DefineAsRegister(success_output);
894 }
895
896 Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
897}
898
899
900void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
901 X64OperandGenerator g(this);
902 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
903 InstructionOperand outputs[2];
904 size_t output_count = 0;
905 outputs[output_count++] = g.DefineAsRegister(node);
906
907 Node* success_output = NodeProperties::FindProjection(node, 1);
908 if (success_output) {
909 outputs[output_count++] = g.DefineAsRegister(success_output);
910 }
911
912 Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
913}
914
915
916void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
917 X64OperandGenerator g(this);
918 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
919 InstructionOperand outputs[2];
920 size_t output_count = 0;
921 outputs[output_count++] = g.DefineAsRegister(node);
922
923 Node* success_output = NodeProperties::FindProjection(node, 1);
924 if (success_output) {
925 outputs[output_count++] = g.DefineAsRegister(success_output);
926 }
927
928 Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
929}
930
931
932void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
933 X64OperandGenerator g(this);
934 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
935 InstructionOperand outputs[2];
936 size_t output_count = 0;
937 outputs[output_count++] = g.DefineAsRegister(node);
938
939 Node* success_output = NodeProperties::FindProjection(node, 1);
940 if (success_output) {
941 outputs[output_count++] = g.DefineAsRegister(success_output);
942 }
943
944 Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
945}
946
947
// int32 -> int64 sign extension (movsxlq).
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
952
953
// uint32 -> uint64 zero extension. For inputs produced by 32-bit operations
// the extension is free (32-bit x64 instructions implicitly clear the upper
// half), so only a nop is emitted; otherwise an explicit movl does the
// zero-extending copy.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh: {
      // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
      // zero-extension is a no-op.
      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      return;
    }
    default:
      break;
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
989
990
namespace {

// Emits a single-output instruction whose input may be a register, immediate
// or slot ("RO" = register output, flexible operand).
void VisitRO(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


// Emits a single-output instruction whose input must be in a register
// ("RR" = register output, register input).
void VisitRR(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}


// Shared lowering for floating-point binops. The three-operand AVX form can
// write any register; the two-operand SSE form is destructive and must reuse
// the first input register as the destination.
void VisitFloatBinop(InstructionSelector* selector, Node* node,
                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
  InstructionOperand operand1 = g.Use(node->InputAt(1));
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
  } else {
    selector->Emit(sse_opcode, g.DefineAsRegister(node), operand0, operand1);
  }
}


// Shared lowering for floating-point unops (abs/neg); same AVX vs. SSE
// destination constraint as VisitFloatBinop.
void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
                    ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
  }
}

}  // namespace
1032
1033
// float64 -> float32 narrowing conversion (cvtsd2ss).
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRO(this, node, kSSEFloat64ToFloat32);
}
1037
1038
// float64 -> int32 truncation. JavaScript semantics (modulo 2^32 wrapping for
// out-of-range values) use the generic arch-level truncation stub; plain
// round-to-zero uses the direct SSE conversion.
void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
  switch (TruncationModeOf(node->op())) {
    case TruncationMode::kJavaScript:
      return VisitRR(this, node, kArchTruncateDoubleToI);
    case TruncationMode::kRoundToZero:
      return VisitRO(this, node, kSSEFloat64ToInt32);
  }
  UNREACHABLE();
}
1048
1049
// int64 -> int32 truncation. A covered 64-bit shift by exactly 32 is combined
// into a single shr, yielding the upper half directly; otherwise a movl
// performs the truncating copy.
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord64Sar:
      case IrOpcode::kWord64Shr: {
        Int64BinopMatcher m(value);
        if (m.right().Is(32)) {
          // Truncation discards the upper 32 bits, so sar and shr by 32 are
          // interchangeable here; shr is used for both.
          Emit(kX64Shr, g.DefineSameAsFirst(node),
               g.UseRegister(m.left().node()), g.TempImmediate(32));
          return;
        }
        break;
      }
      default:
        break;
    }
  }
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
1071
1072
// int32 -> float32 conversion (may round: float32 has 24 bits of mantissa).
void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt32ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1077
1078
// int64 -> float32 conversion (may round).
void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1083
1084
// int64 -> float64 conversion (may round for values beyond 2^53).
void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEInt64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1089
1090
// uint32 -> float32 conversion (may round).
void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kSSEUint32ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1095
1096
// uint64 -> float32 conversion. Needs a scratch register — presumably because
// the code generator synthesizes the unsigned conversion from the signed
// cvtsi2ss; confirm in code-generator-x64.
void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}
1103
1104
// uint64 -> float64 conversion; scratch register as in VisitRoundUint64ToFloat32.
void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister()};
  Emit(kSSEUint64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
       arraysize(temps), temps);
}
1111
1112
// Reinterprets float32 bits as int32 (no value conversion).
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastFI, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1117
1118
// Reinterprets float64 bits as int64 (no value conversion).
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastDL, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1123
1124
// Reinterprets int32 bits as float32 (no value conversion).
void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastIF, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1129
1130
// Reinterprets int64 bits as float64 (no value conversion).
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  X64OperandGenerator g(this);
  Emit(kX64BitcastLD, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
1135
1136
// float32 addition; AVX/SSE variant chosen in VisitFloatBinop.
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
}
1140
1141
1142void InstructionSelector::VisitFloat32Sub(Node* node) {
1143 X64OperandGenerator g(this);
1144 Float32BinopMatcher m(node);
1145 if (m.left().IsMinusZero()) {
1146 VisitFloatUnop(this, node, m.right().node(), kAVXFloat32Neg,
1147 kSSEFloat32Neg);
1148 return;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001149 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001150 VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
1151}
1152
1153
// float32 multiplication.
void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
}
1157
1158
// float32 division.
void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
}
1162
1163
// float32 maximum.
void InstructionSelector::VisitFloat32Max(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Max, kSSEFloat32Max);
}
1167
1168
// float32 minimum.
void InstructionSelector::VisitFloat32Min(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat32Min, kSSEFloat32Min);
}
1172
1173
// float32 absolute value (sign-bit clear).
void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
}
1177
1178
// float32 square root (sqrtss).
void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRO(this, node, kSSEFloat32Sqrt);
}
1182
1183
// float64 addition.
void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
}
1187
1188
// float64 subtraction with two pattern matches on a -0.0 left operand:
//  1. -0.0 - RoundDown(-0.0 - x) is the classic round-up idiom (ceil via
//     floor of the negation), selected as a single roundsd with kRoundUp.
//  2. Plain -0.0 - x is selected as a float negation (sign-bit flip).
// Anything else is a regular binop.
void InstructionSelector::VisitFloat64Sub(Node* node) {
  X64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    // Both CanCover checks ensure the intermediate nodes have no other uses,
    // so they can be fused into the single rounding instruction.
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          Emit(kSSEFloat64Round | MiscField::encode(kRoundUp),
               g.DefineAsRegister(node), g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    VisitFloatUnop(this, node, m.right().node(), kAVXFloat64Neg,
                   kSSEFloat64Neg);
    return;
  }
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}
1211
1212
// float64 multiplication.
void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
}
1216
1217
// float64 division.
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
}
1221
1222
// float64 modulus. Destructive on the first input; rax is reserved as a
// scratch register for the code generator — presumably for the x87 fprem
// status-word check (fnstsw ax); confirm in code-generator-x64.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  X64OperandGenerator g(this);
  InstructionOperand temps[] = {g.TempRegister(rax)};
  Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
       temps);
}
1230
1231
// float64 maximum.
void InstructionSelector::VisitFloat64Max(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Max, kSSEFloat64Max);
}
1235
1236
// float64 minimum.
void InstructionSelector::VisitFloat64Min(Node* node) {
  VisitFloatBinop(this, node, kAVXFloat64Min, kSSEFloat64Min);
}
1240
1241
// float64 absolute value (sign-bit clear).
void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
}
1245
1246
// float64 square root (sqrtsd).
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRO(this, node, kSSEFloat64Sqrt);
}
1250
1251
// float32 floor: roundss with round-down mode encoded in MiscField.
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundDown));
}
1255
1256
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001257void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1258 VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundDown));
1259}
1260
1261
// float32 ceil: roundss with round-up mode.
void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundUp));
}
1265
1266
// float64 ceil: roundsd with round-up mode.
void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundUp));
}
1270
1271
// float32 truncate: roundss with round-toward-zero mode.
void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundToZero));
}
1275
1276
// float64 truncate: roundsd with round-toward-zero mode.
void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundToZero));
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001280
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001281
// Round-ties-away is not supported on x64 (SSE4.1 roundsd has no such mode),
// so the machine-operator builder must not produce this node here.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}
1285
1286
// float32 round-half-to-even: roundss with round-to-nearest mode.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundToNearest));
}
1290
1291
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001292void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1293 VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundToNearest));
1294}
1295
1296
// Emits the instructions that place call arguments on the stack. C calls
// pre-allocate the outgoing area and poke arguments into fixed slots; JS-style
// calls push arguments right-to-left (hence base::Reversed).
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    // Reserve stack space for the C calling convention up front.
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // TODO(titzer): X64Push cannot handle stack->stack double moves
      // because there is no way to encode fixed double slots.
      // On Atom, pushing from memory is slow, so force a register there too.
      InstructionOperand value =
          g.CanBeImmediate(input.node())
              ? g.UseImmediate(input.node())
              : IsSupported(ATOM) ||
                        sequence()->IsFloat(GetVirtualRegister(input.node()))
                    ? g.UseRegister(input.node())
                    : g.Use(input.node());
      Emit(kX64Push, g.NoOutput(), value);
    }
  }
}
1335
1336
// x64 tail-call targets can always be materialized as immediates.
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }
1338
1339
namespace {

// Emits a comparison whose left operand is a covered load, folding the load's
// effective address into the compare as a memory operand. The flags
// continuation decides whether the result is a branch or a materialized
// boolean.
void VisitCompareWithMemoryOperand(InstructionSelector* selector,
                                   InstructionCode opcode, Node* left,
                                   InstructionOperand right,
                                   FlagsContinuation* cont) {
  DCHECK(left->opcode() == IrOpcode::kLoad);
  X64OperandGenerator g(selector);
  size_t input_count = 0;
  // Up to 3 address inputs + the right operand + 2 branch labels.
  InstructionOperand inputs[6];
  AddressingMode addressing_mode =
      g.GetEffectiveAddressMemoryOperand(left, inputs, &input_count);
  opcode |= AddressingModeField::encode(addressing_mode);
  opcode = cont->Encode(opcode);
  inputs[input_count++] = right;

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
    selector->Emit(opcode, 0, nullptr, input_count, inputs);
  } else {
    DCHECK(cont->IsSet());
    InstructionOperand output = g.DefineAsRegister(cont->result());
    selector->Emit(opcode, 1, &output, input_count, inputs);
  }
}

// Determines if {input} of {node} can be replaced by a memory operand.
// The load must be covered (only used by {node}) and its representation must
// match the width of the compare/test opcode.
bool CanUseMemoryOperand(InstructionSelector* selector, InstructionCode opcode,
                         Node* node, Node* input) {
  if (input->opcode() != IrOpcode::kLoad || !selector->CanCover(node, input)) {
    return false;
  }
  MachineRepresentation rep =
      LoadRepresentationOf(input->op()).representation();
  if (rep == MachineRepresentation::kWord64 ||
      rep == MachineRepresentation::kTagged) {
    return opcode == kX64Cmp || opcode == kX64Test;
  } else if (rep == MachineRepresentation::kWord32) {
    return opcode == kX64Cmp32 || opcode == kX64Test32;
  }
  return false;
}

// Shared routine for multiple compare operations.
// Operand-level overload: both sides already converted to instruction
// operands.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}


// Shared routine for multiple compare operations.
// Node-level overload: may swap commutative operands for better allocation.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X64OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}

// Shared routine for multiple word compare operations.
// Every operand swap must also commute the continuation's condition unless
// the operator is commutative, since cmp/test order affects the flags.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // If one of the two inputs is an immediate, make sure it's on the right.
  if (!g.CanBeImmediate(right) && g.CanBeImmediate(left)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  // Match immediates on right side of comparison.
  if (g.CanBeImmediate(right)) {
    if (CanUseMemoryOperand(selector, opcode, node, left)) {
      return VisitCompareWithMemoryOperand(selector, opcode, left,
                                           g.UseImmediate(right), cont);
    }
    return VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right),
                        cont);
  }

  if (g.CanBeBetterLeftOperand(right)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    std::swap(left, right);
  }

  if (CanUseMemoryOperand(selector, opcode, node, left)) {
    return VisitCompareWithMemoryOperand(selector, opcode, left,
                                         g.UseRegister(right), cont);
  }
  return VisitCompare(selector, opcode, left, right, cont,
                      node->op()->HasProperty(Operator::kCommutative));
}

// Shared routine for 64-bit word comparison operations.
// Special-cases the JS stack-limit check (Load(js_stack_limit) vs. the stack
// pointer) as a dedicated kX64StackCheck instruction.
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX64StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else {
        DCHECK(cont->IsSet());
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX64Cmp, cont);
}


// Shared routine for comparison with zero.
void VisitCompareZero(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
}


// Shared routine for multiple float32 compare operations (inputs commuted).
// The operands are deliberately swapped; the caller's condition is chosen to
// match the commuted order.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);
  InstructionCode const opcode =
      selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
  VisitCompare(selector, opcode, right, left, cont, false);
}


// Shared routine for multiple float64 compare operations (inputs commuted).
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);
  InstructionCode const opcode =
      selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
  VisitCompare(selector, opcode, right, left, cont, false);
}

}  // namespace
1504
1505
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001506void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1507 BasicBlock* fbranch) {
1508 X64OperandGenerator g(this);
1509 Node* user = branch;
1510 Node* value = branch->InputAt(0);
1511
1512 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1513
1514 // Try to combine with comparisons against 0 by simply inverting the branch.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001515 while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
1516 Int32BinopMatcher m(value);
1517 if (m.right().Is(0)) {
1518 user = value;
1519 value = m.left().node();
1520 cont.Negate();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001521 } else {
1522 break;
1523 }
1524 }
1525
1526 // Try to combine the branch with a comparison.
1527 if (CanCover(user, value)) {
1528 switch (value->opcode()) {
1529 case IrOpcode::kWord32Equal:
1530 cont.OverwriteAndNegateIfEqual(kEqual);
1531 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1532 case IrOpcode::kInt32LessThan:
1533 cont.OverwriteAndNegateIfEqual(kSignedLessThan);
1534 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1535 case IrOpcode::kInt32LessThanOrEqual:
1536 cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1537 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1538 case IrOpcode::kUint32LessThan:
1539 cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
1540 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1541 case IrOpcode::kUint32LessThanOrEqual:
1542 cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1543 return VisitWordCompare(this, value, kX64Cmp32, &cont);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001544 case IrOpcode::kWord64Equal: {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001545 cont.OverwriteAndNegateIfEqual(kEqual);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001546 Int64BinopMatcher m(value);
1547 if (m.right().Is(0)) {
1548 // Try to combine the branch with a comparison.
1549 Node* const user = m.node();
1550 Node* const value = m.left().node();
1551 if (CanCover(user, value)) {
1552 switch (value->opcode()) {
1553 case IrOpcode::kInt64Sub:
1554 return VisitWord64Compare(this, value, &cont);
1555 case IrOpcode::kWord64And:
1556 return VisitWordCompare(this, value, kX64Test, &cont);
1557 default:
1558 break;
1559 }
1560 }
1561 return VisitCompareZero(this, value, kX64Cmp, &cont);
1562 }
1563 return VisitWord64Compare(this, value, &cont);
1564 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001565 case IrOpcode::kInt64LessThan:
1566 cont.OverwriteAndNegateIfEqual(kSignedLessThan);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001567 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001568 case IrOpcode::kInt64LessThanOrEqual:
1569 cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001570 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001571 case IrOpcode::kUint64LessThan:
1572 cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001573 return VisitWord64Compare(this, value, &cont);
1574 case IrOpcode::kUint64LessThanOrEqual:
1575 cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1576 return VisitWord64Compare(this, value, &cont);
1577 case IrOpcode::kFloat32Equal:
1578 cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
1579 return VisitFloat32Compare(this, value, &cont);
1580 case IrOpcode::kFloat32LessThan:
1581 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
1582 return VisitFloat32Compare(this, value, &cont);
1583 case IrOpcode::kFloat32LessThanOrEqual:
1584 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
1585 return VisitFloat32Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001586 case IrOpcode::kFloat64Equal:
1587 cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
1588 return VisitFloat64Compare(this, value, &cont);
1589 case IrOpcode::kFloat64LessThan:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001590 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001591 return VisitFloat64Compare(this, value, &cont);
1592 case IrOpcode::kFloat64LessThanOrEqual:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001593 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001594 return VisitFloat64Compare(this, value, &cont);
1595 case IrOpcode::kProjection:
1596 // Check if this is the overflow output projection of an
1597 // <Operation>WithOverflow node.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001598 if (ProjectionIndexOf(value->op()) == 1u) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001599 // We cannot combine the <Operation>WithOverflow with this branch
1600 // unless the 0th projection (the use of the actual value of the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001601 // <Operation> is either nullptr, which means there's no use of the
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001602 // actual value, or was already defined, which means it is scheduled
1603 // *AFTER* this branch).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001604 Node* const node = value->InputAt(0);
1605 Node* const result = NodeProperties::FindProjection(node, 0);
1606 if (result == nullptr || IsDefined(result)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001607 switch (node->opcode()) {
1608 case IrOpcode::kInt32AddWithOverflow:
1609 cont.OverwriteAndNegateIfEqual(kOverflow);
1610 return VisitBinop(this, node, kX64Add32, &cont);
1611 case IrOpcode::kInt32SubWithOverflow:
1612 cont.OverwriteAndNegateIfEqual(kOverflow);
1613 return VisitBinop(this, node, kX64Sub32, &cont);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001614 case IrOpcode::kInt64AddWithOverflow:
1615 cont.OverwriteAndNegateIfEqual(kOverflow);
1616 return VisitBinop(this, node, kX64Add, &cont);
1617 case IrOpcode::kInt64SubWithOverflow:
1618 cont.OverwriteAndNegateIfEqual(kOverflow);
1619 return VisitBinop(this, node, kX64Sub, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001620 default:
1621 break;
1622 }
1623 }
1624 }
1625 break;
1626 case IrOpcode::kInt32Sub:
1627 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1628 case IrOpcode::kInt64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001629 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001630 case IrOpcode::kWord32And:
1631 return VisitWordCompare(this, value, kX64Test32, &cont);
1632 case IrOpcode::kWord64And:
1633 return VisitWordCompare(this, value, kX64Test, &cont);
1634 default:
1635 break;
1636 }
1637 }
1638
1639 // Branch could not be combined with a compare, emit compare against 0.
1640 VisitCompareZero(this, value, kX64Cmp32, &cont);
1641}
1642
1643
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001644void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1645 X64OperandGenerator g(this);
1646 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1647
1648 // Emit either ArchTableSwitch or ArchLookupSwitch.
1649 size_t table_space_cost = 4 + sw.value_range;
1650 size_t table_time_cost = 3;
1651 size_t lookup_space_cost = 3 + 2 * sw.case_count;
1652 size_t lookup_time_cost = sw.case_count;
1653 if (sw.case_count > 4 &&
1654 table_space_cost + 3 * table_time_cost <=
1655 lookup_space_cost + 3 * lookup_time_cost &&
1656 sw.min_value > std::numeric_limits<int32_t>::min()) {
1657 InstructionOperand index_operand = g.TempRegister();
1658 if (sw.min_value) {
1659 // The leal automatically zero extends, so result is a valid 64-bit index.
1660 Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
1661 value_operand, g.TempImmediate(-sw.min_value));
1662 } else {
1663 // Zero extend, because we use it as 64-bit index into the jump table.
1664 Emit(kX64Movl, index_operand, value_operand);
1665 }
1666 // Generate a table lookup.
1667 return EmitTableSwitch(sw, index_operand);
1668 }
1669
1670 // Generate a sequence of conditional jumps.
1671 return EmitLookupSwitch(sw, value_operand);
1672}
1673
1674
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001675void InstructionSelector::VisitWord32Equal(Node* const node) {
1676 Node* user = node;
1677 FlagsContinuation cont(kEqual, node);
1678 Int32BinopMatcher m(user);
1679 if (m.right().Is(0)) {
1680 Node* value = m.left().node();
1681
1682 // Try to combine with comparisons against 0 by simply inverting the branch.
1683 while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
1684 Int32BinopMatcher m(value);
1685 if (m.right().Is(0)) {
1686 user = value;
1687 value = m.left().node();
1688 cont.Negate();
1689 } else {
1690 break;
1691 }
1692 }
1693
1694 // Try to combine the branch with a comparison.
1695 if (CanCover(user, value)) {
1696 switch (value->opcode()) {
1697 case IrOpcode::kInt32Sub:
1698 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1699 case IrOpcode::kWord32And:
1700 return VisitWordCompare(this, value, kX64Test32, &cont);
1701 default:
1702 break;
1703 }
1704 }
1705 return VisitCompareZero(this, value, kX64Cmp32, &cont);
1706 }
1707 VisitWordCompare(this, node, kX64Cmp32, &cont);
1708}
1709
1710
1711void InstructionSelector::VisitInt32LessThan(Node* node) {
1712 FlagsContinuation cont(kSignedLessThan, node);
1713 VisitWordCompare(this, node, kX64Cmp32, &cont);
1714}
1715
1716
1717void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1718 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1719 VisitWordCompare(this, node, kX64Cmp32, &cont);
1720}
1721
1722
1723void InstructionSelector::VisitUint32LessThan(Node* node) {
1724 FlagsContinuation cont(kUnsignedLessThan, node);
1725 VisitWordCompare(this, node, kX64Cmp32, &cont);
1726}
1727
1728
1729void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1730 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1731 VisitWordCompare(this, node, kX64Cmp32, &cont);
1732}
1733
1734
1735void InstructionSelector::VisitWord64Equal(Node* const node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001736 FlagsContinuation cont(kEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001737 Int64BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001738 if (m.right().Is(0)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001739 // Try to combine the equality check with a comparison.
1740 Node* const user = m.node();
1741 Node* const value = m.left().node();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001742 if (CanCover(user, value)) {
1743 switch (value->opcode()) {
1744 case IrOpcode::kInt64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001745 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001746 case IrOpcode::kWord64And:
1747 return VisitWordCompare(this, value, kX64Test, &cont);
1748 default:
1749 break;
1750 }
1751 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001752 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001753 VisitWord64Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001754}
1755
1756
1757void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001758 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001759 FlagsContinuation cont(kOverflow, ovf);
1760 VisitBinop(this, node, kX64Add32, &cont);
1761 }
1762 FlagsContinuation cont;
1763 VisitBinop(this, node, kX64Add32, &cont);
1764}
1765
1766
1767void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001768 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001769 FlagsContinuation cont(kOverflow, ovf);
1770 return VisitBinop(this, node, kX64Sub32, &cont);
1771 }
1772 FlagsContinuation cont;
1773 VisitBinop(this, node, kX64Sub32, &cont);
1774}
1775
1776
1777void InstructionSelector::VisitInt64LessThan(Node* node) {
1778 FlagsContinuation cont(kSignedLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001779 VisitWord64Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001780}
1781
1782
1783void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1784 FlagsContinuation cont(kSignedLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001785 VisitWord64Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001786}
1787
1788
1789void InstructionSelector::VisitUint64LessThan(Node* node) {
1790 FlagsContinuation cont(kUnsignedLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001791 VisitWord64Compare(this, node, &cont);
1792}
1793
1794
1795void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1796 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1797 VisitWord64Compare(this, node, &cont);
1798}
1799
1800
1801void InstructionSelector::VisitFloat32Equal(Node* node) {
1802 FlagsContinuation cont(kUnorderedEqual, node);
1803 VisitFloat32Compare(this, node, &cont);
1804}
1805
1806
1807void InstructionSelector::VisitFloat32LessThan(Node* node) {
1808 FlagsContinuation cont(kUnsignedGreaterThan, node);
1809 VisitFloat32Compare(this, node, &cont);
1810}
1811
1812
1813void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1814 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
1815 VisitFloat32Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001816}
1817
1818
1819void InstructionSelector::VisitFloat64Equal(Node* node) {
1820 FlagsContinuation cont(kUnorderedEqual, node);
1821 VisitFloat64Compare(this, node, &cont);
1822}
1823
1824
1825void InstructionSelector::VisitFloat64LessThan(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001826 FlagsContinuation cont(kUnsignedGreaterThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001827 VisitFloat64Compare(this, node, &cont);
1828}
1829
1830
1831void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001832 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001833 VisitFloat64Compare(this, node, &cont);
1834}
1835
1836
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001837void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1838 X64OperandGenerator g(this);
1839 Emit(kSSEFloat64ExtractLowWord32, g.DefineAsRegister(node),
1840 g.Use(node->InputAt(0)));
1841}
1842
1843
1844void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1845 X64OperandGenerator g(this);
1846 Emit(kSSEFloat64ExtractHighWord32, g.DefineAsRegister(node),
1847 g.Use(node->InputAt(0)));
1848}
1849
1850
1851void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1852 X64OperandGenerator g(this);
1853 Node* left = node->InputAt(0);
1854 Node* right = node->InputAt(1);
1855 Float64Matcher mleft(left);
1856 if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
1857 Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
1858 return;
1859 }
1860 Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
1861 g.UseRegister(left), g.Use(right));
1862}
1863
1864
1865void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1866 X64OperandGenerator g(this);
1867 Node* left = node->InputAt(0);
1868 Node* right = node->InputAt(1);
1869 Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
1870 g.UseRegister(left), g.Use(right));
1871}
1872
1873
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001874// static
1875MachineOperatorBuilder::Flags
1876InstructionSelector::SupportedMachineOperatorFlags() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001877 MachineOperatorBuilder::Flags flags =
1878 MachineOperatorBuilder::kFloat32Max |
1879 MachineOperatorBuilder::kFloat32Min |
1880 MachineOperatorBuilder::kFloat64Max |
1881 MachineOperatorBuilder::kFloat64Min |
1882 MachineOperatorBuilder::kWord32ShiftIsSafe |
1883 MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz;
1884 if (CpuFeatures::IsSupported(POPCNT)) {
1885 flags |= MachineOperatorBuilder::kWord32Popcnt |
1886 MachineOperatorBuilder::kWord64Popcnt;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001887 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001888 if (CpuFeatures::IsSupported(SSE4_1)) {
1889 flags |= MachineOperatorBuilder::kFloat32RoundDown |
1890 MachineOperatorBuilder::kFloat64RoundDown |
1891 MachineOperatorBuilder::kFloat32RoundUp |
1892 MachineOperatorBuilder::kFloat64RoundUp |
1893 MachineOperatorBuilder::kFloat32RoundTruncate |
1894 MachineOperatorBuilder::kFloat64RoundTruncate |
1895 MachineOperatorBuilder::kFloat32RoundTiesEven |
1896 MachineOperatorBuilder::kFloat64RoundTiesEven;
1897 }
1898 return flags;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001899}
1900
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001901} // namespace compiler
1902} // namespace internal
1903} // namespace v8