blob: c47a42eefe3da6684257f8cbfa9bdc6f3c2a661d [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include <algorithm>
6
7#include "src/base/adapters.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/compiler/instruction-selector-impl.h"
9#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000010#include "src/compiler/node-properties.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000011
12namespace v8 {
13namespace internal {
14namespace compiler {
15
// Adds X64-specific methods for generating operands.
class X64OperandGenerator final : public OperandGenerator {
 public:
  explicit X64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Returns true if |node| is a constant that can be encoded as a 32-bit
  // immediate of an x64 instruction.
  bool CanBeImmediate(Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
        return true;
      case IrOpcode::kInt64Constant: {
        // A 64-bit constant qualifies only if it round-trips through int32,
        // i.e. it is a sign-extended 32-bit value.
        const int64_t value = OpParameter<int64_t>(node);
        return value == static_cast<int64_t>(static_cast<int32_t>(value));
      }
      case IrOpcode::kNumberConstant: {
        // Only +0.0 (all bits zero) is usable as an immediate; -0.0 has a
        // different bit pattern and is rejected.
        const double value = OpParameter<double>(node);
        return bit_cast<int64_t>(value) == 0;
      }
      default:
        return false;
    }
  }

  // Appends the operands for a memory access of the form
  // [base + index*2^scale_exponent + displacement] to |inputs| (advancing
  // |input_count|) and returns the matching addressing mode. Any of |base|,
  // |index| and |displacement| may be null and is then omitted.
  AddressingMode GenerateMemoryOperandInputs(Node* index, int scale_exponent,
                                             Node* base, Node* displacement,
                                             InstructionOperand inputs[],
                                             size_t* input_count) {
    AddressingMode mode = kMode_MRI;
    if (base != nullptr) {
      inputs[(*input_count)++] = UseRegister(base);
      if (index != nullptr) {
        DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != nullptr) {
          inputs[(*input_count)++] = UseImmediate(displacement);
          static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
                                                       kMode_MR4I, kMode_MR8I};
          mode = kMRnI_modes[scale_exponent];
        } else {
          static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
                                                      kMode_MR4, kMode_MR8};
          mode = kMRn_modes[scale_exponent];
        }
      } else {
        // Base register only, with or without an immediate displacement.
        if (displacement == nullptr) {
          mode = kMode_MR;
        } else {
          inputs[(*input_count)++] = UseImmediate(displacement);
          mode = kMode_MRI;
        }
      }
    } else {
      // No base register: the address is index*2^scale (+ displacement).
      DCHECK_NOT_NULL(index);
      DCHECK(scale_exponent >= 0 && scale_exponent <= 3);
      inputs[(*input_count)++] = UseRegister(index);
      if (displacement != nullptr) {
        inputs[(*input_count)++] = UseImmediate(displacement);
        static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
                                                    kMode_M4I, kMode_M8I};
        mode = kMnI_modes[scale_exponent];
      } else {
        static const AddressingMode kMn_modes[] = {kMode_MR, kMode_MR1,
                                                   kMode_M4, kMode_M8};
        mode = kMn_modes[scale_exponent];
        if (mode == kMode_MR1) {
          // [%r1 + %r1*1] has a smaller encoding than [%r1*2+0]
          inputs[(*input_count)++] = UseRegister(index);
        }
      }
    }
    return mode;
  }

  // Decomposes the address computation rooted at |operand| into
  // base/index/scale/displacement form when the displacement fits in an
  // immediate; otherwise falls back to a plain register+register access on
  // the node's two inputs.
  AddressingMode GetEffectiveAddressMemoryOperand(Node* operand,
                                                  InstructionOperand inputs[],
                                                  size_t* input_count) {
    BaseWithIndexAndDisplacement64Matcher m(operand, true);
    DCHECK(m.matches());
    if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
      return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(),
                                         m.displacement(), inputs, input_count);
    } else {
      inputs[(*input_count)++] = UseRegister(operand->InputAt(0));
      inputs[(*input_count)++] = UseRegister(operand->InputAt(1));
      return kMode_MR1;
    }
  }

  // A node that is no longer live makes a better (clobberable) left operand
  // for commutative operations.
  bool CanBeBetterLeftOperand(Node* node) const {
    return !selector()->IsLive(node);
  }
};
108
109
110void InstructionSelector::VisitLoad(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000111 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000112 X64OperandGenerator g(this);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000113
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000114 ArchOpcode opcode = kArchNop;
115 switch (load_rep.representation()) {
116 case MachineRepresentation::kFloat32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000117 opcode = kX64Movss;
118 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000119 case MachineRepresentation::kFloat64:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000120 opcode = kX64Movsd;
121 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000122 case MachineRepresentation::kBit: // Fall through.
123 case MachineRepresentation::kWord8:
124 opcode = load_rep.IsSigned() ? kX64Movsxbl : kX64Movzxbl;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000125 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000126 case MachineRepresentation::kWord16:
127 opcode = load_rep.IsSigned() ? kX64Movsxwl : kX64Movzxwl;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000128 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000129 case MachineRepresentation::kWord32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000130 opcode = kX64Movl;
131 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000132 case MachineRepresentation::kTagged: // Fall through.
133 case MachineRepresentation::kWord64:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000134 opcode = kX64Movq;
135 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000136 case MachineRepresentation::kNone:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000137 UNREACHABLE();
138 return;
139 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400140
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000141 InstructionOperand outputs[1];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400142 outputs[0] = g.DefineAsRegister(node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000143 InstructionOperand inputs[3];
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400144 size_t input_count = 0;
145 AddressingMode mode =
146 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
147 InstructionCode code = opcode | AddressingModeField::encode(mode);
148 Emit(code, 1, outputs, input_count, inputs);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000149}
150
151
void InstructionSelector::VisitStore(Node* node) {
  // Stores take (base, index, value); tagged stores that require a write
  // barrier go through the record-write stub path, all others are plain
  // machine stores.
  X64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    // Write barriers only apply to tagged values.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    // Unique registers: the write-barrier code must not alias its operands.
    inputs[input_count++] = g.UseUniqueRegister(base);
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    // Map values can share a register; other values need a unique one.
    inputs[input_count++] = (write_barrier_kind == kMapWriteBarrier)
                                ? g.UseRegister(value)
                                : g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    // Encode addressing mode and record-write mode into the opcode.
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    // Plain store: select the move instruction for the representation.
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kX64Movss;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kX64Movsd;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kX64Movb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kX64Movw;
        break;
      case MachineRepresentation::kWord32:
        opcode = kX64Movl;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kX64Movq;
        break;
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    // The stored value goes last, as an immediate when possible.
    InstructionOperand value_operand =
        g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
    inputs[input_count++] = value_operand;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}
239
240
void InstructionSelector::VisitCheckedLoad(Node* node) {
  // Bounds-checked load: inputs are (buffer, offset, length).
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  // If the offset is (x + constant) with a non-negative constant that is
  // covered by the constant length, fold the constant into the instruction
  // as an immediate.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length));
      return;
    }
  }
  // Generic form: zero immediate offset, register or immediate length.
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.DefineAsRegister(node), g.UseRegister(buffer),
       g.UseRegister(offset), g.TempImmediate(0), length_operand);
}
290
291
void InstructionSelector::VisitCheckedStore(Node* node) {
  // Bounds-checked store: inputs are (buffer, offset, length, value).
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  X64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  InstructionOperand value_operand =
      g.CanBeImmediate(value) ? g.UseImmediate(value) : g.UseRegister(value);
  // Same constant-offset folding as VisitCheckedLoad: (x + constant) with a
  // non-negative constant covered by the constant length becomes an
  // immediate.
  if (offset->opcode() == IrOpcode::kInt32Add && CanCover(node, offset)) {
    Int32Matcher mlength(length);
    Int32BinopMatcher moffset(offset);
    if (mlength.HasValue() && moffset.right().HasValue() &&
        moffset.right().Value() >= 0 &&
        mlength.Value() >= moffset.right().Value()) {
      Emit(opcode, g.NoOutput(), g.UseRegister(buffer),
           g.UseRegister(moffset.left().node()),
           g.UseImmediate(moffset.right().node()), g.UseImmediate(length),
           value_operand);
      return;
    }
  }
  InstructionOperand length_operand =
      g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
  Emit(opcode, g.NoOutput(), g.UseRegister(buffer), g.UseRegister(offset),
       g.TempImmediate(0), length_operand, value_operand);
}
345
346
// Shared routine for multiple binary operations. Selects a two-address
// x64 binop (result reuses the left operand's register) and threads the
// flags continuation (branch or materialized boolean) into the instruction.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  if (left == right) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    // mov rax, [rbp-0x10]
    // add rax, [rbp-0x10]
    // jo label
    InstructionOperand const input = g.UseRegister(left);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (g.CanBeImmediate(right)) {
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    // For commutative operations, prefer putting a non-live node on the
    // left so its register can be clobbered by the result.
    if (node->op()->HasProperty(Operator::kCommutative) &&
        g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.Use(right);
  }

  // A branch continuation consumes the true/false target labels as extra
  // inputs; a set continuation defines an extra boolean output.
  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineSameAsFirst(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}
401
402
// Shared routine for multiple binary operations.
// Convenience overload for the common case of no flags continuation.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}
409
410
411void InstructionSelector::VisitWord32And(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000412 X64OperandGenerator g(this);
413 Uint32BinopMatcher m(node);
414 if (m.right().Is(0xff)) {
415 Emit(kX64Movzxbl, g.DefineAsRegister(node), g.Use(m.left().node()));
416 } else if (m.right().Is(0xffff)) {
417 Emit(kX64Movzxwl, g.DefineAsRegister(node), g.Use(m.left().node()));
418 } else {
419 VisitBinop(this, node, kX64And32);
420 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000421}
422
423
void InstructionSelector::VisitWord64And(Node* node) {
  // 64-bit bitwise AND: no special patterns, use the generic binop path.
  VisitBinop(this, node, kX64And);
}
427
428
void InstructionSelector::VisitWord32Or(Node* node) {
  // 32-bit bitwise OR via the generic binop path.
  VisitBinop(this, node, kX64Or32);
}
432
433
void InstructionSelector::VisitWord64Or(Node* node) {
  // 64-bit bitwise OR via the generic binop path.
  VisitBinop(this, node, kX64Or);
}
437
438
439void InstructionSelector::VisitWord32Xor(Node* node) {
440 X64OperandGenerator g(this);
441 Uint32BinopMatcher m(node);
442 if (m.right().Is(-1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400443 Emit(kX64Not32, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000444 } else {
445 VisitBinop(this, node, kX64Xor32);
446 }
447}
448
449
450void InstructionSelector::VisitWord64Xor(Node* node) {
451 X64OperandGenerator g(this);
452 Uint64BinopMatcher m(node);
453 if (m.right().Is(-1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400454 Emit(kX64Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000455 } else {
456 VisitBinop(this, node, kX64Xor);
457 }
458}
459
460
namespace {

// Shared routine for multiple 32-bit shift operations.
// TODO(bmeurer): Merge this with VisitWord64Shift using template magic?
void VisitWord32Shift(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    // Variable shift counts are fixed to rcx on x64.
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, rcx));
  }
}


// Shared routine for multiple 64-bit shift operations.
// TODO(bmeurer): Merge this with VisitWord32Shift using template magic?
void VisitWord64Shift(InstructionSelector* selector, Node* node,
                      ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();

  if (g.CanBeImmediate(right)) {
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseImmediate(right));
  } else {
    // An explicit "count & 0x3F" is redundant for 64-bit shifts (the
    // hardware masks the count to 6 bits), so shift by the unmasked value.
    if (m.right().IsWord64And()) {
      Int64BinopMatcher mright(right);
      if (mright.right().Is(0x3F)) {
        right = mright.left().node();
      }
    }
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.UseFixed(right, rcx));
  }
}


// Emits a lea computing result = base + index*2^scale + displacement,
// reusing the shared memory-operand generation to pick the addressing mode.
void EmitLea(InstructionSelector* selector, InstructionCode opcode,
             Node* result, Node* index, int scale, Node* base,
             Node* displacement) {
  X64OperandGenerator g(selector);

  InstructionOperand inputs[4];
  size_t input_count = 0;
  AddressingMode mode = g.GenerateMemoryOperandInputs(
      index, scale, base, displacement, inputs, &input_count);

  DCHECK_NE(0u, input_count);
  DCHECK_GE(arraysize(inputs), input_count);

  InstructionOperand outputs[1];
  outputs[0] = g.DefineAsRegister(result);

  opcode = AddressingModeField::encode(mode) | opcode;

  selector->Emit(opcode, 1, outputs, input_count, inputs);
}

}  // namespace
529
530
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000531void InstructionSelector::VisitWord32Shl(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400532 Int32ScaleMatcher m(node, true);
533 if (m.matches()) {
534 Node* index = node->InputAt(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000535 Node* base = m.power_of_two_plus_one() ? index : nullptr;
536 EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400537 return;
538 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000539 VisitWord32Shift(this, node, kX64Shl32);
540}
541
542
543void InstructionSelector::VisitWord64Shl(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400544 X64OperandGenerator g(this);
545 Int64BinopMatcher m(node);
546 if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
547 m.right().IsInRange(32, 63)) {
548 // There's no need to sign/zero-extend to 64-bit if we shift out the upper
549 // 32 bits anyway.
550 Emit(kX64Shl, g.DefineSameAsFirst(node),
551 g.UseRegister(m.left().node()->InputAt(0)),
552 g.UseImmediate(m.right().node()));
553 return;
554 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000555 VisitWord64Shift(this, node, kX64Shl);
556}
557
558
void InstructionSelector::VisitWord32Shr(Node* node) {
  // 32-bit logical right shift via the shared shift selection.
  VisitWord32Shift(this, node, kX64Shr32);
}
562
563
void InstructionSelector::VisitWord64Shr(Node* node) {
  // 64-bit logical right shift via the shared shift selection.
  VisitWord64Shift(this, node, kX64Shr);
}
567
568
void InstructionSelector::VisitWord32Sar(Node* node) {
  X64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  // (x << 16) >> 16 and (x << 24) >> 24 sign-extend the low word/byte;
  // select the corresponding movsx instruction instead of two shifts.
  if (CanCover(m.node(), m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      Emit(kX64Movsxwl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      Emit(kX64Movsxbl, g.DefineAsRegister(node), g.Use(mleft.left().node()));
      return;
    }
  }
  VisitWord32Shift(this, node, kX64Sar32);
}
584
585
void InstructionSelector::VisitWord64Sar(Node* node) {
  // 64-bit arithmetic right shift via the shared shift selection.
  VisitWord64Shift(this, node, kX64Sar);
}
589
590
void InstructionSelector::VisitWord32Ror(Node* node) {
  // 32-bit rotate right via the shared shift selection.
  VisitWord32Shift(this, node, kX64Ror32);
}
594
595
void InstructionSelector::VisitWord64Ror(Node* node) {
  // 64-bit rotate right via the shared shift selection.
  VisitWord64Shift(this, node, kX64Ror);
}
599
600
void InstructionSelector::VisitWord64Clz(Node* node) {
  // Count leading zeros (64-bit) via lzcnt.
  X64OperandGenerator g(this);
  Emit(kX64Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
605
606
void InstructionSelector::VisitWord32Clz(Node* node) {
  // Count leading zeros (32-bit) via lzcnt.
  X64OperandGenerator g(this);
  Emit(kX64Lzcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
611
612
void InstructionSelector::VisitWord64Ctz(Node* node) {
  // Count trailing zeros (64-bit) via tzcnt.
  X64OperandGenerator g(this);
  Emit(kX64Tzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
617
618
void InstructionSelector::VisitWord32Ctz(Node* node) {
  // Count trailing zeros (32-bit) via tzcnt.
  X64OperandGenerator g(this);
  Emit(kX64Tzcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
623
624
void InstructionSelector::VisitWord32Popcnt(Node* node) {
  // Population count (32-bit) via popcnt.
  X64OperandGenerator g(this);
  Emit(kX64Popcnt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
629
630
void InstructionSelector::VisitWord64Popcnt(Node* node) {
  // Population count (64-bit) via popcnt.
  X64OperandGenerator g(this);
  Emit(kX64Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}
635
636
void InstructionSelector::VisitInt32Add(Node* node) {
  X64OperandGenerator g(this);

  // Try to match the Add to a leal pattern
  BaseWithIndexAndDisplacement32Matcher m(node);
  if (m.matches() &&
      (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
    EmitLea(this, kX64Lea32, node, m.index(), m.scale(), m.base(),
            m.displacement());
    return;
  }

  // No leal pattern match, use addl
  VisitBinop(this, node, kX64Add32);
}
652
653
void InstructionSelector::VisitInt64Add(Node* node) {
  // 64-bit addition via the generic binop path.
  VisitBinop(this, node, kX64Add);
}
657
658
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000659void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
660 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
661 FlagsContinuation cont(kOverflow, ovf);
662 VisitBinop(this, node, kX64Add, &cont);
663 }
664 FlagsContinuation cont;
665 VisitBinop(this, node, kX64Add, &cont);
666}
667
668
void InstructionSelector::VisitInt32Sub(Node* node) {
  X64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().Is(0)) {
    // 0 - x is selected as a negation.
    Emit(kX64Neg32, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
  } else {
    if (m.right().HasValue() && g.CanBeImmediate(m.right().node())) {
      // Turn subtractions of constant values into immediate "leal" instructions
      // by negating the value.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(-m.right().Value()));
      return;
    }
    VisitBinop(this, node, kX64Sub32);
  }
}
686
687
688void InstructionSelector::VisitInt64Sub(Node* node) {
689 X64OperandGenerator g(this);
690 Int64BinopMatcher m(node);
691 if (m.left().Is(0)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400692 Emit(kX64Neg, g.DefineSameAsFirst(node), g.UseRegister(m.right().node()));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000693 } else {
694 VisitBinop(this, node, kX64Sub);
695 }
696}
697
698
void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  // If the overflow projection is used, select the flags-setting sub and
  // bind the overflow flag to that projection.
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kX64Sub, &cont);
  }
  // Overflow output unused: plain subtraction.
  FlagsContinuation cont;
  VisitBinop(this, node, kX64Sub, &cont);
}
707
708
namespace {

// Shared routine for 32/64-bit multiplications: three-operand imul when the
// right operand is an immediate, two-address form otherwise.
void VisitMul(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  if (g.CanBeImmediate(right)) {
    // imul reg, operand, imm: destination need not alias an input.
    selector->Emit(opcode, g.DefineAsRegister(node), g.Use(left),
                   g.UseImmediate(right));
  } else {
    // Prefer a non-live node as the (clobbered) left operand.
    if (g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
                   g.Use(right));
  }
}


// Shared routine for the high word of a widening multiply: one input is
// fixed in rax and the result is defined in rdx.
void VisitMulHigh(InstructionSelector* selector, Node* node,
                  ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (selector->IsLive(left) && !selector->IsLive(right)) {
    std::swap(left, right);
  }
  // TODO(turbofan): We use UseUniqueRegister here to improve register
  // allocation.
  selector->Emit(opcode, g.DefineAsFixed(node, rdx), g.UseFixed(left, rax),
                 g.UseUniqueRegister(right));
}


// Shared routine for divisions: dividend fixed in rax, quotient defined in
// rax, with rdx reserved as a temporary.
void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand temps[] = {g.TempRegister(rdx)};
  selector->Emit(
      opcode, g.DefineAsFixed(node, rax), g.UseFixed(node->InputAt(0), rax),
      g.UseUniqueRegister(node->InputAt(1)), arraysize(temps), temps);
}


// Shared routine for modulus: like VisitDiv, but the result (remainder) is
// defined in rdx.
void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsFixed(node, rdx),
                 g.UseFixed(node->InputAt(0), rax),
                 g.UseUniqueRegister(node->InputAt(1)));
}

}  // namespace
761
762
void InstructionSelector::VisitInt32Mul(Node* node) {
  // Try to match a multiplication by a small scale factor (2^k, or 2^k + 1
  // when power_of_two_plus_one() holds) onto a single lea.
  Int32ScaleMatcher m(node, true);
  if (m.matches()) {
    Node* index = node->InputAt(0);
    // For 2^k + 1 factors the same operand serves as both base and index.
    Node* base = m.power_of_two_plus_one() ? index : nullptr;
    EmitLea(this, kX64Lea32, node, index, m.scale(), base, nullptr);
    return;
  }
  VisitMul(this, node, kX64Imul32);
}
773
774
void InstructionSelector::VisitInt64Mul(Node* node) {
  // 64-bit multiply; delegates to the shared VisitMul helper.
  VisitMul(this, node, kX64Imul);
}
778
779
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  // Signed 32-bit multiply-high; delegates to the shared VisitMulHigh helper.
  VisitMulHigh(this, node, kX64ImulHigh32);
}
783
784
void InstructionSelector::VisitInt32Div(Node* node) {
  // Signed 32-bit division via the shared div emission helper.
  VisitDiv(this, node, kX64Idiv32);
}
788
789
void InstructionSelector::VisitInt64Div(Node* node) {
  // Signed 64-bit division via the shared div emission helper.
  VisitDiv(this, node, kX64Idiv);
}
793
794
void InstructionSelector::VisitUint32Div(Node* node) {
  // Unsigned 32-bit division via the shared div emission helper.
  VisitDiv(this, node, kX64Udiv32);
}
798
799
void InstructionSelector::VisitUint64Div(Node* node) {
  // Unsigned 64-bit division via the shared div emission helper.
  VisitDiv(this, node, kX64Udiv);
}
803
804
void InstructionSelector::VisitInt32Mod(Node* node) {
  // Signed 32-bit modulus: same idiv encoding, remainder taken from rdx.
  VisitMod(this, node, kX64Idiv32);
}
808
809
void InstructionSelector::VisitInt64Mod(Node* node) {
  // Signed 64-bit modulus: same idiv encoding, remainder taken from rdx.
  VisitMod(this, node, kX64Idiv);
}
813
814
void InstructionSelector::VisitUint32Mod(Node* node) {
  // Unsigned 32-bit modulus: same div encoding, remainder taken from rdx.
  VisitMod(this, node, kX64Udiv32);
}
818
819
void InstructionSelector::VisitUint64Mod(Node* node) {
  // Unsigned 64-bit modulus: same div encoding, remainder taken from rdx.
  VisitMod(this, node, kX64Udiv);
}
823
824
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  // Unsigned 32-bit multiply-high; delegates to the shared VisitMulHigh.
  VisitMulHigh(this, node, kX64UmulHigh32);
}
828
829
830void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
831 X64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000832 Emit(kSSEFloat32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400833}
834
835
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000836void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
837 X64OperandGenerator g(this);
838 Emit(kSSEInt32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
839}
840
841
842void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
843 X64OperandGenerator g(this);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400844 Emit(kSSEUint32ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000845}
846
847
848void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
849 X64OperandGenerator g(this);
850 Emit(kSSEFloat64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
851}
852
853
854void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
855 X64OperandGenerator g(this);
856 Emit(kSSEFloat64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
857}
858
859
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000860void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
861 X64OperandGenerator g(this);
862 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
863 InstructionOperand outputs[2];
864 size_t output_count = 0;
865 outputs[output_count++] = g.DefineAsRegister(node);
866
867 Node* success_output = NodeProperties::FindProjection(node, 1);
868 if (success_output) {
869 outputs[output_count++] = g.DefineAsRegister(success_output);
870 }
871
872 Emit(kSSEFloat32ToInt64, output_count, outputs, 1, inputs);
873}
874
875
876void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
877 X64OperandGenerator g(this);
878 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
879 InstructionOperand outputs[2];
880 size_t output_count = 0;
881 outputs[output_count++] = g.DefineAsRegister(node);
882
883 Node* success_output = NodeProperties::FindProjection(node, 1);
884 if (success_output) {
885 outputs[output_count++] = g.DefineAsRegister(success_output);
886 }
887
888 Emit(kSSEFloat64ToInt64, output_count, outputs, 1, inputs);
889}
890
891
892void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
893 X64OperandGenerator g(this);
894 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
895 InstructionOperand outputs[2];
896 size_t output_count = 0;
897 outputs[output_count++] = g.DefineAsRegister(node);
898
899 Node* success_output = NodeProperties::FindProjection(node, 1);
900 if (success_output) {
901 outputs[output_count++] = g.DefineAsRegister(success_output);
902 }
903
904 Emit(kSSEFloat32ToUint64, output_count, outputs, 1, inputs);
905}
906
907
908void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
909 X64OperandGenerator g(this);
910 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
911 InstructionOperand outputs[2];
912 size_t output_count = 0;
913 outputs[output_count++] = g.DefineAsRegister(node);
914
915 Node* success_output = NodeProperties::FindProjection(node, 1);
916 if (success_output) {
917 outputs[output_count++] = g.DefineAsRegister(success_output);
918 }
919
920 Emit(kSSEFloat64ToUint64, output_count, outputs, 1, inputs);
921}
922
923
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000924void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
925 X64OperandGenerator g(this);
926 Emit(kX64Movsxlq, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
927}
928
929
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  // Zero-extension of a 32-bit value to 64 bits. If the producing operation
  // already leaves the upper 32 bits cleared, no instruction is needed.
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  switch (value->opcode()) {
    case IrOpcode::kWord32And:
    case IrOpcode::kWord32Or:
    case IrOpcode::kWord32Xor:
    case IrOpcode::kWord32Shl:
    case IrOpcode::kWord32Shr:
    case IrOpcode::kWord32Sar:
    case IrOpcode::kWord32Ror:
    case IrOpcode::kWord32Equal:
    case IrOpcode::kInt32Add:
    case IrOpcode::kInt32Sub:
    case IrOpcode::kInt32Mul:
    case IrOpcode::kInt32MulHigh:
    case IrOpcode::kInt32Div:
    case IrOpcode::kInt32LessThan:
    case IrOpcode::kInt32LessThanOrEqual:
    case IrOpcode::kInt32Mod:
    case IrOpcode::kUint32Div:
    case IrOpcode::kUint32LessThan:
    case IrOpcode::kUint32LessThanOrEqual:
    case IrOpcode::kUint32Mod:
    case IrOpcode::kUint32MulHigh: {
      // These 32-bit operations implicitly zero-extend to 64-bit on x64, so the
      // zero-extension is a no-op.
      Emit(kArchNop, g.DefineSameAsFirst(node), g.Use(value));
      return;
    }
    default:
      break;
  }
  // Otherwise emit an explicit 32-bit move, which zero-extends on x64.
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
965
966
namespace {

// Shared routine: result in a register, input in any operand form.
void VisitRO(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
}


// Shared routine: result in a register, input constrained to a register.
void VisitRR(InstructionSelector* selector, Node* node,
             InstructionCode opcode) {
  X64OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}


// Shared routine for float binops. AVX forms are three-operand, so the
// destination may be any register; the SSE forms are two-operand and require
// the destination to alias the first input.
void VisitFloatBinop(InstructionSelector* selector, Node* node,
                     ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  InstructionOperand operand0 = g.UseRegister(node->InputAt(0));
  InstructionOperand operand1 = g.Use(node->InputAt(1));
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), operand0, operand1);
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), operand0, operand1);
  }
}


// Shared routine for float unops (abs/neg): same AVX/SSE destination
// constraint split as VisitFloatBinop.
void VisitFloatUnop(InstructionSelector* selector, Node* node, Node* input,
                    ArchOpcode avx_opcode, ArchOpcode sse_opcode) {
  X64OperandGenerator g(selector);
  if (selector->IsSupported(AVX)) {
    selector->Emit(avx_opcode, g.DefineAsRegister(node), g.Use(input));
  } else {
    selector->Emit(sse_opcode, g.DefineSameAsFirst(node), g.UseRegister(input));
  }
}

}  // namespace
1008
1009
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  // Narrowing float64 -> float32 conversion.
  VisitRO(this, node, kSSEFloat64ToFloat32);
}
1013
1014
void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
  // Dispatch on the requested truncation semantics: full JavaScript ToInt32
  // uses the generic arch truncation, plain round-to-zero uses the SSE
  // conversion directly.
  switch (TruncationModeOf(node->op())) {
    case TruncationMode::kJavaScript:
      return VisitRR(this, node, kArchTruncateDoubleToI);
    case TruncationMode::kRoundToZero:
      return VisitRO(this, node, kSSEFloat64ToInt32);
  }
  UNREACHABLE();
}
1024
1025
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  X64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord64Sar:
      case IrOpcode::kWord64Shr: {
        Int64BinopMatcher m(value);
        if (m.right().Is(32)) {
          // Truncate(x >> 32): extract the high word with a single shift. A
          // logical shift suffices even for kWord64Sar, because only the low
          // 32 bits of the shifted value survive the truncation.
          Emit(kX64Shr, g.DefineSameAsFirst(node),
               g.UseRegister(m.left().node()), g.TempImmediate(32));
          return;
        }
        break;
      }
      default:
        break;
    }
  }
  // Generic case: a 32-bit move discards the upper half.
  Emit(kX64Movl, g.DefineAsRegister(node), g.Use(value));
}
1047
1048
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001049void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001050 X64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001051 Emit(kSSEInt64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
1052}
1053
1054
1055void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1056 X64OperandGenerator g(this);
1057 Emit(kSSEInt64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
1058}
1059
1060
1061void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1062 X64OperandGenerator g(this);
1063 InstructionOperand temps[] = {g.TempRegister()};
1064 Emit(kSSEUint64ToFloat32, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
1065 arraysize(temps), temps);
1066}
1067
1068
1069void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1070 X64OperandGenerator g(this);
1071 InstructionOperand temps[] = {g.TempRegister()};
1072 Emit(kSSEUint64ToFloat64, g.DefineAsRegister(node), g.Use(node->InputAt(0)),
1073 arraysize(temps), temps);
1074}
1075
1076
1077void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1078 X64OperandGenerator g(this);
1079 Emit(kX64BitcastFI, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
1080}
1081
1082
1083void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1084 X64OperandGenerator g(this);
1085 Emit(kX64BitcastDL, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
1086}
1087
1088
1089void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1090 X64OperandGenerator g(this);
1091 Emit(kX64BitcastIF, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
1092}
1093
1094
1095void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1096 X64OperandGenerator g(this);
1097 Emit(kX64BitcastLD, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
1098}
1099
1100
void InstructionSelector::VisitFloat32Add(Node* node) {
  // float32 add; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat32Add, kSSEFloat32Add);
}
1104
1105
1106void InstructionSelector::VisitFloat32Sub(Node* node) {
1107 X64OperandGenerator g(this);
1108 Float32BinopMatcher m(node);
1109 if (m.left().IsMinusZero()) {
1110 VisitFloatUnop(this, node, m.right().node(), kAVXFloat32Neg,
1111 kSSEFloat32Neg);
1112 return;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001113 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001114 VisitFloatBinop(this, node, kAVXFloat32Sub, kSSEFloat32Sub);
1115}
1116
1117
void InstructionSelector::VisitFloat32Mul(Node* node) {
  // float32 multiply; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat32Mul, kSSEFloat32Mul);
}
1121
1122
void InstructionSelector::VisitFloat32Div(Node* node) {
  // float32 divide; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat32Div, kSSEFloat32Div);
}
1126
1127
void InstructionSelector::VisitFloat32Max(Node* node) {
  // float32 max; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat32Max, kSSEFloat32Max);
}
1131
1132
void InstructionSelector::VisitFloat32Min(Node* node) {
  // float32 min; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat32Min, kSSEFloat32Min);
}
1136
1137
void InstructionSelector::VisitFloat32Abs(Node* node) {
  // float32 absolute value; AVX or SSE encoding chosen by VisitFloatUnop.
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat32Abs, kSSEFloat32Abs);
}
1141
1142
void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  // float32 square root.
  VisitRO(this, node, kSSEFloat32Sqrt);
}
1146
1147
void InstructionSelector::VisitFloat64Add(Node* node) {
  // float64 add; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat64Add, kSSEFloat64Add);
}
1151
1152
void InstructionSelector::VisitFloat64Sub(Node* node) {
  X64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    // Pattern-match -0.0 - RoundDown(-0.0 - x) into RoundUp(x), i.e. a
    // single ceil operation, when the intermediate nodes are only used here.
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          Emit(kSSEFloat64Round | MiscField::encode(kRoundUp),
               g.DefineAsRegister(node), g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    // Plain -0.0 - x is a float negation.
    VisitFloatUnop(this, node, m.right().node(), kAVXFloat64Neg,
                   kSSEFloat64Neg);
    return;
  }
  VisitFloatBinop(this, node, kAVXFloat64Sub, kSSEFloat64Sub);
}
1175
1176
void InstructionSelector::VisitFloat64Mul(Node* node) {
  // float64 multiply; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat64Mul, kSSEFloat64Mul);
}
1180
1181
void InstructionSelector::VisitFloat64Div(Node* node) {
  // float64 divide; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat64Div, kSSEFloat64Div);
}
1185
1186
void InstructionSelector::VisitFloat64Mod(Node* node) {
  X64OperandGenerator g(this);
  // The kSSEFloat64Mod lowering needs rax as a scratch register — presumably
  // for the FPU status word; confirm against the code generator.
  InstructionOperand temps[] = {g.TempRegister(rax)};
  Emit(kSSEFloat64Mod, g.DefineSameAsFirst(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)), 1,
       temps);
}
1194
1195
void InstructionSelector::VisitFloat64Max(Node* node) {
  // float64 max; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat64Max, kSSEFloat64Max);
}
1199
1200
void InstructionSelector::VisitFloat64Min(Node* node) {
  // float64 min; AVX or SSE encoding chosen by VisitFloatBinop.
  VisitFloatBinop(this, node, kAVXFloat64Min, kSSEFloat64Min);
}
1204
1205
void InstructionSelector::VisitFloat64Abs(Node* node) {
  // float64 absolute value; AVX or SSE encoding chosen by VisitFloatUnop.
  VisitFloatUnop(this, node, node->InputAt(0), kAVXFloat64Abs, kSSEFloat64Abs);
}
1209
1210
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  // float64 square root.
  VisitRO(this, node, kSSEFloat64Sqrt);
}
1214
1215
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  // Round toward negative infinity (floor).
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundDown));
}
1219
1220
void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  // Round toward negative infinity (floor).
  VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundDown));
}
1224
1225
void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  // Round toward positive infinity (ceil).
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundUp));
}
1229
1230
void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  // Round toward positive infinity (ceil).
  VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundUp));
}
1234
1235
void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  // Round toward zero (truncate).
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundToZero));
}
1239
1240
void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  // Round toward zero (truncate).
  VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundToZero));
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001244
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001245
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  // Round-ties-away is not provided by this backend.
  UNREACHABLE();
}
1249
1250
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  // Round to nearest, ties to even.
  VisitRR(this, node, kSSEFloat32Round | MiscField::encode(kRoundToNearest));
}
1254
1255
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001256void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1257 VisitRR(this, node, kSSEFloat64Round | MiscField::encode(kRoundToNearest));
1258}
1259
1260
// Emits the instructions that place call arguments into their stack slots:
// reserve-and-poke for C calls, pushes (in reverse order) for everything else.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X64OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    // Reserve the stack area for all C parameters up front.
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX64Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // TODO(titzer): X64Push cannot handle stack->stack double moves
      // because there is no way to encode fixed double slots.
      // Forcing a register on ATOM, and for float values, avoids that case;
      // otherwise any operand (including memory) may be pushed directly.
      InstructionOperand value =
          g.CanBeImmediate(input.node())
              ? g.UseImmediate(input.node())
              : IsSupported(ATOM) ||
                        sequence()->IsFloat(GetVirtualRegister(input.node()))
                    ? g.UseRegister(input.node())
                    : g.Use(input.node());
      Emit(kX64Push, g.NoOutput(), value);
    }
  }
}
1299
1300
1301bool InstructionSelector::IsTailCallAddressImmediate() { return true; }
1302
1303
namespace {

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    // Branch continuation: the comparison feeds a conditional jump.
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    // Set continuation: materialize the condition into a register.
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}


// Shared routine for multiple compare operations (node operands).
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X64OperandGenerator g(selector);
  // For commutative comparisons, put the better register operand on the left.
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}


// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right)) {
    VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right), cont);
  } else if (g.CanBeImmediate(left)) {
    // Swap the operands so the immediate ends up on the right; commute the
    // condition unless the operator is commutative.
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    VisitCompare(selector, opcode, g.Use(right), g.UseImmediate(left), cont);
  } else {
    VisitCompare(selector, opcode, left, right, cont,
                 node->op()->HasProperty(Operator::kCommutative));
  }
}


// Shared routine for 64-bit word comparison operations.
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  Int64BinopMatcher m(node);
  // Recognize the stack-overflow check pattern and emit the dedicated
  // kX64StackCheck instruction for it.
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX64StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else {
        DCHECK(cont->IsSet());
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX64Cmp, cont);
}


// Shared routine for comparison with zero.
void VisitCompareZero(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X64OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.Use(node), g.TempImmediate(0), cont);
}


// Shared routine for multiple float32 compare operations (inputs commuted).
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);
  InstructionCode const opcode =
      selector->IsSupported(AVX) ? kAVXFloat32Cmp : kSSEFloat32Cmp;
  // Note: operands are deliberately emitted in swapped order.
  VisitCompare(selector, opcode, right, left, cont, false);
}


// Shared routine for multiple float64 compare operations (inputs commuted).
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);
  InstructionCode const opcode =
      selector->IsSupported(AVX) ? kAVXFloat64Cmp : kSSEFloat64Cmp;
  // Note: operands are deliberately emitted in swapped order.
  VisitCompare(selector, opcode, right, left, cont, false);
}

}  // namespace
1411
1412
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001413void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1414 BasicBlock* fbranch) {
1415 X64OperandGenerator g(this);
1416 Node* user = branch;
1417 Node* value = branch->InputAt(0);
1418
1419 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1420
1421 // Try to combine with comparisons against 0 by simply inverting the branch.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001422 while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
1423 Int32BinopMatcher m(value);
1424 if (m.right().Is(0)) {
1425 user = value;
1426 value = m.left().node();
1427 cont.Negate();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001428 } else {
1429 break;
1430 }
1431 }
1432
1433 // Try to combine the branch with a comparison.
1434 if (CanCover(user, value)) {
1435 switch (value->opcode()) {
1436 case IrOpcode::kWord32Equal:
1437 cont.OverwriteAndNegateIfEqual(kEqual);
1438 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1439 case IrOpcode::kInt32LessThan:
1440 cont.OverwriteAndNegateIfEqual(kSignedLessThan);
1441 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1442 case IrOpcode::kInt32LessThanOrEqual:
1443 cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1444 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1445 case IrOpcode::kUint32LessThan:
1446 cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
1447 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1448 case IrOpcode::kUint32LessThanOrEqual:
1449 cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1450 return VisitWordCompare(this, value, kX64Cmp32, &cont);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001451 case IrOpcode::kWord64Equal: {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001452 cont.OverwriteAndNegateIfEqual(kEqual);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001453 Int64BinopMatcher m(value);
1454 if (m.right().Is(0)) {
1455 // Try to combine the branch with a comparison.
1456 Node* const user = m.node();
1457 Node* const value = m.left().node();
1458 if (CanCover(user, value)) {
1459 switch (value->opcode()) {
1460 case IrOpcode::kInt64Sub:
1461 return VisitWord64Compare(this, value, &cont);
1462 case IrOpcode::kWord64And:
1463 return VisitWordCompare(this, value, kX64Test, &cont);
1464 default:
1465 break;
1466 }
1467 }
1468 return VisitCompareZero(this, value, kX64Cmp, &cont);
1469 }
1470 return VisitWord64Compare(this, value, &cont);
1471 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001472 case IrOpcode::kInt64LessThan:
1473 cont.OverwriteAndNegateIfEqual(kSignedLessThan);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001474 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001475 case IrOpcode::kInt64LessThanOrEqual:
1476 cont.OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001477 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001478 case IrOpcode::kUint64LessThan:
1479 cont.OverwriteAndNegateIfEqual(kUnsignedLessThan);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001480 return VisitWord64Compare(this, value, &cont);
1481 case IrOpcode::kUint64LessThanOrEqual:
1482 cont.OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1483 return VisitWord64Compare(this, value, &cont);
1484 case IrOpcode::kFloat32Equal:
1485 cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
1486 return VisitFloat32Compare(this, value, &cont);
1487 case IrOpcode::kFloat32LessThan:
1488 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
1489 return VisitFloat32Compare(this, value, &cont);
1490 case IrOpcode::kFloat32LessThanOrEqual:
1491 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
1492 return VisitFloat32Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001493 case IrOpcode::kFloat64Equal:
1494 cont.OverwriteAndNegateIfEqual(kUnorderedEqual);
1495 return VisitFloat64Compare(this, value, &cont);
1496 case IrOpcode::kFloat64LessThan:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001497 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001498 return VisitFloat64Compare(this, value, &cont);
1499 case IrOpcode::kFloat64LessThanOrEqual:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001500 cont.OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001501 return VisitFloat64Compare(this, value, &cont);
1502 case IrOpcode::kProjection:
1503 // Check if this is the overflow output projection of an
1504 // <Operation>WithOverflow node.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001505 if (ProjectionIndexOf(value->op()) == 1u) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001506 // We cannot combine the <Operation>WithOverflow with this branch
1507 // unless the 0th projection (the use of the actual value of the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001508 // <Operation> is either nullptr, which means there's no use of the
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001509 // actual value, or was already defined, which means it is scheduled
1510 // *AFTER* this branch).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001511 Node* const node = value->InputAt(0);
1512 Node* const result = NodeProperties::FindProjection(node, 0);
1513 if (result == nullptr || IsDefined(result)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001514 switch (node->opcode()) {
1515 case IrOpcode::kInt32AddWithOverflow:
1516 cont.OverwriteAndNegateIfEqual(kOverflow);
1517 return VisitBinop(this, node, kX64Add32, &cont);
1518 case IrOpcode::kInt32SubWithOverflow:
1519 cont.OverwriteAndNegateIfEqual(kOverflow);
1520 return VisitBinop(this, node, kX64Sub32, &cont);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001521 case IrOpcode::kInt64AddWithOverflow:
1522 cont.OverwriteAndNegateIfEqual(kOverflow);
1523 return VisitBinop(this, node, kX64Add, &cont);
1524 case IrOpcode::kInt64SubWithOverflow:
1525 cont.OverwriteAndNegateIfEqual(kOverflow);
1526 return VisitBinop(this, node, kX64Sub, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001527 default:
1528 break;
1529 }
1530 }
1531 }
1532 break;
1533 case IrOpcode::kInt32Sub:
1534 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1535 case IrOpcode::kInt64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001536 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001537 case IrOpcode::kWord32And:
1538 return VisitWordCompare(this, value, kX64Test32, &cont);
1539 case IrOpcode::kWord64And:
1540 return VisitWordCompare(this, value, kX64Test, &cont);
1541 default:
1542 break;
1543 }
1544 }
1545
1546 // Branch could not be combined with a compare, emit compare against 0.
1547 VisitCompareZero(this, value, kX64Cmp32, &cont);
1548}
1549
1550
// Selects either a jump table (ArchTableSwitch) or a chain of conditional
// jumps (ArchLookupSwitch) for a switch, based on a simple size/speed cost
// model over the case count and value range.
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  X64OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 4 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = g.TempRegister();
    if (sw.min_value) {
      // Rebase the value so the table starts at index 0.
      // The leal automatically zero extends, so result is a valid 64-bit index.
      Emit(kX64Lea32 | AddressingModeField::encode(kMode_MRI), index_operand,
           value_operand, g.TempImmediate(-sw.min_value));
    } else {
      // Zero extend, because we use it as 64-bit index into the jump table.
      Emit(kX64Movl, index_operand, value_operand);
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}
1580
1581
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001582void InstructionSelector::VisitWord32Equal(Node* const node) {
1583 Node* user = node;
1584 FlagsContinuation cont(kEqual, node);
1585 Int32BinopMatcher m(user);
1586 if (m.right().Is(0)) {
1587 Node* value = m.left().node();
1588
1589 // Try to combine with comparisons against 0 by simply inverting the branch.
1590 while (CanCover(user, value) && value->opcode() == IrOpcode::kWord32Equal) {
1591 Int32BinopMatcher m(value);
1592 if (m.right().Is(0)) {
1593 user = value;
1594 value = m.left().node();
1595 cont.Negate();
1596 } else {
1597 break;
1598 }
1599 }
1600
1601 // Try to combine the branch with a comparison.
1602 if (CanCover(user, value)) {
1603 switch (value->opcode()) {
1604 case IrOpcode::kInt32Sub:
1605 return VisitWordCompare(this, value, kX64Cmp32, &cont);
1606 case IrOpcode::kWord32And:
1607 return VisitWordCompare(this, value, kX64Test32, &cont);
1608 default:
1609 break;
1610 }
1611 }
1612 return VisitCompareZero(this, value, kX64Cmp32, &cont);
1613 }
1614 VisitWordCompare(this, node, kX64Cmp32, &cont);
1615}
1616
1617
1618void InstructionSelector::VisitInt32LessThan(Node* node) {
1619 FlagsContinuation cont(kSignedLessThan, node);
1620 VisitWordCompare(this, node, kX64Cmp32, &cont);
1621}
1622
1623
1624void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1625 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1626 VisitWordCompare(this, node, kX64Cmp32, &cont);
1627}
1628
1629
1630void InstructionSelector::VisitUint32LessThan(Node* node) {
1631 FlagsContinuation cont(kUnsignedLessThan, node);
1632 VisitWordCompare(this, node, kX64Cmp32, &cont);
1633}
1634
1635
1636void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1637 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1638 VisitWordCompare(this, node, kX64Cmp32, &cont);
1639}
1640
1641
1642void InstructionSelector::VisitWord64Equal(Node* const node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001643 FlagsContinuation cont(kEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001644 Int64BinopMatcher m(node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001645 if (m.right().Is(0)) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001646 // Try to combine the equality check with a comparison.
1647 Node* const user = m.node();
1648 Node* const value = m.left().node();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001649 if (CanCover(user, value)) {
1650 switch (value->opcode()) {
1651 case IrOpcode::kInt64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001652 return VisitWord64Compare(this, value, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001653 case IrOpcode::kWord64And:
1654 return VisitWordCompare(this, value, kX64Test, &cont);
1655 default:
1656 break;
1657 }
1658 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001659 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001660 VisitWord64Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001661}
1662
1663
1664void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001665 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001666 FlagsContinuation cont(kOverflow, ovf);
1667 VisitBinop(this, node, kX64Add32, &cont);
1668 }
1669 FlagsContinuation cont;
1670 VisitBinop(this, node, kX64Add32, &cont);
1671}
1672
1673
1674void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001675 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001676 FlagsContinuation cont(kOverflow, ovf);
1677 return VisitBinop(this, node, kX64Sub32, &cont);
1678 }
1679 FlagsContinuation cont;
1680 VisitBinop(this, node, kX64Sub32, &cont);
1681}
1682
1683
1684void InstructionSelector::VisitInt64LessThan(Node* node) {
1685 FlagsContinuation cont(kSignedLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001686 VisitWord64Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001687}
1688
1689
1690void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1691 FlagsContinuation cont(kSignedLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001692 VisitWord64Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001693}
1694
1695
1696void InstructionSelector::VisitUint64LessThan(Node* node) {
1697 FlagsContinuation cont(kUnsignedLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001698 VisitWord64Compare(this, node, &cont);
1699}
1700
1701
1702void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1703 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1704 VisitWord64Compare(this, node, &cont);
1705}
1706
1707
1708void InstructionSelector::VisitFloat32Equal(Node* node) {
1709 FlagsContinuation cont(kUnorderedEqual, node);
1710 VisitFloat32Compare(this, node, &cont);
1711}
1712
1713
1714void InstructionSelector::VisitFloat32LessThan(Node* node) {
1715 FlagsContinuation cont(kUnsignedGreaterThan, node);
1716 VisitFloat32Compare(this, node, &cont);
1717}
1718
1719
1720void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1721 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
1722 VisitFloat32Compare(this, node, &cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001723}
1724
1725
1726void InstructionSelector::VisitFloat64Equal(Node* node) {
1727 FlagsContinuation cont(kUnorderedEqual, node);
1728 VisitFloat64Compare(this, node, &cont);
1729}
1730
1731
1732void InstructionSelector::VisitFloat64LessThan(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001733 FlagsContinuation cont(kUnsignedGreaterThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001734 VisitFloat64Compare(this, node, &cont);
1735}
1736
1737
1738void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001739 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001740 VisitFloat64Compare(this, node, &cont);
1741}
1742
1743
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001744void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1745 X64OperandGenerator g(this);
1746 Emit(kSSEFloat64ExtractLowWord32, g.DefineAsRegister(node),
1747 g.Use(node->InputAt(0)));
1748}
1749
1750
1751void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1752 X64OperandGenerator g(this);
1753 Emit(kSSEFloat64ExtractHighWord32, g.DefineAsRegister(node),
1754 g.Use(node->InputAt(0)));
1755}
1756
1757
1758void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1759 X64OperandGenerator g(this);
1760 Node* left = node->InputAt(0);
1761 Node* right = node->InputAt(1);
1762 Float64Matcher mleft(left);
1763 if (mleft.HasValue() && (bit_cast<uint64_t>(mleft.Value()) >> 32) == 0u) {
1764 Emit(kSSEFloat64LoadLowWord32, g.DefineAsRegister(node), g.Use(right));
1765 return;
1766 }
1767 Emit(kSSEFloat64InsertLowWord32, g.DefineSameAsFirst(node),
1768 g.UseRegister(left), g.Use(right));
1769}
1770
1771
1772void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1773 X64OperandGenerator g(this);
1774 Node* left = node->InputAt(0);
1775 Node* right = node->InputAt(1);
1776 Emit(kSSEFloat64InsertHighWord32, g.DefineSameAsFirst(node),
1777 g.UseRegister(left), g.Use(right));
1778}
1779
1780
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001781// static
1782MachineOperatorBuilder::Flags
1783InstructionSelector::SupportedMachineOperatorFlags() {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001784 MachineOperatorBuilder::Flags flags =
1785 MachineOperatorBuilder::kFloat32Max |
1786 MachineOperatorBuilder::kFloat32Min |
1787 MachineOperatorBuilder::kFloat64Max |
1788 MachineOperatorBuilder::kFloat64Min |
1789 MachineOperatorBuilder::kWord32ShiftIsSafe |
1790 MachineOperatorBuilder::kWord32Ctz | MachineOperatorBuilder::kWord64Ctz;
1791 if (CpuFeatures::IsSupported(POPCNT)) {
1792 flags |= MachineOperatorBuilder::kWord32Popcnt |
1793 MachineOperatorBuilder::kWord64Popcnt;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001794 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001795 if (CpuFeatures::IsSupported(SSE4_1)) {
1796 flags |= MachineOperatorBuilder::kFloat32RoundDown |
1797 MachineOperatorBuilder::kFloat64RoundDown |
1798 MachineOperatorBuilder::kFloat32RoundUp |
1799 MachineOperatorBuilder::kFloat64RoundUp |
1800 MachineOperatorBuilder::kFloat32RoundTruncate |
1801 MachineOperatorBuilder::kFloat64RoundTruncate |
1802 MachineOperatorBuilder::kFloat32RoundTiesEven |
1803 MachineOperatorBuilder::kFloat64RoundTiesEven;
1804 }
1805 return flags;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001806}
1807
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001808} // namespace compiler
1809} // namespace internal
1810} // namespace v8