// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
9
namespace v8 {
namespace internal {
namespace compiler {
13
// Adds X87-specific methods for generating operands.
class X87OperandGenerator final : public OperandGenerator {
 public:
  explicit X87OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Returns a use operand for |node| that is byte-addressable.
  InstructionOperand UseByteRegister(Node* node) {
    // TODO(titzer): encode byte register use constraints.
    return UseFixed(node, edx);
  }

  // Returns a definition operand for |node| that is byte-addressable.
  InstructionOperand DefineAsByteRegister(Node* node) {
    // TODO(titzer): encode byte register def constraints.
    return DefineAsRegister(node);
  }

  // Wraps |imm| as an immediate operand in the instruction sequence.
  InstructionOperand CreateImmediate(int imm) {
    return sequence()->AddImmediate(Constant(imm));
  }

  // Returns true if |node| is a constant that can be encoded directly as an
  // instruction immediate.
  bool CanBeImmediate(Node* node) {
    switch (node->opcode()) {
      case IrOpcode::kInt32Constant:
      case IrOpcode::kNumberConstant:
      case IrOpcode::kExternalConstant:
        return true;
      case IrOpcode::kHeapConstant: {
        // Constants in new space cannot be used as immediates in V8 because
        // the GC does not scan code objects when collecting the new generation.
        Handle<HeapObject> value = OpParameter<Handle<HeapObject>>(node);
        Isolate* isolate = value->GetIsolate();
        return !isolate->heap()->InNewSpace(*value);
      }
      default:
        return false;
    }
  }

  // Appends the operands for a memory access of the form
  //   [base + index * (1 << scale) + displacement]
  // to |inputs|, bumping |*input_count|, and returns the matching addressing
  // mode.  Any of |base|, |index| and |displacement_node| may be null.
  AddressingMode GenerateMemoryOperandInputs(Node* index, int scale, Node* base,
                                             Node* displacement_node,
                                             InstructionOperand inputs[],
                                             size_t* input_count) {
    AddressingMode mode = kMode_MRI;
    int32_t displacement = (displacement_node == nullptr)
                               ? 0
                               : OpParameter<int32_t>(displacement_node);
    // A constant base is folded into the displacement.
    if (base != nullptr) {
      if (base->opcode() == IrOpcode::kInt32Constant) {
        displacement += OpParameter<int32_t>(base);
        base = nullptr;
      }
    }
    if (base != nullptr) {
      inputs[(*input_count)++] = UseRegister(base);
      if (index != nullptr) {
        DCHECK(scale >= 0 && scale <= 3);
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != 0) {
          // Base + scaled index + displacement.
          inputs[(*input_count)++] = TempImmediate(displacement);
          static const AddressingMode kMRnI_modes[] = {kMode_MR1I, kMode_MR2I,
                                                       kMode_MR4I, kMode_MR8I};
          mode = kMRnI_modes[scale];
        } else {
          // Base + scaled index.
          static const AddressingMode kMRn_modes[] = {kMode_MR1, kMode_MR2,
                                                      kMode_MR4, kMode_MR8};
          mode = kMRn_modes[scale];
        }
      } else {
        if (displacement == 0) {
          mode = kMode_MR;
        } else {
          inputs[(*input_count)++] = TempImmediate(displacement);
          mode = kMode_MRI;
        }
      }
    } else {
      DCHECK(scale >= 0 && scale <= 3);
      if (index != nullptr) {
        inputs[(*input_count)++] = UseRegister(index);
        if (displacement != 0) {
          inputs[(*input_count)++] = TempImmediate(displacement);
          // Note: scale 0 reuses the plain register+immediate encoding.
          static const AddressingMode kMnI_modes[] = {kMode_MRI, kMode_M2I,
                                                      kMode_M4I, kMode_M8I};
          mode = kMnI_modes[scale];
        } else {
          static const AddressingMode kMn_modes[] = {kMode_MR, kMode_M2,
                                                     kMode_M4, kMode_M8};
          mode = kMn_modes[scale];
        }
      } else {
        // Neither base nor index: absolute displacement only.
        inputs[(*input_count)++] = TempImmediate(displacement);
        return kMode_MI;
      }
    }
    return mode;
  }

  // Decomposes the address computation of load/store |node| into operands.
  // Falls back to a plain [register + register] form when the displacement
  // cannot be encoded as an immediate.
  AddressingMode GetEffectiveAddressMemoryOperand(Node* node,
                                                  InstructionOperand inputs[],
                                                  size_t* input_count) {
    BaseWithIndexAndDisplacement32Matcher m(node, true);
    DCHECK(m.matches());
    if ((m.displacement() == nullptr || CanBeImmediate(m.displacement()))) {
      return GenerateMemoryOperandInputs(m.index(), m.scale(), m.base(),
                                         m.displacement(), inputs, input_count);
    } else {
      inputs[(*input_count)++] = UseRegister(node->InputAt(0));
      inputs[(*input_count)++] = UseRegister(node->InputAt(1));
      return kMode_MR1;
    }
  }

  // A node that is no longer live may be clobbered, which makes it the
  // preferred left operand of a two-operand instruction.
  bool CanBeBetterLeftOperand(Node* node) const {
    return !selector()->IsLive(node);
  }
};
130
131
132void InstructionSelector::VisitLoad(Node* node) {
133 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
134
135 ArchOpcode opcode = kArchNop;
136 switch (load_rep.representation()) {
137 case MachineRepresentation::kFloat32:
138 opcode = kX87Movss;
139 break;
140 case MachineRepresentation::kFloat64:
141 opcode = kX87Movsd;
142 break;
143 case MachineRepresentation::kBit: // Fall through.
144 case MachineRepresentation::kWord8:
145 opcode = load_rep.IsSigned() ? kX87Movsxbl : kX87Movzxbl;
146 break;
147 case MachineRepresentation::kWord16:
148 opcode = load_rep.IsSigned() ? kX87Movsxwl : kX87Movzxwl;
149 break;
150 case MachineRepresentation::kTagged: // Fall through.
151 case MachineRepresentation::kWord32:
152 opcode = kX87Movl;
153 break;
154 case MachineRepresentation::kWord64: // Fall through.
155 case MachineRepresentation::kNone:
156 UNREACHABLE();
157 return;
158 }
159
160 X87OperandGenerator g(this);
161 InstructionOperand outputs[1];
162 outputs[0] = g.DefineAsRegister(node);
163 InstructionOperand inputs[3];
164 size_t input_count = 0;
165 AddressingMode mode =
166 g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
167 InstructionCode code = opcode | AddressingModeField::encode(mode);
168 Emit(code, 1, outputs, input_count, inputs);
169}
170
171
// Emits a store; stores of tagged values that need a GC write barrier are
// routed through the generic kArchStoreWithWriteBarrier instruction, all
// others use a plain machine-level move.
void InstructionSelector::VisitStore(Node* node) {
  X87OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    // Only tagged stores can require a write barrier.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    // Constant indices fold into the displacement; otherwise a second
    // (unique) register is needed.
    if (g.CanBeImmediate(index)) {
      inputs[input_count++] = g.UseImmediate(index);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(index);
      addressing_mode = kMode_MR1;
    }
    // Map writes get an ordinary register for the value; every other barrier
    // kind requires a unique register.
    inputs[input_count++] = (write_barrier_kind == kMapWriteBarrier)
                                ? g.UseRegister(value)
                                : g.UseUniqueRegister(value);
    // Translate the barrier kind into the record-write mode encoded in the
    // instruction's MiscField.
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    // Barrier-free path: pick the move opcode for the representation.
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kX87Movss;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kX87Movsd;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kX87Movb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kX87Movw;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord32:
        opcode = kX87Movl;
        break;
      case MachineRepresentation::kWord64:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }

    InstructionOperand val;
    if (g.CanBeImmediate(value)) {
      val = g.UseImmediate(value);
    } else if (rep == MachineRepresentation::kWord8 ||
               rep == MachineRepresentation::kBit) {
      // Byte stores need a byte-addressable source register.
      val = g.UseByteRegister(value);
    } else {
      val = g.UseRegister(value);
    }

    InstructionOperand inputs[4];
    size_t input_count = 0;
    AddressingMode addressing_mode =
        g.GetEffectiveAddressMemoryOperand(node, inputs, &input_count);
    InstructionCode code =
        opcode | AddressingModeField::encode(addressing_mode);
    // The value to store goes last, after the address operands.
    inputs[input_count++] = val;
    Emit(code, 0, static_cast<InstructionOperand*>(nullptr), input_count,
         inputs);
  }
}
266
267
268void InstructionSelector::VisitCheckedLoad(Node* node) {
269 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
270 X87OperandGenerator g(this);
271 Node* const buffer = node->InputAt(0);
272 Node* const offset = node->InputAt(1);
273 Node* const length = node->InputAt(2);
274 ArchOpcode opcode = kArchNop;
275 switch (load_rep.representation()) {
276 case MachineRepresentation::kWord8:
277 opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
278 break;
279 case MachineRepresentation::kWord16:
280 opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
281 break;
282 case MachineRepresentation::kWord32:
283 opcode = kCheckedLoadWord32;
284 break;
285 case MachineRepresentation::kFloat32:
286 opcode = kCheckedLoadFloat32;
287 break;
288 case MachineRepresentation::kFloat64:
289 opcode = kCheckedLoadFloat64;
290 break;
291 case MachineRepresentation::kBit: // Fall through.
292 case MachineRepresentation::kTagged: // Fall through.
293 case MachineRepresentation::kWord64: // Fall through.
294 case MachineRepresentation::kNone:
295 UNREACHABLE();
296 return;
297 }
298 InstructionOperand offset_operand = g.UseRegister(offset);
299 InstructionOperand length_operand =
300 g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
301 if (g.CanBeImmediate(buffer)) {
302 Emit(opcode | AddressingModeField::encode(kMode_MRI),
303 g.DefineAsRegister(node), offset_operand, length_operand,
304 offset_operand, g.UseImmediate(buffer));
305 } else {
306 Emit(opcode | AddressingModeField::encode(kMode_MR1),
307 g.DefineAsRegister(node), offset_operand, length_operand,
308 g.UseRegister(buffer), offset_operand);
309 }
310}
311
312
313void InstructionSelector::VisitCheckedStore(Node* node) {
314 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
315 X87OperandGenerator g(this);
316 Node* const buffer = node->InputAt(0);
317 Node* const offset = node->InputAt(1);
318 Node* const length = node->InputAt(2);
319 Node* const value = node->InputAt(3);
320 ArchOpcode opcode = kArchNop;
321 switch (rep) {
322 case MachineRepresentation::kWord8:
323 opcode = kCheckedStoreWord8;
324 break;
325 case MachineRepresentation::kWord16:
326 opcode = kCheckedStoreWord16;
327 break;
328 case MachineRepresentation::kWord32:
329 opcode = kCheckedStoreWord32;
330 break;
331 case MachineRepresentation::kFloat32:
332 opcode = kCheckedStoreFloat32;
333 break;
334 case MachineRepresentation::kFloat64:
335 opcode = kCheckedStoreFloat64;
336 break;
337 case MachineRepresentation::kBit: // Fall through.
338 case MachineRepresentation::kTagged: // Fall through.
339 case MachineRepresentation::kWord64: // Fall through.
340 case MachineRepresentation::kNone:
341 UNREACHABLE();
342 return;
343 }
344 InstructionOperand value_operand =
345 g.CanBeImmediate(value) ? g.UseImmediate(value)
346 : ((rep == MachineRepresentation::kWord8 ||
347 rep == MachineRepresentation::kBit)
348 ? g.UseByteRegister(value)
349 : g.UseRegister(value));
350 InstructionOperand offset_operand = g.UseRegister(offset);
351 InstructionOperand length_operand =
352 g.CanBeImmediate(length) ? g.UseImmediate(length) : g.UseRegister(length);
353 if (g.CanBeImmediate(buffer)) {
354 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
355 offset_operand, length_operand, value_operand, offset_operand,
356 g.UseImmediate(buffer));
357 } else {
358 Emit(opcode | AddressingModeField::encode(kMode_MR1), g.NoOutput(),
359 offset_operand, length_operand, value_operand, g.UseRegister(buffer),
360 offset_operand);
361 }
362}
363
364
// Shared routine for multiple binary operations.  Assembles the inputs and
// outputs for a two-operand ALU instruction and emits it, folding the flags
// continuation (branch targets or a materialized boolean) into the same
// instruction.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  Node* left = m.left().node();
  Node* right = m.right().node();
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  // TODO(turbofan): match complex addressing modes.
  if (left == right) {
    // If both inputs refer to the same operand, enforce allocating a register
    // for both of them to ensure that we don't end up generating code like
    // this:
    //
    //   mov eax, [ebp-0x10]
    //   add eax, [ebp-0x10]
    //   jo label
    InstructionOperand const input = g.UseRegister(left);
    inputs[input_count++] = input;
    inputs[input_count++] = input;
  } else if (g.CanBeImmediate(right)) {
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.UseImmediate(right);
  } else {
    // For commutative operations, prefer a no-longer-live node as the left
    // (clobbered) operand.
    if (node->op()->HasProperty(Operator::kCommutative) &&
        g.CanBeBetterLeftOperand(right)) {
      std::swap(left, right);
    }
    inputs[input_count++] = g.UseRegister(left);
    inputs[input_count++] = g.Use(right);
  }

  // A branch continuation contributes its two target labels as extra inputs.
  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  // Two-operand instructions overwrite their left input, hence same-as-first.
  outputs[output_count++] = g.DefineSameAsFirst(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  selector->Emit(cont->Encode(opcode), output_count, outputs, input_count,
                 inputs);
}
419
420
421// Shared routine for multiple binary operations.
422static void VisitBinop(InstructionSelector* selector, Node* node,
423 InstructionCode opcode) {
424 FlagsContinuation cont;
425 VisitBinop(selector, node, opcode, &cont);
426}
427
428
void InstructionSelector::VisitWord32And(Node* node) {
  // Bitwise AND via the shared binop path (immediates/commutation handled
  // there).
  VisitBinop(this, node, kX87And);
}
432
433
void InstructionSelector::VisitWord32Or(Node* node) {
  // Bitwise OR via the shared binop path.
  VisitBinop(this, node, kX87Or);
}
437
438
439void InstructionSelector::VisitWord32Xor(Node* node) {
440 X87OperandGenerator g(this);
441 Int32BinopMatcher m(node);
442 if (m.right().Is(-1)) {
443 Emit(kX87Not, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()));
444 } else {
445 VisitBinop(this, node, kX87Xor);
446 }
447}
448
449
450// Shared routine for multiple shift operations.
451static inline void VisitShift(InstructionSelector* selector, Node* node,
452 ArchOpcode opcode) {
453 X87OperandGenerator g(selector);
454 Node* left = node->InputAt(0);
455 Node* right = node->InputAt(1);
456
457 if (g.CanBeImmediate(right)) {
458 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
459 g.UseImmediate(right));
460 } else {
461 selector->Emit(opcode, g.DefineSameAsFirst(node), g.UseRegister(left),
462 g.UseFixed(right, ecx));
463 }
464}
465
466
467namespace {
468
469void VisitMulHigh(InstructionSelector* selector, Node* node,
470 ArchOpcode opcode) {
471 X87OperandGenerator g(selector);
472 selector->Emit(opcode, g.DefineAsFixed(node, edx),
473 g.UseFixed(node->InputAt(0), eax),
474 g.UseUniqueRegister(node->InputAt(1)));
475}
476
477
478void VisitDiv(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
479 X87OperandGenerator g(selector);
480 InstructionOperand temps[] = {g.TempRegister(edx)};
481 selector->Emit(opcode, g.DefineAsFixed(node, eax),
482 g.UseFixed(node->InputAt(0), eax),
483 g.UseUnique(node->InputAt(1)), arraysize(temps), temps);
484}
485
486
487void VisitMod(InstructionSelector* selector, Node* node, ArchOpcode opcode) {
488 X87OperandGenerator g(selector);
489 selector->Emit(opcode, g.DefineAsFixed(node, edx),
490 g.UseFixed(node->InputAt(0), eax),
491 g.UseUnique(node->InputAt(1)));
492}
493
494void EmitLea(InstructionSelector* selector, Node* result, Node* index,
495 int scale, Node* base, Node* displacement) {
496 X87OperandGenerator g(selector);
497 InstructionOperand inputs[4];
498 size_t input_count = 0;
499 AddressingMode mode = g.GenerateMemoryOperandInputs(
500 index, scale, base, displacement, inputs, &input_count);
501
502 DCHECK_NE(0u, input_count);
503 DCHECK_GE(arraysize(inputs), input_count);
504
505 InstructionOperand outputs[1];
506 outputs[0] = g.DefineAsRegister(result);
507
508 InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea;
509
510 selector->Emit(opcode, 1, outputs, input_count, inputs);
511}
512
513} // namespace
514
515
516void InstructionSelector::VisitWord32Shl(Node* node) {
517 Int32ScaleMatcher m(node, true);
518 if (m.matches()) {
519 Node* index = node->InputAt(0);
520 Node* base = m.power_of_two_plus_one() ? index : nullptr;
521 EmitLea(this, node, index, m.scale(), base, nullptr);
522 return;
523 }
524 VisitShift(this, node, kX87Shl);
525}
526
527
void InstructionSelector::VisitWord32Shr(Node* node) {
  // Logical right shift via the shared shift emission routine.
  VisitShift(this, node, kX87Shr);
}
531
532
void InstructionSelector::VisitWord32Sar(Node* node) {
  // Arithmetic right shift via the shared shift emission routine.
  VisitShift(this, node, kX87Sar);
}
536
537
void InstructionSelector::VisitWord32Ror(Node* node) {
  // Rotate right via the shared shift emission routine.
  VisitShift(this, node, kX87Ror);
}
541
542
543void InstructionSelector::VisitWord32Clz(Node* node) {
544 X87OperandGenerator g(this);
545 Emit(kX87Lzcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
546}
547
548
// Word32Ctz is not selectable for this backend; reaching here is a bug.
void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
550
551
552void InstructionSelector::VisitWord32Popcnt(Node* node) {
553 X87OperandGenerator g(this);
554 Emit(kX87Popcnt, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
555}
556
557
558void InstructionSelector::VisitInt32Add(Node* node) {
559 X87OperandGenerator g(this);
560
561 // Try to match the Add to a lea pattern
562 BaseWithIndexAndDisplacement32Matcher m(node);
563 if (m.matches() &&
564 (m.displacement() == nullptr || g.CanBeImmediate(m.displacement()))) {
565 InstructionOperand inputs[4];
566 size_t input_count = 0;
567 AddressingMode mode = g.GenerateMemoryOperandInputs(
568 m.index(), m.scale(), m.base(), m.displacement(), inputs, &input_count);
569
570 DCHECK_NE(0u, input_count);
571 DCHECK_GE(arraysize(inputs), input_count);
572
573 InstructionOperand outputs[1];
574 outputs[0] = g.DefineAsRegister(node);
575
576 InstructionCode opcode = AddressingModeField::encode(mode) | kX87Lea;
577 Emit(opcode, 1, outputs, input_count, inputs);
578 return;
579 }
580
581 // No lea pattern match, use add
582 VisitBinop(this, node, kX87Add);
583}
584
585
586void InstructionSelector::VisitInt32Sub(Node* node) {
587 X87OperandGenerator g(this);
588 Int32BinopMatcher m(node);
589 if (m.left().Is(0)) {
590 Emit(kX87Neg, g.DefineSameAsFirst(node), g.Use(m.right().node()));
591 } else {
592 VisitBinop(this, node, kX87Sub);
593 }
594}
595
596
597void InstructionSelector::VisitInt32Mul(Node* node) {
598 Int32ScaleMatcher m(node, true);
599 if (m.matches()) {
600 Node* index = node->InputAt(0);
601 Node* base = m.power_of_two_plus_one() ? index : nullptr;
602 EmitLea(this, node, index, m.scale(), base, nullptr);
603 return;
604 }
605 X87OperandGenerator g(this);
606 Node* left = node->InputAt(0);
607 Node* right = node->InputAt(1);
608 if (g.CanBeImmediate(right)) {
609 Emit(kX87Imul, g.DefineAsRegister(node), g.Use(left),
610 g.UseImmediate(right));
611 } else {
612 if (g.CanBeBetterLeftOperand(right)) {
613 std::swap(left, right);
614 }
615 Emit(kX87Imul, g.DefineSameAsFirst(node), g.UseRegister(left),
616 g.Use(right));
617 }
618}
619
620
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  // High 32 bits of the signed 64-bit product.
  VisitMulHigh(this, node, kX87ImulHigh);
}
624
625
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  // High 32 bits of the unsigned 64-bit product.
  VisitMulHigh(this, node, kX87UmulHigh);
}
629
630
void InstructionSelector::VisitInt32Div(Node* node) {
  // Signed 32-bit division via the shared div emission helper.
  VisitDiv(this, node, kX87Idiv);
}
634
635
void InstructionSelector::VisitUint32Div(Node* node) {
  // Unsigned 32-bit division via the shared div emission helper.
  VisitDiv(this, node, kX87Udiv);
}
639
640
void InstructionSelector::VisitInt32Mod(Node* node) {
  // Signed 32-bit modulus via the shared mod emission helper.
  VisitMod(this, node, kX87Idiv);
}
644
645
void InstructionSelector::VisitUint32Mod(Node* node) {
  // Unsigned 32-bit modulus via the shared mod emission helper.
  VisitMod(this, node, kX87Udiv);
}
649
650
651void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
652 X87OperandGenerator g(this);
653 Emit(kX87Float32ToFloat64, g.DefineAsFixed(node, stX_0),
654 g.Use(node->InputAt(0)));
655}
656
657
658void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
659 X87OperandGenerator g(this);
660 Emit(kX87Int32ToFloat64, g.DefineAsFixed(node, stX_0),
661 g.Use(node->InputAt(0)));
662}
663
664
665void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
666 X87OperandGenerator g(this);
667 Emit(kX87Uint32ToFloat64, g.DefineAsFixed(node, stX_0),
668 g.UseRegister(node->InputAt(0)));
669}
670
671
672void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
673 X87OperandGenerator g(this);
674 Emit(kX87Float64ToInt32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
675}
676
677
678void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
679 X87OperandGenerator g(this);
680 Emit(kX87Float64ToUint32, g.DefineAsRegister(node), g.Use(node->InputAt(0)));
681}
682
683
684void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
685 X87OperandGenerator g(this);
686 Emit(kX87Float64ToFloat32, g.DefineAsFixed(node, stX_0),
687 g.Use(node->InputAt(0)));
688}
689
690
691void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
692 X87OperandGenerator g(this);
693
694 switch (TruncationModeOf(node->op())) {
695 case TruncationMode::kJavaScript:
696 Emit(kArchTruncateDoubleToI, g.DefineAsRegister(node),
697 g.Use(node->InputAt(0)));
698 return;
699 case TruncationMode::kRoundToZero:
700 Emit(kX87Float64ToInt32, g.DefineAsRegister(node),
701 g.Use(node->InputAt(0)));
702 return;
703 }
704 UNREACHABLE();
705}
706
707
708void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
709 X87OperandGenerator g(this);
710 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
711 Emit(kX87BitcastFI, g.DefineAsRegister(node), 0, nullptr);
712}
713
714
715void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
716 X87OperandGenerator g(this);
717 Emit(kX87BitcastIF, g.DefineAsFixed(node, stX_0), g.Use(node->InputAt(0)));
718}
719
720
721void InstructionSelector::VisitFloat32Add(Node* node) {
722 X87OperandGenerator g(this);
723 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
724 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
725 Emit(kX87Float32Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
726}
727
728
729void InstructionSelector::VisitFloat64Add(Node* node) {
730 X87OperandGenerator g(this);
731 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
732 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
733 Emit(kX87Float64Add, g.DefineAsFixed(node, stX_0), 0, nullptr);
734}
735
736
737void InstructionSelector::VisitFloat32Sub(Node* node) {
738 X87OperandGenerator g(this);
739 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
740 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
741 Emit(kX87Float32Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
742}
743
744
745void InstructionSelector::VisitFloat64Sub(Node* node) {
746 X87OperandGenerator g(this);
747 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
748 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
749 Emit(kX87Float64Sub, g.DefineAsFixed(node, stX_0), 0, nullptr);
750}
751
752
753void InstructionSelector::VisitFloat32Mul(Node* node) {
754 X87OperandGenerator g(this);
755 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
756 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
757 Emit(kX87Float32Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
758}
759
760
761void InstructionSelector::VisitFloat64Mul(Node* node) {
762 X87OperandGenerator g(this);
763 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
764 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
765 Emit(kX87Float64Mul, g.DefineAsFixed(node, stX_0), 0, nullptr);
766}
767
768
769void InstructionSelector::VisitFloat32Div(Node* node) {
770 X87OperandGenerator g(this);
771 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
772 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
773 Emit(kX87Float32Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
774}
775
776
777void InstructionSelector::VisitFloat64Div(Node* node) {
778 X87OperandGenerator g(this);
779 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
780 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
781 Emit(kX87Float64Div, g.DefineAsFixed(node, stX_0), 0, nullptr);
782}
783
784
785void InstructionSelector::VisitFloat64Mod(Node* node) {
786 X87OperandGenerator g(this);
787 InstructionOperand temps[] = {g.TempRegister(eax)};
788 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
789 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
790 Emit(kX87Float64Mod, g.DefineAsFixed(node, stX_0), 1, temps)->MarkAsCall();
791}
792
793
794void InstructionSelector::VisitFloat32Max(Node* node) {
795 X87OperandGenerator g(this);
796 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
797 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
798 Emit(kX87Float32Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
799}
800
801
802void InstructionSelector::VisitFloat64Max(Node* node) {
803 X87OperandGenerator g(this);
804 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
805 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
806 Emit(kX87Float64Max, g.DefineAsFixed(node, stX_0), 0, nullptr);
807}
808
809
810void InstructionSelector::VisitFloat32Min(Node* node) {
811 X87OperandGenerator g(this);
812 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
813 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
814 Emit(kX87Float32Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
815}
816
817
818void InstructionSelector::VisitFloat64Min(Node* node) {
819 X87OperandGenerator g(this);
820 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
821 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
822 Emit(kX87Float64Min, g.DefineAsFixed(node, stX_0), 0, nullptr);
823}
824
825
826void InstructionSelector::VisitFloat32Abs(Node* node) {
827 X87OperandGenerator g(this);
828 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
829 Emit(kX87Float32Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
830}
831
832
833void InstructionSelector::VisitFloat64Abs(Node* node) {
834 X87OperandGenerator g(this);
835 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
836 Emit(kX87Float64Abs, g.DefineAsFixed(node, stX_0), 0, nullptr);
837}
838
839
840void InstructionSelector::VisitFloat32Sqrt(Node* node) {
841 X87OperandGenerator g(this);
842 Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
843 Emit(kX87Float32Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
844}
845
846
847void InstructionSelector::VisitFloat64Sqrt(Node* node) {
848 X87OperandGenerator g(this);
849 Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
850 Emit(kX87Float64Sqrt, g.DefineAsFixed(node, stX_0), 0, nullptr);
851}
852
853
854void InstructionSelector::VisitFloat32RoundDown(Node* node) {
855 X87OperandGenerator g(this);
856 Emit(kX87Float32Round | MiscField::encode(kRoundDown),
857 g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
858}
859
860
861void InstructionSelector::VisitFloat64RoundDown(Node* node) {
862 X87OperandGenerator g(this);
863 Emit(kX87Float64Round | MiscField::encode(kRoundDown),
864 g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
865}
866
867
868void InstructionSelector::VisitFloat32RoundUp(Node* node) {
869 X87OperandGenerator g(this);
870 Emit(kX87Float32Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
871 g.Use(node->InputAt(0)));
872}
873
874
875void InstructionSelector::VisitFloat64RoundUp(Node* node) {
876 X87OperandGenerator g(this);
877 Emit(kX87Float64Round | MiscField::encode(kRoundUp), g.UseFixed(node, stX_0),
878 g.Use(node->InputAt(0)));
879}
880
881
882void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
883 X87OperandGenerator g(this);
884 Emit(kX87Float32Round | MiscField::encode(kRoundToZero),
885 g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
886}
887
888
889void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
890 X87OperandGenerator g(this);
891 Emit(kX87Float64Round | MiscField::encode(kRoundToZero),
892 g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
893}
894
895
// Round-ties-away is not selectable for this backend; reaching here is a bug.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}
899
900
901void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
902 X87OperandGenerator g(this);
903 Emit(kX87Float32Round | MiscField::encode(kRoundToNearest),
904 g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
905}
906
907
908void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
909 X87OperandGenerator g(this);
910 Emit(kX87Float64Round | MiscField::encode(kRoundToNearest),
911 g.UseFixed(node, stX_0), g.Use(node->InputAt(0)));
912}
913
914
// Moves call arguments into place before a call.  C calls reserve the stack
// area up front and poke arguments into slots; other calls push arguments
// (in reverse order) onto the stack.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  X87OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    InstructionOperand temps[] = {g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    // Reserve stack space for the C parameters; the count is carried in the
    // MiscField.
    Emit(kArchPrepareCallCFunction |
         MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr, temp_count, temps);

    // Poke any stack arguments.
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      if (input.node()) {
        int const slot = static_cast<int>(n);
        InstructionOperand value = g.CanBeImmediate(input.node())
                                       ? g.UseImmediate(input.node())
                                       : g.UseRegister(input.node());
        Emit(kX87Poke | MiscField::encode(slot), g.NoOutput(), value);
      }
    }
  } else {
    // Push any stack arguments.
    for (PushParameter input : base::Reversed(*arguments)) {
      // TODO(titzer): handle pushing double parameters.
      if (input.node() == nullptr) continue;
      // Immediates are pushed directly.  On Atom, or when the value is a
      // float, force a register; otherwise any operand kind (including a
      // memory slot) is acceptable.
      InstructionOperand value =
          g.CanBeImmediate(input.node())
              ? g.UseImmediate(input.node())
              : IsSupported(ATOM) ||
                        sequence()->IsFloat(GetVirtualRegister(input.node()))
                    ? g.UseRegister(input.node())
                    : g.Use(input.node());
      Emit(kX87Push, g.NoOutput(), value);
    }
  }
}
955
956
// Tail-call targets can be encoded directly as immediates on this target.
bool InstructionSelector::IsTailCallAddressImmediate() { return true; }
958
959
namespace {

// Shared routine for multiple compare operations.
//
// Emits |opcode| over the given operands, fused with the flags continuation:
// as a branch (true/false block labels appended) when the continuation is a
// branch, otherwise materializing the condition into a byte register.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(opcode), g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(cont->Encode(opcode), g.DefineAsByteRegister(cont->result()),
                   left, right);
  }
}


// Shared routine for multiple compare operations.
//
// Node-level wrapper: for commutative comparisons the inputs may be swapped
// so that the better candidate becomes the register (left) operand.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  Node* left, Node* right, FlagsContinuation* cont,
                  bool commutative) {
  X87OperandGenerator g(selector);
  if (commutative && g.CanBeBetterLeftOperand(right)) {
    std::swap(left, right);
  }
  VisitCompare(selector, opcode, g.UseRegister(left), g.Use(right), cont);
}


// Shared routine for multiple float32 compare operations (inputs commuted).
//
// Both inputs are pushed onto the x87 FPU stack before kX87Float32Cmp
// consumes them; the two pushes must stay in this exact order.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(0)));
  selector->Emit(kX87PushFloat32, g.NoOutput(), g.Use(node->InputAt(1)));
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kX87Float32Cmp), g.NoOutput(),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(cont->Encode(kX87Float32Cmp),
                   g.DefineAsByteRegister(cont->result()));
  }
}


// Shared routine for multiple float64 compare operations (inputs commuted).
// Same shape as the float32 variant above, with 64-bit pushes and compare.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(0)));
  selector->Emit(kX87PushFloat64, g.NoOutput(), g.Use(node->InputAt(1)));
  if (cont->IsBranch()) {
    selector->Emit(cont->Encode(kX87Float64Cmp), g.NoOutput(),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(cont->Encode(kX87Float64Cmp),
                   g.DefineAsByteRegister(cont->result()));
  }
}


// Shared routine for multiple word compare operations.
//
// Prefers encoding a constant input as an immediate operand. If only the
// left input is a constant, the continuation's condition is commuted
// (unless the operator itself is commutative) so the immediate can be
// placed on the right.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Node* const left = node->InputAt(0);
  Node* const right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right)) {
    VisitCompare(selector, opcode, g.Use(left), g.UseImmediate(right), cont);
  } else if (g.CanBeImmediate(left)) {
    if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
    VisitCompare(selector, opcode, g.Use(right), g.UseImmediate(left), cont);
  } else {
    VisitCompare(selector, opcode, left, right, cont,
                 node->op()->HasProperty(Operator::kCommutative));
  }
}


// Word comparison dispatcher: recognizes the JS stack-limit check pattern
// Compare(Load(js_stack_limit), LoadStackPointer) and lowers it to the
// dedicated kX87StackCheck instruction; all other compares become kX87Cmp.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      FlagsContinuation* cont) {
  X87OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  if (m.left().IsLoad() && m.right().IsLoadStackPointer()) {
    LoadMatcher<ExternalReferenceMatcher> mleft(m.left().node());
    ExternalReference js_stack_limit =
        ExternalReference::address_of_stack_limit(selector->isolate());
    if (mleft.object().Is(js_stack_limit) && mleft.index().Is(0)) {
      // Compare(Load(js_stack_limit), LoadStackPointer)
      if (!node->op()->HasProperty(Operator::kCommutative)) cont->Commute();
      InstructionCode opcode = cont->Encode(kX87StackCheck);
      if (cont->IsBranch()) {
        selector->Emit(opcode, g.NoOutput(), g.Label(cont->true_block()),
                       g.Label(cont->false_block()));
      } else {
        DCHECK(cont->IsSet());
        // NOTE(review): other set-flag emissions in this file use
        // DefineAsByteRegister for the materialized result; confirm that
        // DefineAsRegister is intended here.
        selector->Emit(opcode, g.DefineAsRegister(cont->result()));
      }
      return;
    }
  }
  VisitWordCompare(selector, node, kX87Cmp, cont);
}


// Shared routine for word comparison with zero.
//
// Walks the chain of nodes covered by |user|, fusing the zero comparison
// with a preceding comparison, overflow projection, subtraction, or mask.
// Falls back to an explicit compare against immediate 0.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  // Try to combine the branch with a comparison.
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Try to combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      // The float helpers emit their inputs commuted (see above), hence the
      // reversed conditions used here.
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedGreaterThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either nullptr, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Add, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kX87Sub, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kWord32And:
        // (x & mask) compared with zero lowers to a test instruction.
        return VisitWordCompare(selector, value, kX87Test, cont);
      default:
        break;
    }
    break;
  }

  // Continuation could not be combined with a compare, emit compare against 0.
  X87OperandGenerator g(selector);
  VisitCompare(selector, kX87Cmp, g.Use(value), g.TempImmediate(0), cont);
}

}  // namespace
1160
1161
1162void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1163 BasicBlock* fbranch) {
1164 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1165 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
1166}
1167
1168
1169void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1170 X87OperandGenerator g(this);
1171 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1172
1173 // Emit either ArchTableSwitch or ArchLookupSwitch.
1174 size_t table_space_cost = 4 + sw.value_range;
1175 size_t table_time_cost = 3;
1176 size_t lookup_space_cost = 3 + 2 * sw.case_count;
1177 size_t lookup_time_cost = sw.case_count;
1178 if (sw.case_count > 4 &&
1179 table_space_cost + 3 * table_time_cost <=
1180 lookup_space_cost + 3 * lookup_time_cost &&
1181 sw.min_value > std::numeric_limits<int32_t>::min()) {
1182 InstructionOperand index_operand = value_operand;
1183 if (sw.min_value) {
1184 index_operand = g.TempRegister();
1185 Emit(kX87Lea | AddressingModeField::encode(kMode_MRI), index_operand,
1186 value_operand, g.TempImmediate(-sw.min_value));
1187 }
1188 // Generate a table lookup.
1189 return EmitTableSwitch(sw, index_operand);
1190 }
1191
1192 // Generate a sequence of conditional jumps.
1193 return EmitLookupSwitch(sw, value_operand);
1194}
1195
1196
1197void InstructionSelector::VisitWord32Equal(Node* const node) {
1198 FlagsContinuation cont(kEqual, node);
1199 Int32BinopMatcher m(node);
1200 if (m.right().Is(0)) {
1201 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
1202 }
1203 VisitWordCompare(this, node, &cont);
1204}
1205
1206
1207void InstructionSelector::VisitInt32LessThan(Node* node) {
1208 FlagsContinuation cont(kSignedLessThan, node);
1209 VisitWordCompare(this, node, &cont);
1210}
1211
1212
1213void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1214 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1215 VisitWordCompare(this, node, &cont);
1216}
1217
1218
1219void InstructionSelector::VisitUint32LessThan(Node* node) {
1220 FlagsContinuation cont(kUnsignedLessThan, node);
1221 VisitWordCompare(this, node, &cont);
1222}
1223
1224
1225void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1226 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
1227 VisitWordCompare(this, node, &cont);
1228}
1229
1230
1231void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1232 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1233 FlagsContinuation cont(kOverflow, ovf);
1234 return VisitBinop(this, node, kX87Add, &cont);
1235 }
1236 FlagsContinuation cont;
1237 VisitBinop(this, node, kX87Add, &cont);
1238}
1239
1240
1241void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1242 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1243 FlagsContinuation cont(kOverflow, ovf);
1244 return VisitBinop(this, node, kX87Sub, &cont);
1245 }
1246 FlagsContinuation cont;
1247 VisitBinop(this, node, kX87Sub, &cont);
1248}
1249
1250
1251void InstructionSelector::VisitFloat32Equal(Node* node) {
1252 FlagsContinuation cont(kUnorderedEqual, node);
1253 VisitFloat32Compare(this, node, &cont);
1254}
1255
1256
1257void InstructionSelector::VisitFloat32LessThan(Node* node) {
1258 FlagsContinuation cont(kUnsignedGreaterThan, node);
1259 VisitFloat32Compare(this, node, &cont);
1260}
1261
1262
1263void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1264 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
1265 VisitFloat32Compare(this, node, &cont);
1266}
1267
1268
1269void InstructionSelector::VisitFloat64Equal(Node* node) {
1270 FlagsContinuation cont(kUnorderedEqual, node);
1271 VisitFloat64Compare(this, node, &cont);
1272}
1273
1274
1275void InstructionSelector::VisitFloat64LessThan(Node* node) {
1276 FlagsContinuation cont(kUnsignedGreaterThan, node);
1277 VisitFloat64Compare(this, node, &cont);
1278}
1279
1280
1281void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
1282 FlagsContinuation cont(kUnsignedGreaterThanOrEqual, node);
1283 VisitFloat64Compare(this, node, &cont);
1284}
1285
1286
1287void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1288 X87OperandGenerator g(this);
1289 Emit(kX87Float64ExtractLowWord32, g.DefineAsRegister(node),
1290 g.Use(node->InputAt(0)));
1291}
1292
1293
1294void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1295 X87OperandGenerator g(this);
1296 Emit(kX87Float64ExtractHighWord32, g.DefineAsRegister(node),
1297 g.Use(node->InputAt(0)));
1298}
1299
1300
1301void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1302 X87OperandGenerator g(this);
1303 Node* left = node->InputAt(0);
1304 Node* right = node->InputAt(1);
1305 Emit(kX87Float64InsertLowWord32, g.UseFixed(node, stX_0), g.UseRegister(left),
1306 g.UseRegister(right));
1307}
1308
1309
1310void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1311 X87OperandGenerator g(this);
1312 Node* left = node->InputAt(0);
1313 Node* right = node->InputAt(1);
1314 Emit(kX87Float64InsertHighWord32, g.UseFixed(node, stX_0),
1315 g.UseRegister(left), g.UseRegister(right));
1316}
1317
1318
1319// static
1320MachineOperatorBuilder::Flags
1321InstructionSelector::SupportedMachineOperatorFlags() {
1322 MachineOperatorBuilder::Flags flags =
1323 MachineOperatorBuilder::kFloat32Max |
1324 MachineOperatorBuilder::kFloat32Min |
1325 MachineOperatorBuilder::kFloat64Max |
1326 MachineOperatorBuilder::kFloat64Min |
1327 MachineOperatorBuilder::kWord32ShiftIsSafe;
1328 if (CpuFeatures::IsSupported(POPCNT)) {
1329 flags |= MachineOperatorBuilder::kWord32Popcnt;
1330 }
1331
1332 flags |= MachineOperatorBuilder::kFloat32RoundDown |
1333 MachineOperatorBuilder::kFloat64RoundDown |
1334 MachineOperatorBuilder::kFloat32RoundUp |
1335 MachineOperatorBuilder::kFloat64RoundUp |
1336 MachineOperatorBuilder::kFloat32RoundTruncate |
1337 MachineOperatorBuilder::kFloat64RoundTruncate |
1338 MachineOperatorBuilder::kFloat32RoundTiesEven |
1339 MachineOperatorBuilder::kFloat64RoundTiesEven;
1340 return flags;
1341}
1342
1343} // namespace compiler
1344} // namespace internal
1345} // namespace v8