// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/bits.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"

namespace v8 {
namespace internal {
namespace compiler {

#define TRACE_UNIMPL() \
  PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)

#define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)


// Adds Mips-specific methods for generating InstructionOperands.
class MipsOperandGenerator FINAL : public OperandGenerator {
 public:
  explicit MipsOperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  InstructionOperand* UseOperand(Node* node, InstructionCode opcode) {
    if (CanBeImmediate(node, opcode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

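  // Returns true if |node| is a constant that fits the immediate field of the
  // instruction selected by |opcode|: shift amounts must fit in 5 bits, Xor
  // immediates are zero-extended 16-bit values, and everything else uses a
  // sign-extended 16-bit offset. Doubleword FP and checked float accesses are
  // checked at value + kIntSize, presumably so that the second word of the
  // access stays addressable as well.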
  bool CanBeImmediate(Node* node, InstructionCode opcode) {
    Int32Matcher m(node);
    if (!m.HasValue()) return false;
    int32_t value = m.Value();
    switch (ArchOpcodeField::decode(opcode)) {
      case kMipsShl:
      case kMipsSar:
      case kMipsShr:
        return is_uint5(value);
      case kMipsXor:
        return is_uint16(value);
      case kMipsLdc1:
      case kMipsSdc1:
      case kCheckedLoadFloat32:
      case kCheckedLoadFloat64:
      case kCheckedStoreFloat32:
      case kCheckedStoreFloat64:
        return is_int16(value + kIntSize);
      default:
        return is_int16(value);
    }
  }

 private:
  bool ImmediateFitsAddrMode1Instruction(int32_t imm) const {
    TRACE_UNIMPL();
    return false;
  }
};


static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode,
                     Node* node) {
  MipsOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseRegister(node->InputAt(1)));
}


static void VisitRR(InstructionSelector* selector, ArchOpcode opcode,
                    Node* node) {
  MipsOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}


static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode,
                     Node* node) {
  MipsOperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseOperand(node->InputAt(1), opcode));
}

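// Shared routine for binary operations: the left input always goes in a
// register, the right input may be an immediate when it fits |opcode|. Branch
// continuations append the true/false block labels as extra inputs, and set
// continuations define an extra boolean output register.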
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  MipsOperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand* inputs[4];
  size_t input_count = 0;
  InstructionOperand* outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  inputs[input_count++] = g.UseOperand(m.right().node(), opcode);

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0, input_count);
  DCHECK_NE(0, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
                                      outputs, input_count, inputs);
  if (cont->IsBranch()) instr->MarkAsControl();
}


static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}

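// Selects a MIPS load: the opcode is chosen from the load representation, and
// the index is folded into a [base + immediate] addressing mode when it fits,
// otherwise base and index are added into a temporary register and the load
// uses a zero offset.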
void InstructionSelector::VisitLoad(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node));
  MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node));
  MipsOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);

  ArchOpcode opcode;
  switch (rep) {
    case kRepFloat32:
      opcode = kMipsLwc1;
      break;
    case kRepFloat64:
      opcode = kMipsLdc1;
      break;
    case kRepBit:  // Fall through.
    case kRepWord8:
      opcode = typ == kTypeUint32 ? kMipsLbu : kMipsLb;
      break;
    case kRepWord16:
      opcode = typ == kTypeUint32 ? kMipsLhu : kMipsLh;
      break;
    case kRepTagged:  // Fall through.
    case kRepWord32:
      opcode = kMipsLw;
      break;
    default:
      UNREACHABLE();
      return;
  }

  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
  } else {
    InstructionOperand* addr_reg = g.TempRegister();
    Emit(kMipsAdd | AddressingModeField::encode(kMode_None), addr_reg,
         g.UseRegister(index), g.UseRegister(base));
    // Emit desired load opcode, using temp addr_reg.
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), addr_reg, g.TempImmediate(0));
  }
}

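// Selects a MIPS store. Tagged stores that require a full write barrier are
// lowered to kMipsStoreWriteBarrier with fixed registers; all other stores
// mirror VisitLoad, folding the index into a 16-bit offset when possible.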
void InstructionSelector::VisitStore(Node* node) {
  MipsOperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node);
  MachineType rep = RepresentationOf(store_rep.machine_type());
  if (store_rep.write_barrier_kind() == kFullWriteBarrier) {
    DCHECK(rep == kRepTagged);
    // TODO(dcarney): refactor RecordWrite function to take temp registers
    //                and pass them here instead of using fixed regs
    // TODO(dcarney): handle immediate indices.
    InstructionOperand* temps[] = {g.TempRegister(t1), g.TempRegister(t2)};
    Emit(kMipsStoreWriteBarrier, NULL, g.UseFixed(base, t0),
         g.UseFixed(index, t1), g.UseFixed(value, t2), arraysize(temps), temps);
    return;
  }
  DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind());

  ArchOpcode opcode;
  switch (rep) {
    case kRepFloat32:
      opcode = kMipsSwc1;
      break;
    case kRepFloat64:
      opcode = kMipsSdc1;
      break;
    case kRepBit:  // Fall through.
    case kRepWord8:
      opcode = kMipsSb;
      break;
    case kRepWord16:
      opcode = kMipsSh;
      break;
    case kRepTagged:  // Fall through.
    case kRepWord32:
      opcode = kMipsSw;
      break;
    default:
      UNREACHABLE();
      return;
  }

  if (g.CanBeImmediate(index, opcode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL,
         g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
  } else {
    InstructionOperand* addr_reg = g.TempRegister();
    Emit(kMipsAdd | AddressingModeField::encode(kMode_None), addr_reg,
         g.UseRegister(index), g.UseRegister(base));
    // Emit desired store opcode, using temp addr_reg.
    Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, addr_reg,
         g.TempImmediate(0), g.UseRegister(value));
  }
}


void InstructionSelector::VisitWord32And(Node* node) {
  VisitBinop(this, node, kMipsAnd);
}


void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kMipsOr);
}


void InstructionSelector::VisitWord32Xor(Node* node) {
  VisitBinop(this, node, kMipsXor);
}


void InstructionSelector::VisitWord32Shl(Node* node) {
  VisitRRO(this, kMipsShl, node);
}


void InstructionSelector::VisitWord32Shr(Node* node) {
  VisitRRO(this, kMipsShr, node);
}


void InstructionSelector::VisitWord32Sar(Node* node) {
  VisitRRO(this, kMipsSar, node);
}


void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kMipsRor, node);
}


void InstructionSelector::VisitInt32Add(Node* node) {
  MipsOperandGenerator g(this);

  // TODO(plind): Consider multiply & add optimization from arm port.
  VisitBinop(this, node, kMipsAdd);
}


void InstructionSelector::VisitInt32Sub(Node* node) {
  VisitBinop(this, node, kMipsSub);
}

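// Strength-reduces multiplication by a positive constant: a power of two
// becomes a single shift, 2^k + 1 becomes shift-then-add, and 2^k - 1 becomes
// shift-then-subtract. Everything else falls back to kMipsMul.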
void InstructionSelector::VisitInt32Mul(Node* node) {
  MipsOperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().HasValue() && m.right().Value() > 0) {
    int32_t value = m.right().Value();
    if (base::bits::IsPowerOfTwo32(value)) {
      Emit(kMipsShl | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value)));
      return;
    }
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      InstructionOperand* temp = g.TempRegister();
      Emit(kMipsShl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      Emit(kMipsAdd | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
      return;
    }
    if (base::bits::IsPowerOfTwo32(value + 1)) {
      InstructionOperand* temp = g.TempRegister();
      Emit(kMipsShl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      Emit(kMipsSub | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
      return;
    }
  }
  Emit(kMipsMul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt32MulHigh(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsMulHigh, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.UseRegister(node->InputAt(1)));
}


void InstructionSelector::VisitUint32MulHigh(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsMulHighU, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.UseRegister(node->InputAt(1)));
}


void InstructionSelector::VisitInt32Div(Node* node) {
  MipsOperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMipsDiv, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitUint32Div(Node* node) {
  MipsOperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMipsDivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitInt32Mod(Node* node) {
  MipsOperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMipsMod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitUint32Mod(Node* node) {
  MipsOperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMipsModU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}


void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsCvtDS, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsCvtDW, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsCvtDUw, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsTruncWD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsTruncUwD, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsCvtSD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitRRR(this, kMipsAddD, node);
}


void InstructionSelector::VisitFloat64Sub(Node* node) {
  VisitRRR(this, kMipsSubD, node);
}


void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kMipsMulD, node);
}


void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kMipsDivD, node);
}

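// Float64 modulus has no MIPS instruction, so it is lowered to a call: the
// inputs are fixed to the FP argument registers f12 and f14 and the result is
// defined in f0.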
void InstructionSelector::VisitFloat64Mod(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsModD, g.DefineAsFixed(node, f0), g.UseFixed(node->InputAt(0), f12),
       g.UseFixed(node->InputAt(1), f14))->MarkAsCall();
}


void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  MipsOperandGenerator g(this);
  Emit(kMipsSqrtD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
}


void InstructionSelector::VisitFloat64Floor(Node* node) {
  VisitRR(this, kMipsFloat64Floor, node);
}


void InstructionSelector::VisitFloat64Ceil(Node* node) {
  VisitRR(this, kMipsFloat64Ceil, node);
}


void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kMipsFloat64RoundTruncate, node);
}


void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}

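// Lowers a call: stack space for pushed arguments is claimed up front with
// kMipsStackClaim and the arguments are stored into their slots, then either
// kArchCallCodeObject or kArchCallJSFunction is emitted, carrying the call
// descriptor flags, and the instruction is marked as a call.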
void InstructionSelector::VisitCall(Node* node) {
  MipsOperandGenerator g(this);
  const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);

  FrameStateDescriptor* frame_state_descriptor = NULL;
  if (descriptor->NeedsFrameState()) {
    frame_state_descriptor =
        GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
  }

  CallBuffer buffer(zone(), descriptor, frame_state_descriptor);

  // Compute InstructionOperands for inputs and outputs.
  InitializeCallBuffer(node, &buffer, true, false);
  // Possibly align stack here for functions.
  int push_count = buffer.pushed_nodes.size();
  if (push_count > 0) {
    Emit(kMipsStackClaim | MiscField::encode(push_count), NULL);
  }
  int slot = buffer.pushed_nodes.size() - 1;
  for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
       input != buffer.pushed_nodes.rend(); input++) {
    Emit(kMipsStoreToStackSlot | MiscField::encode(slot), NULL,
         g.UseRegister(*input));
    slot--;
  }

  // Select the appropriate opcode based on the call type.
  InstructionCode opcode;
  switch (descriptor->kind()) {
    case CallDescriptor::kCallCodeObject: {
      opcode = kArchCallCodeObject;
      break;
    }
    case CallDescriptor::kCallJSFunction:
      opcode = kArchCallJSFunction;
      break;
    default:
      UNREACHABLE();
      return;
  }
  opcode |= MiscField::encode(descriptor->flags());

  // Emit the call instruction.
  InstructionOperand** first_output =
      buffer.outputs.size() > 0 ? &buffer.outputs.front() : NULL;
  Instruction* call_instr =
      Emit(opcode, buffer.outputs.size(), first_output,
           buffer.instruction_args.size(), &buffer.instruction_args.front());
  call_instr->MarkAsCall();
}

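// Bounds-checked load: the instruction receives the offset, the length to
// check against and the buffer base, and defines the result register. The
// offset (and, when the offset is not an immediate, the length) may be
// encoded as an immediate if it fits the opcode.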
void InstructionSelector::VisitCheckedLoad(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
  MachineType typ = TypeOf(OpParameter<MachineType>(node));
  MipsOperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode;
  switch (rep) {
    case kRepWord8:
      opcode = typ == kTypeInt32 ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case kRepWord16:
      opcode = typ == kTypeInt32 ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case kRepWord32:
      opcode = kCheckedLoadWord32;
      break;
    case kRepFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case kRepFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    default:
      UNREACHABLE();
      return;
  }
  InstructionOperand* offset_operand = g.CanBeImmediate(offset, opcode)
                                           ? g.UseImmediate(offset)
                                           : g.UseRegister(offset);

  InstructionOperand* length_operand =
      (!g.CanBeImmediate(offset, opcode)) ? g.CanBeImmediate(length, opcode)
                                                ? g.UseImmediate(length)
                                                : g.UseRegister(length)
                                          : g.UseRegister(length);

  Emit(opcode | AddressingModeField::encode(kMode_MRI),
       g.DefineAsRegister(node), offset_operand, length_operand,
       g.UseRegister(buffer));
}


void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
  MipsOperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode;
  switch (rep) {
    case kRepWord8:
      opcode = kCheckedStoreWord8;
      break;
    case kRepWord16:
      opcode = kCheckedStoreWord16;
      break;
    case kRepWord32:
      opcode = kCheckedStoreWord32;
      break;
    case kRepFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case kRepFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    default:
      UNREACHABLE();
      return;
  }
  InstructionOperand* offset_operand = g.CanBeImmediate(offset, opcode)
                                           ? g.UseImmediate(offset)
                                           : g.UseRegister(offset);

  InstructionOperand* length_operand =
      (!g.CanBeImmediate(offset, opcode)) ? g.CanBeImmediate(length, opcode)
                                                ? g.UseImmediate(length)
                                                : g.UseRegister(length)
                                          : g.UseRegister(length);

  Emit(opcode | AddressingModeField::encode(kMode_MRI), nullptr, offset_operand,
       length_operand, g.UseRegister(value), g.UseRegister(buffer));
}


namespace {

// Shared routine for multiple compare operations.
static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                         InstructionOperand* left, InstructionOperand* right,
                         FlagsContinuation* cont) {
  MipsOperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, NULL, left, right, g.Label(cont->true_block()),
                   g.Label(cont->false_block()))->MarkAsControl();
  } else {
    DCHECK(cont->IsSet());
    // TODO(plind): Revisit and test this path.
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}


// Shared routine for multiple float compare operations.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  MipsOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kMipsCmpD, g.UseRegister(left), g.UseRegister(right),
               cont);
}


// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative) {
  MipsOperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, opcode)) {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
                 cont);
  } else if (g.CanBeImmediate(left, opcode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
                 cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
                 cont);
  }
}


void VisitWordCompare(InstructionSelector* selector, Node* node,
                      FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kMipsCmp, cont, false);
}

}  // namespace


// Shared routine for word comparisons against zero.
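// The loop below walks nodes covered by |user|, folding comparisons, overflow
// projections and Word32And tests into the branch continuation; if nothing
// can be combined, an explicit compare against zero is emitted.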
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, FlagsContinuation* cont) {
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWordCompare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWordCompare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnorderedLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (OpParameter<size_t>(value) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation> is either NULL, which means there's no use of the
          // actual value, or was already defined, which means it is scheduled
          // *AFTER* this branch).
          Node* const node = value->InputAt(0);
          Node* const result = node->FindProjection(0);
          if (!result || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kMipsAddOvf, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop(selector, node, kMipsSubOvf, cont);
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kMipsTst, cont, true);
      default:
        break;
    }
    break;
  }

  // Continuation could not be combined with a compare, emit compare against 0.
  MipsOperandGenerator g(selector);
  InstructionCode const opcode = cont->Encode(kMipsCmp);
  InstructionOperand* const value_operand = g.UseRegister(value);
  if (cont->IsBranch()) {
    selector->Emit(opcode, nullptr, value_operand, g.TempImmediate(0),
                   g.Label(cont->true_block()),
                   g.Label(cont->false_block()))->MarkAsControl();
  } else {
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                   g.TempImmediate(0));
  }
}


void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
}


void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont(kSignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kSignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont(kUnsignedLessThan, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
  VisitWordCompare(this, node, &cont);
}


void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = node->FindProjection(1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kMipsAddOvf, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kMipsAddOvf, &cont);
}


void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = node->FindProjection(1)) {
    FlagsContinuation cont(kOverflow, ovf);
    return VisitBinop(this, node, kMipsSubOvf, &cont);
  }
  FlagsContinuation cont;
  VisitBinop(this, node, kMipsSubOvf, &cont);
}


void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont(kUnorderedEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont(kUnorderedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}


void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont(kUnorderedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}


// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  if (IsMipsArchVariant(kMips32r2) || IsMipsArchVariant(kMips32r6)) {
    return MachineOperatorBuilder::kFloat64Floor |
           MachineOperatorBuilder::kFloat64Ceil |
           MachineOperatorBuilder::kFloat64RoundTruncate;
  }
  return MachineOperatorBuilder::kNoFlags;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8