blob: 35ad16ba23b51c58ee299bc2bfe29fc85673eefe [file] [log] [blame]
// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/bits.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
8
9namespace v8 {
10namespace internal {
11namespace compiler {
12
13#define TRACE_UNIMPL() \
14 PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)
15
16#define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)
17
18
19// Adds Mips-specific methods for generating InstructionOperands.
20class Mips64OperandGenerator FINAL : public OperandGenerator {
21 public:
22 explicit Mips64OperandGenerator(InstructionSelector* selector)
23 : OperandGenerator(selector) {}
24
25 InstructionOperand* UseOperand(Node* node, InstructionCode opcode) {
26 if (CanBeImmediate(node, opcode)) {
27 return UseImmediate(node);
28 }
29 return UseRegister(node);
30 }
31
32 bool CanBeImmediate(Node* node, InstructionCode opcode) {
33 int64_t value;
34 if (node->opcode() == IrOpcode::kInt32Constant)
35 value = OpParameter<int32_t>(node);
36 else if (node->opcode() == IrOpcode::kInt64Constant)
37 value = OpParameter<int64_t>(node);
38 else
39 return false;
40 switch (ArchOpcodeField::decode(opcode)) {
41 case kMips64Shl:
42 case kMips64Sar:
43 case kMips64Shr:
44 return is_uint5(value);
45 case kMips64Dshl:
46 case kMips64Dsar:
47 case kMips64Dshr:
48 return is_uint6(value);
49 case kMips64Xor:
50 return is_uint16(value);
51 case kMips64Ldc1:
52 case kMips64Sdc1:
53 return is_int16(value + kIntSize);
54 default:
55 return is_int16(value);
56 }
57 }
58
59
60 bool CanBeImmediate(Node* node, InstructionCode opcode,
61 FlagsContinuation* cont) {
62 int64_t value;
63 if (node->opcode() == IrOpcode::kInt32Constant)
64 value = OpParameter<int32_t>(node);
65 else if (node->opcode() == IrOpcode::kInt64Constant)
66 value = OpParameter<int64_t>(node);
67 else
68 return false;
69 switch (ArchOpcodeField::decode(opcode)) {
70 case kMips64Cmp32:
71 switch (cont->condition()) {
72 case kUnsignedLessThan:
73 case kUnsignedGreaterThanOrEqual:
74 case kUnsignedLessThanOrEqual:
75 case kUnsignedGreaterThan:
76 // Immediate operands for unsigned 32-bit compare operations
77 // should not be sign-extended.
78 return is_uint15(value);
79 default:
80 return false;
81 }
82 default:
83 return is_int16(value);
84 }
85 }
86
87
88 private:
89 bool ImmediateFitsAddrMode1Instruction(int32_t imm) const {
90 TRACE_UNIMPL();
91 return false;
92 }
93};
94
95
96static void VisitRR(InstructionSelector* selector, ArchOpcode opcode,
97 Node* node) {
98 Mips64OperandGenerator g(selector);
99 selector->Emit(opcode, g.DefineAsRegister(node),
100 g.UseRegister(node->InputAt(0)));
101}
102
103
104static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode,
105 Node* node) {
106 Mips64OperandGenerator g(selector);
107 selector->Emit(opcode, g.DefineAsRegister(node),
108 g.UseRegister(node->InputAt(0)),
109 g.UseRegister(node->InputAt(1)));
110}
111
112
113static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode,
114 Node* node) {
115 Mips64OperandGenerator g(selector);
116 selector->Emit(opcode, g.DefineAsRegister(node),
117 g.UseRegister(node->InputAt(0)),
118 g.UseOperand(node->InputAt(1), opcode));
119}
120
121
122static void VisitBinop(InstructionSelector* selector, Node* node,
123 InstructionCode opcode, FlagsContinuation* cont) {
124 Mips64OperandGenerator g(selector);
125 Int32BinopMatcher m(node);
126 InstructionOperand* inputs[4];
127 size_t input_count = 0;
128 InstructionOperand* outputs[2];
129 size_t output_count = 0;
130
131 inputs[input_count++] = g.UseRegister(m.left().node());
132 inputs[input_count++] = g.UseOperand(m.right().node(), opcode);
133
134 if (cont->IsBranch()) {
135 inputs[input_count++] = g.Label(cont->true_block());
136 inputs[input_count++] = g.Label(cont->false_block());
137 }
138
139 outputs[output_count++] = g.DefineAsRegister(node);
140 if (cont->IsSet()) {
141 outputs[output_count++] = g.DefineAsRegister(cont->result());
142 }
143
144 DCHECK_NE(0, input_count);
145 DCHECK_NE(0, output_count);
146 DCHECK_GE(arraysize(inputs), input_count);
147 DCHECK_GE(arraysize(outputs), output_count);
148
149 Instruction* instr = selector->Emit(cont->Encode(opcode), output_count,
150 outputs, input_count, inputs);
151 if (cont->IsBranch()) instr->MarkAsControl();
152}
153
154
155static void VisitBinop(InstructionSelector* selector, Node* node,
156 InstructionCode opcode) {
157 FlagsContinuation cont;
158 VisitBinop(selector, node, opcode, &cont);
159}
160
161
162void InstructionSelector::VisitLoad(Node* node) {
163 MachineType rep = RepresentationOf(OpParameter<LoadRepresentation>(node));
164 MachineType typ = TypeOf(OpParameter<LoadRepresentation>(node));
165 Mips64OperandGenerator g(this);
166 Node* base = node->InputAt(0);
167 Node* index = node->InputAt(1);
168
169 ArchOpcode opcode;
170 switch (rep) {
171 case kRepFloat32:
172 opcode = kMips64Lwc1;
173 break;
174 case kRepFloat64:
175 opcode = kMips64Ldc1;
176 break;
177 case kRepBit: // Fall through.
178 case kRepWord8:
179 opcode = typ == kTypeUint32 ? kMips64Lbu : kMips64Lb;
180 break;
181 case kRepWord16:
182 opcode = typ == kTypeUint32 ? kMips64Lhu : kMips64Lh;
183 break;
184 case kRepWord32:
185 opcode = kMips64Lw;
186 break;
187 case kRepTagged: // Fall through.
188 case kRepWord64:
189 opcode = kMips64Ld;
190 break;
191 default:
192 UNREACHABLE();
193 return;
194 }
195
196 if (g.CanBeImmediate(index, opcode)) {
197 Emit(opcode | AddressingModeField::encode(kMode_MRI),
198 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
199 } else {
200 InstructionOperand* addr_reg = g.TempRegister();
201 Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg,
202 g.UseRegister(index), g.UseRegister(base));
203 // Emit desired load opcode, using temp addr_reg.
204 Emit(opcode | AddressingModeField::encode(kMode_MRI),
205 g.DefineAsRegister(node), addr_reg, g.TempImmediate(0));
206 }
207}
208
209
210void InstructionSelector::VisitStore(Node* node) {
211 Mips64OperandGenerator g(this);
212 Node* base = node->InputAt(0);
213 Node* index = node->InputAt(1);
214 Node* value = node->InputAt(2);
215
216 StoreRepresentation store_rep = OpParameter<StoreRepresentation>(node);
217 MachineType rep = RepresentationOf(store_rep.machine_type());
218 if (store_rep.write_barrier_kind() == kFullWriteBarrier) {
219 DCHECK(rep == kRepTagged);
220 // TODO(dcarney): refactor RecordWrite function to take temp registers
221 // and pass them here instead of using fixed regs
222 // TODO(dcarney): handle immediate indices.
223 InstructionOperand* temps[] = {g.TempRegister(t1), g.TempRegister(t2)};
224 Emit(kMips64StoreWriteBarrier, NULL, g.UseFixed(base, t0),
225 g.UseFixed(index, t1), g.UseFixed(value, t2), arraysize(temps), temps);
226 return;
227 }
228 DCHECK_EQ(kNoWriteBarrier, store_rep.write_barrier_kind());
229
230 ArchOpcode opcode;
231 switch (rep) {
232 case kRepFloat32:
233 opcode = kMips64Swc1;
234 break;
235 case kRepFloat64:
236 opcode = kMips64Sdc1;
237 break;
238 case kRepBit: // Fall through.
239 case kRepWord8:
240 opcode = kMips64Sb;
241 break;
242 case kRepWord16:
243 opcode = kMips64Sh;
244 break;
245 case kRepWord32:
246 opcode = kMips64Sw;
247 break;
248 case kRepTagged: // Fall through.
249 case kRepWord64:
250 opcode = kMips64Sd;
251 break;
252 default:
253 UNREACHABLE();
254 return;
255 }
256
257 if (g.CanBeImmediate(index, opcode)) {
258 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL,
259 g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
260 } else {
261 InstructionOperand* addr_reg = g.TempRegister();
262 Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg,
263 g.UseRegister(index), g.UseRegister(base));
264 // Emit desired store opcode, using temp addr_reg.
265 Emit(opcode | AddressingModeField::encode(kMode_MRI), NULL, addr_reg,
266 g.TempImmediate(0), g.UseRegister(value));
267 }
268}
269
270
271void InstructionSelector::VisitWord32And(Node* node) {
272 VisitBinop(this, node, kMips64And);
273}
274
275
276void InstructionSelector::VisitWord64And(Node* node) {
277 VisitBinop(this, node, kMips64And);
278}
279
280
281void InstructionSelector::VisitWord32Or(Node* node) {
282 VisitBinop(this, node, kMips64Or);
283}
284
285
286void InstructionSelector::VisitWord64Or(Node* node) {
287 VisitBinop(this, node, kMips64Or);
288}
289
290
291void InstructionSelector::VisitWord32Xor(Node* node) {
292 VisitBinop(this, node, kMips64Xor);
293}
294
295
296void InstructionSelector::VisitWord64Xor(Node* node) {
297 VisitBinop(this, node, kMips64Xor);
298}
299
300
301void InstructionSelector::VisitWord32Shl(Node* node) {
302 VisitRRO(this, kMips64Shl, node);
303}
304
305
306void InstructionSelector::VisitWord32Shr(Node* node) {
307 VisitRRO(this, kMips64Shr, node);
308}
309
310
311void InstructionSelector::VisitWord32Sar(Node* node) {
312 VisitRRO(this, kMips64Sar, node);
313}
314
315
316void InstructionSelector::VisitWord64Shl(Node* node) {
317 VisitRRO(this, kMips64Dshl, node);
318}
319
320
321void InstructionSelector::VisitWord64Shr(Node* node) {
322 VisitRRO(this, kMips64Dshr, node);
323}
324
325
326void InstructionSelector::VisitWord64Sar(Node* node) {
327 VisitRRO(this, kMips64Dsar, node);
328}
329
330
331void InstructionSelector::VisitWord32Ror(Node* node) {
332 VisitRRO(this, kMips64Ror, node);
333}
334
335
336void InstructionSelector::VisitWord64Ror(Node* node) {
337 VisitRRO(this, kMips64Dror, node);
338}
339
340
341void InstructionSelector::VisitInt32Add(Node* node) {
342 Mips64OperandGenerator g(this);
343 // TODO(plind): Consider multiply & add optimization from arm port.
344 VisitBinop(this, node, kMips64Add);
345}
346
347
348void InstructionSelector::VisitInt64Add(Node* node) {
349 Mips64OperandGenerator g(this);
350 // TODO(plind): Consider multiply & add optimization from arm port.
351 VisitBinop(this, node, kMips64Dadd);
352}
353
354
355void InstructionSelector::VisitInt32Sub(Node* node) {
356 VisitBinop(this, node, kMips64Sub);
357}
358
359
360void InstructionSelector::VisitInt64Sub(Node* node) {
361 VisitBinop(this, node, kMips64Dsub);
362}
363
364
365void InstructionSelector::VisitInt32Mul(Node* node) {
366 Mips64OperandGenerator g(this);
367 Int32BinopMatcher m(node);
368 if (m.right().HasValue() && m.right().Value() > 0) {
369 int32_t value = m.right().Value();
370 if (base::bits::IsPowerOfTwo32(value)) {
371 Emit(kMips64Shl | AddressingModeField::encode(kMode_None),
372 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
373 g.TempImmediate(WhichPowerOf2(value)));
374 return;
375 }
376 if (base::bits::IsPowerOfTwo32(value - 1)) {
377 InstructionOperand* temp = g.TempRegister();
378 Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp,
379 g.UseRegister(m.left().node()),
380 g.TempImmediate(WhichPowerOf2(value - 1)));
381 Emit(kMips64Add | AddressingModeField::encode(kMode_None),
382 g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
383 return;
384 }
385 if (base::bits::IsPowerOfTwo32(value + 1)) {
386 InstructionOperand* temp = g.TempRegister();
387 Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp,
388 g.UseRegister(m.left().node()),
389 g.TempImmediate(WhichPowerOf2(value + 1)));
390 Emit(kMips64Sub | AddressingModeField::encode(kMode_None),
391 g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
392 return;
393 }
394 }
395 Emit(kMips64Mul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
396 g.UseRegister(m.right().node()));
397}
398
399
400void InstructionSelector::VisitInt32MulHigh(Node* node) {
401 Mips64OperandGenerator g(this);
402 Emit(kMips64MulHigh, g.DefineAsRegister(node),
403 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
404}
405
406
407void InstructionSelector::VisitUint32MulHigh(Node* node) {
408 Mips64OperandGenerator g(this);
409 InstructionOperand* const dmul_operand = g.TempRegister();
410 Emit(kMips64MulHighU, dmul_operand, g.UseRegister(node->InputAt(0)),
411 g.UseRegister(node->InputAt(1)));
412 Emit(kMips64Ext, g.DefineAsRegister(node), dmul_operand, g.TempImmediate(0),
413 g.TempImmediate(32));
414}
415
416
417void InstructionSelector::VisitInt64Mul(Node* node) {
418 Mips64OperandGenerator g(this);
419 Int64BinopMatcher m(node);
420 // TODO(dusmil): Add optimization for shifts larger than 32.
421 if (m.right().HasValue() && m.right().Value() > 0) {
422 int64_t value = m.right().Value();
423 if (base::bits::IsPowerOfTwo32(value)) {
424 Emit(kMips64Dshl | AddressingModeField::encode(kMode_None),
425 g.DefineAsRegister(node), g.UseRegister(m.left().node()),
426 g.TempImmediate(WhichPowerOf2(value)));
427 return;
428 }
429 if (base::bits::IsPowerOfTwo32(value - 1)) {
430 InstructionOperand* temp = g.TempRegister();
431 Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp,
432 g.UseRegister(m.left().node()),
433 g.TempImmediate(WhichPowerOf2(value - 1)));
434 Emit(kMips64Dadd | AddressingModeField::encode(kMode_None),
435 g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
436 return;
437 }
438 if (base::bits::IsPowerOfTwo32(value + 1)) {
439 InstructionOperand* temp = g.TempRegister();
440 Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp,
441 g.UseRegister(m.left().node()),
442 g.TempImmediate(WhichPowerOf2(value + 1)));
443 Emit(kMips64Dsub | AddressingModeField::encode(kMode_None),
444 g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
445 return;
446 }
447 }
448 Emit(kMips64Dmul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
449 g.UseRegister(m.right().node()));
450}
451
452
453void InstructionSelector::VisitInt32Div(Node* node) {
454 Mips64OperandGenerator g(this);
455 Int32BinopMatcher m(node);
456 Emit(kMips64Div, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
457 g.UseRegister(m.right().node()));
458}
459
460
461void InstructionSelector::VisitUint32Div(Node* node) {
462 Mips64OperandGenerator g(this);
463 Int32BinopMatcher m(node);
464 Emit(kMips64DivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
465 g.UseRegister(m.right().node()));
466}
467
468
469void InstructionSelector::VisitInt32Mod(Node* node) {
470 Mips64OperandGenerator g(this);
471 Int32BinopMatcher m(node);
472 Emit(kMips64Mod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
473 g.UseRegister(m.right().node()));
474}
475
476
477void InstructionSelector::VisitUint32Mod(Node* node) {
478 Mips64OperandGenerator g(this);
479 Int32BinopMatcher m(node);
480 Emit(kMips64ModU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
481 g.UseRegister(m.right().node()));
482}
483
484
485void InstructionSelector::VisitInt64Div(Node* node) {
486 Mips64OperandGenerator g(this);
487 Int64BinopMatcher m(node);
488 Emit(kMips64Ddiv, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
489 g.UseRegister(m.right().node()));
490}
491
492
493void InstructionSelector::VisitUint64Div(Node* node) {
494 Mips64OperandGenerator g(this);
495 Int64BinopMatcher m(node);
496 Emit(kMips64DdivU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
497 g.UseRegister(m.right().node()));
498}
499
500
501void InstructionSelector::VisitInt64Mod(Node* node) {
502 Mips64OperandGenerator g(this);
503 Int64BinopMatcher m(node);
504 Emit(kMips64Dmod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
505 g.UseRegister(m.right().node()));
506}
507
508
509void InstructionSelector::VisitUint64Mod(Node* node) {
510 Mips64OperandGenerator g(this);
511 Int64BinopMatcher m(node);
512 Emit(kMips64DmodU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
513 g.UseRegister(m.right().node()));
514}
515
516
517void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
518 Mips64OperandGenerator g(this);
519 Emit(kMips64CvtDS, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
520}
521
522
523void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
524 Mips64OperandGenerator g(this);
525 Emit(kMips64CvtDW, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
526}
527
528
529void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
530 Mips64OperandGenerator g(this);
531 Emit(kMips64CvtDUw, g.DefineAsRegister(node),
532 g.UseRegister(node->InputAt(0)));
533}
534
535
536void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
537 Mips64OperandGenerator g(this);
538 Emit(kMips64TruncWD, g.DefineAsRegister(node),
539 g.UseRegister(node->InputAt(0)));
540}
541
542
543void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
544 Mips64OperandGenerator g(this);
545 Emit(kMips64TruncUwD, g.DefineAsRegister(node),
546 g.UseRegister(node->InputAt(0)));
547}
548
549
550void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
551 Mips64OperandGenerator g(this);
552 Emit(kMips64Shl, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
553 g.TempImmediate(0));
554}
555
556
557void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
558 Mips64OperandGenerator g(this);
559 Emit(kMips64Dext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
560 g.TempImmediate(0), g.TempImmediate(32));
561}
562
563
564void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
565 Mips64OperandGenerator g(this);
566 Emit(kMips64Ext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
567 g.TempImmediate(0), g.TempImmediate(32));
568}
569
570
571void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
572 Mips64OperandGenerator g(this);
573 Emit(kMips64CvtSD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
574}
575
576
577void InstructionSelector::VisitFloat64Add(Node* node) {
578 VisitRRR(this, kMips64AddD, node);
579}
580
581
582void InstructionSelector::VisitFloat64Sub(Node* node) {
583 VisitRRR(this, kMips64SubD, node);
584}
585
586
587void InstructionSelector::VisitFloat64Mul(Node* node) {
588 VisitRRR(this, kMips64MulD, node);
589}
590
591
592void InstructionSelector::VisitFloat64Div(Node* node) {
593 VisitRRR(this, kMips64DivD, node);
594}
595
596
597void InstructionSelector::VisitFloat64Mod(Node* node) {
598 Mips64OperandGenerator g(this);
599 Emit(kMips64ModD, g.DefineAsFixed(node, f0),
600 g.UseFixed(node->InputAt(0), f12),
601 g.UseFixed(node->InputAt(1), f14))->MarkAsCall();
602}
603
604
605void InstructionSelector::VisitFloat64Sqrt(Node* node) {
606 Mips64OperandGenerator g(this);
607 Emit(kMips64SqrtD, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
608}
609
610
611void InstructionSelector::VisitFloat64Floor(Node* node) {
612 VisitRR(this, kMips64Float64Floor, node);
613}
614
615
616void InstructionSelector::VisitFloat64Ceil(Node* node) {
617 VisitRR(this, kMips64Float64Ceil, node);
618}
619
620
621void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
622 VisitRR(this, kMips64Float64RoundTruncate, node);
623}
624
625
626void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
627 UNREACHABLE();
628}
629
630
631void InstructionSelector::VisitCall(Node* node) {
632 Mips64OperandGenerator g(this);
633 const CallDescriptor* descriptor = OpParameter<const CallDescriptor*>(node);
634
635 FrameStateDescriptor* frame_state_descriptor = NULL;
636 if (descriptor->NeedsFrameState()) {
637 frame_state_descriptor =
638 GetFrameStateDescriptor(node->InputAt(descriptor->InputCount()));
639 }
640
641 CallBuffer buffer(zone(), descriptor, frame_state_descriptor);
642
643 // Compute InstructionOperands for inputs and outputs.
644 InitializeCallBuffer(node, &buffer, true, false);
645
646 int push_count = buffer.pushed_nodes.size();
647 if (push_count > 0) {
648 Emit(kMips64StackClaim | MiscField::encode(push_count), NULL);
649 }
650 int slot = buffer.pushed_nodes.size() - 1;
651 for (NodeVectorRIter input = buffer.pushed_nodes.rbegin();
652 input != buffer.pushed_nodes.rend(); input++) {
653 Emit(kMips64StoreToStackSlot | MiscField::encode(slot), NULL,
654 g.UseRegister(*input));
655 slot--;
656 }
657
658 // Select the appropriate opcode based on the call type.
659 InstructionCode opcode;
660 switch (descriptor->kind()) {
661 case CallDescriptor::kCallCodeObject: {
662 opcode = kArchCallCodeObject;
663 break;
664 }
665 case CallDescriptor::kCallJSFunction:
666 opcode = kArchCallJSFunction;
667 break;
668 default:
669 UNREACHABLE();
670 return;
671 }
672 opcode |= MiscField::encode(descriptor->flags());
673
674 // Emit the call instruction.
675 Instruction* call_instr =
676 Emit(opcode, buffer.outputs.size(), &buffer.outputs.front(),
677 buffer.instruction_args.size(), &buffer.instruction_args.front());
678
679 call_instr->MarkAsCall();
680}
681
682
683void InstructionSelector::VisitCheckedLoad(Node* node) {
684 MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
685 MachineType typ = TypeOf(OpParameter<MachineType>(node));
686 Mips64OperandGenerator g(this);
687 Node* const buffer = node->InputAt(0);
688 Node* const offset = node->InputAt(1);
689 Node* const length = node->InputAt(2);
690 ArchOpcode opcode;
691 switch (rep) {
692 case kRepWord8:
693 opcode = typ == kTypeInt32 ? kCheckedLoadInt8 : kCheckedLoadUint8;
694 break;
695 case kRepWord16:
696 opcode = typ == kTypeInt32 ? kCheckedLoadInt16 : kCheckedLoadUint16;
697 break;
698 case kRepWord32:
699 opcode = kCheckedLoadWord32;
700 break;
701 case kRepFloat32:
702 opcode = kCheckedLoadFloat32;
703 break;
704 case kRepFloat64:
705 opcode = kCheckedLoadFloat64;
706 break;
707 default:
708 UNREACHABLE();
709 return;
710 }
711 InstructionOperand* offset_operand = g.CanBeImmediate(offset, opcode)
712 ? g.UseImmediate(offset)
713 : g.UseRegister(offset);
714
715 InstructionOperand* length_operand =
716 (!g.CanBeImmediate(offset, opcode)) ? g.CanBeImmediate(length, opcode)
717 ? g.UseImmediate(length)
718 : g.UseRegister(length)
719 : g.UseRegister(length);
720
721 Emit(opcode | AddressingModeField::encode(kMode_MRI),
722 g.DefineAsRegister(node), offset_operand, length_operand,
723 g.UseRegister(buffer));
724}
725
726
727void InstructionSelector::VisitCheckedStore(Node* node) {
728 MachineType rep = RepresentationOf(OpParameter<MachineType>(node));
729 Mips64OperandGenerator g(this);
730 Node* const buffer = node->InputAt(0);
731 Node* const offset = node->InputAt(1);
732 Node* const length = node->InputAt(2);
733 Node* const value = node->InputAt(3);
734 ArchOpcode opcode;
735 switch (rep) {
736 case kRepWord8:
737 opcode = kCheckedStoreWord8;
738 break;
739 case kRepWord16:
740 opcode = kCheckedStoreWord16;
741 break;
742 case kRepWord32:
743 opcode = kCheckedStoreWord32;
744 break;
745 case kRepFloat32:
746 opcode = kCheckedStoreFloat32;
747 break;
748 case kRepFloat64:
749 opcode = kCheckedStoreFloat64;
750 break;
751 default:
752 UNREACHABLE();
753 return;
754 }
755 InstructionOperand* offset_operand = g.CanBeImmediate(offset, opcode)
756 ? g.UseImmediate(offset)
757 : g.UseRegister(offset);
758
759 InstructionOperand* length_operand =
760 (!g.CanBeImmediate(offset, opcode)) ? g.CanBeImmediate(length, opcode)
761 ? g.UseImmediate(length)
762 : g.UseRegister(length)
763 : g.UseRegister(length);
764
765 Emit(opcode | AddressingModeField::encode(kMode_MRI), nullptr, offset_operand,
766 length_operand, g.UseRegister(value), g.UseRegister(buffer));
767}
768
769
770namespace {
771
772// Shared routine for multiple compare operations.
773static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
774 InstructionOperand* left, InstructionOperand* right,
775 FlagsContinuation* cont) {
776 Mips64OperandGenerator g(selector);
777 opcode = cont->Encode(opcode);
778 if (cont->IsBranch()) {
779 selector->Emit(opcode, NULL, left, right, g.Label(cont->true_block()),
780 g.Label(cont->false_block()))->MarkAsControl();
781 } else {
782 DCHECK(cont->IsSet());
783 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
784 }
785}
786
787
788// Shared routine for multiple float compare operations.
789void VisitFloat64Compare(InstructionSelector* selector, Node* node,
790 FlagsContinuation* cont) {
791 Mips64OperandGenerator g(selector);
792 Node* left = node->InputAt(0);
793 Node* right = node->InputAt(1);
794 VisitCompare(selector, kMips64CmpD, g.UseRegister(left), g.UseRegister(right),
795 cont);
796}
797
798
799// Shared routine for multiple word compare operations.
800void VisitWordCompare(InstructionSelector* selector, Node* node,
801 InstructionCode opcode, FlagsContinuation* cont,
802 bool commutative) {
803 Mips64OperandGenerator g(selector);
804 Node* left = node->InputAt(0);
805 Node* right = node->InputAt(1);
806
807 // Match immediates on left or right side of comparison.
808 if (g.CanBeImmediate(right, opcode, cont)) {
809 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
810 cont);
811 } else if (g.CanBeImmediate(left, opcode, cont)) {
812 if (!commutative) cont->Commute();
813 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
814 cont);
815 } else {
816 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
817 cont);
818 }
819}
820
821
822void VisitWord32Compare(InstructionSelector* selector, Node* node,
823 FlagsContinuation* cont) {
824 VisitWordCompare(selector, node, kMips64Cmp32, cont, false);
825}
826
827
828void VisitWord64Compare(InstructionSelector* selector, Node* node,
829 FlagsContinuation* cont) {
830 VisitWordCompare(selector, node, kMips64Cmp, cont, false);
831}
832
833} // namespace
834
835
836void EmitWordCompareZero(InstructionSelector* selector, InstructionCode opcode,
837 Node* value, FlagsContinuation* cont) {
838 Mips64OperandGenerator g(selector);
839 opcode = cont->Encode(opcode);
840 InstructionOperand* const value_operand = g.UseRegister(value);
841 if (cont->IsBranch()) {
842 selector->Emit(opcode, nullptr, value_operand, g.TempImmediate(0),
843 g.Label(cont->true_block()),
844 g.Label(cont->false_block()))->MarkAsControl();
845 } else {
846 selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
847 g.TempImmediate(0));
848 }
849}
850
851
852// Shared routine for word comparisons against zero.
853void VisitWordCompareZero(InstructionSelector* selector, Node* user,
854 Node* value, FlagsContinuation* cont) {
855 // Initially set comparison against 0 to be 64-bit variant for branches that
856 // cannot combine.
857 InstructionCode opcode = kMips64Cmp;
858 while (selector->CanCover(user, value)) {
859 if (user->opcode() == IrOpcode::kWord32Equal) {
860 opcode = kMips64Cmp32;
861 }
862 switch (value->opcode()) {
863 case IrOpcode::kWord32Equal: {
864 // Combine with comparisons against 0 by simply inverting the
865 // continuation.
866 Int32BinopMatcher m(value);
867 if (m.right().Is(0)) {
868 user = value;
869 value = m.left().node();
870 cont->Negate();
871 opcode = kMips64Cmp32;
872 continue;
873 }
874 cont->OverwriteAndNegateIfEqual(kEqual);
875 return VisitWord32Compare(selector, value, cont);
876 }
877 case IrOpcode::kInt32LessThan:
878 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
879 return VisitWord32Compare(selector, value, cont);
880 case IrOpcode::kInt32LessThanOrEqual:
881 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
882 return VisitWord32Compare(selector, value, cont);
883 case IrOpcode::kUint32LessThan:
884 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
885 return VisitWord32Compare(selector, value, cont);
886 case IrOpcode::kUint32LessThanOrEqual:
887 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
888 return VisitWord32Compare(selector, value, cont);
889 case IrOpcode::kWord64Equal: {
890 // Combine with comparisons against 0 by simply inverting the
891 // continuation.
892 Int64BinopMatcher m(value);
893 if (m.right().Is(0)) {
894 user = value;
895 value = m.left().node();
896 cont->Negate();
897 continue;
898 }
899 cont->OverwriteAndNegateIfEqual(kEqual);
900 return VisitWord64Compare(selector, value, cont);
901 }
902 case IrOpcode::kInt64LessThan:
903 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
904 return VisitWord64Compare(selector, value, cont);
905 case IrOpcode::kInt64LessThanOrEqual:
906 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
907 return VisitWord64Compare(selector, value, cont);
908 case IrOpcode::kUint64LessThan:
909 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
910 return VisitWord64Compare(selector, value, cont);
911 case IrOpcode::kFloat64Equal:
912 cont->OverwriteAndNegateIfEqual(kUnorderedEqual);
913 return VisitFloat64Compare(selector, value, cont);
914 case IrOpcode::kFloat64LessThan:
915 cont->OverwriteAndNegateIfEqual(kUnorderedLessThan);
916 return VisitFloat64Compare(selector, value, cont);
917 case IrOpcode::kFloat64LessThanOrEqual:
918 cont->OverwriteAndNegateIfEqual(kUnorderedLessThanOrEqual);
919 return VisitFloat64Compare(selector, value, cont);
920 case IrOpcode::kProjection:
921 // Check if this is the overflow output projection of an
922 // <Operation>WithOverflow node.
923 if (OpParameter<size_t>(value) == 1u) {
924 // We cannot combine the <Operation>WithOverflow with this branch
925 // unless the 0th projection (the use of the actual value of the
926 // <Operation> is either NULL, which means there's no use of the
927 // actual value, or was already defined, which means it is scheduled
928 // *AFTER* this branch).
929 Node* node = value->InputAt(0);
930 Node* result = node->FindProjection(0);
931 if (result == NULL || selector->IsDefined(result)) {
932 switch (node->opcode()) {
933 case IrOpcode::kInt32AddWithOverflow:
934 cont->OverwriteAndNegateIfEqual(kOverflow);
935 return VisitBinop(selector, node, kMips64Dadd, cont);
936 case IrOpcode::kInt32SubWithOverflow:
937 cont->OverwriteAndNegateIfEqual(kOverflow);
938 return VisitBinop(selector, node, kMips64Dsub, cont);
939 default:
940 break;
941 }
942 }
943 }
944 break;
945 case IrOpcode::kWord32And:
946 return VisitWordCompare(selector, value, kMips64Tst32, cont, true);
947 case IrOpcode::kWord64And:
948 return VisitWordCompare(selector, value, kMips64Tst, cont, true);
949 default:
950 break;
951 }
952 break;
953 }
954
955 // Continuation could not be combined with a compare, emit compare against 0.
956 EmitWordCompareZero(selector, opcode, value, cont);
957}
958
959
960void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
961 BasicBlock* fbranch) {
962 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
963 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
964}
965
966
967void InstructionSelector::VisitWord32Equal(Node* const node) {
968 FlagsContinuation cont(kEqual, node);
969 Int32BinopMatcher m(node);
970 if (m.right().Is(0)) {
971 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
972 }
973
974 VisitWord32Compare(this, node, &cont);
975}
976
977
978void InstructionSelector::VisitInt32LessThan(Node* node) {
979 FlagsContinuation cont(kSignedLessThan, node);
980 VisitWord32Compare(this, node, &cont);
981}
982
983
984void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
985 FlagsContinuation cont(kSignedLessThanOrEqual, node);
986 VisitWord32Compare(this, node, &cont);
987}
988
989
990void InstructionSelector::VisitUint32LessThan(Node* node) {
991 FlagsContinuation cont(kUnsignedLessThan, node);
992 VisitWord32Compare(this, node, &cont);
993}
994
995
996void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
997 FlagsContinuation cont(kUnsignedLessThanOrEqual, node);
998 VisitWord32Compare(this, node, &cont);
999}
1000
1001
1002void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1003 if (Node* ovf = node->FindProjection(1)) {
1004 FlagsContinuation cont(kOverflow, ovf);
1005 return VisitBinop(this, node, kMips64Dadd, &cont);
1006 }
1007 FlagsContinuation cont;
1008 VisitBinop(this, node, kMips64Dadd, &cont);
1009}
1010
1011
1012void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1013 if (Node* ovf = node->FindProjection(1)) {
1014 FlagsContinuation cont(kOverflow, ovf);
1015 return VisitBinop(this, node, kMips64Dsub, &cont);
1016 }
1017 FlagsContinuation cont;
1018 VisitBinop(this, node, kMips64Dsub, &cont);
1019}
1020
1021
1022void InstructionSelector::VisitWord64Equal(Node* const node) {
1023 FlagsContinuation cont(kEqual, node);
1024 Int64BinopMatcher m(node);
1025 if (m.right().Is(0)) {
1026 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
1027 }
1028
1029 VisitWord64Compare(this, node, &cont);
1030}
1031
1032
1033void InstructionSelector::VisitInt64LessThan(Node* node) {
1034 FlagsContinuation cont(kSignedLessThan, node);
1035 VisitWord64Compare(this, node, &cont);
1036}
1037
1038
1039void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1040 FlagsContinuation cont(kSignedLessThanOrEqual, node);
1041 VisitWord64Compare(this, node, &cont);
1042}
1043
1044
1045void InstructionSelector::VisitUint64LessThan(Node* node) {
1046 FlagsContinuation cont(kUnsignedLessThan, node);
1047 VisitWord64Compare(this, node, &cont);
1048}
1049
1050
1051void InstructionSelector::VisitFloat64Equal(Node* node) {
1052 FlagsContinuation cont(kUnorderedEqual, node);
1053 VisitFloat64Compare(this, node, &cont);
1054}
1055
1056
1057void InstructionSelector::VisitFloat64LessThan(Node* node) {
1058 FlagsContinuation cont(kUnorderedLessThan, node);
1059 VisitFloat64Compare(this, node, &cont);
1060}
1061
1062
1063void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
1064 FlagsContinuation cont(kUnorderedLessThanOrEqual, node);
1065 VisitFloat64Compare(this, node, &cont);
1066}
1067
1068
1069// static
1070MachineOperatorBuilder::Flags
1071InstructionSelector::SupportedMachineOperatorFlags() {
1072 return MachineOperatorBuilder::kFloat64Floor |
1073 MachineOperatorBuilder::kFloat64Ceil |
1074 MachineOperatorBuilder::kFloat64RoundTruncate;
1075}
1076
1077} // namespace compiler
1078} // namespace internal
1079} // namespace v8