blob: 5e2b5f2ad88a9bc53b6d585145afad813aa22551 [file] [log] [blame]
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00005#include "src/base/adapters.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -04006#include "src/base/bits.h"
7#include "src/compiler/instruction-selector-impl.h"
8#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/compiler/node-properties.h"
Emily Bernierd0a1eb72015-03-24 16:35:39 -040010
11namespace v8 {
12namespace internal {
13namespace compiler {
14
15#define TRACE_UNIMPL() \
16 PrintF("UNIMPLEMENTED instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)
17
18#define TRACE() PrintF("instr_sel: %s at line %d\n", __FUNCTION__, __LINE__)
19
20
// Adds Mips-specific methods for generating InstructionOperands.
class Mips64OperandGenerator final : public OperandGenerator {
 public:
  explicit Mips64OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  // Returns an immediate operand if |node| is a constant that fits the
  // immediate field of |opcode|, otherwise a register operand.
  InstructionOperand UseOperand(Node* node, InstructionCode opcode) {
    if (CanBeImmediate(node, opcode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  // Returns true if |node| is an int32/int64 constant whose value fits the
  // immediate field of the architecture opcode encoded in |opcode|.
  bool CanBeImmediate(Node* node, InstructionCode opcode) {
    int64_t value;
    if (node->opcode() == IrOpcode::kInt32Constant)
      value = OpParameter<int32_t>(node);
    else if (node->opcode() == IrOpcode::kInt64Constant)
      value = OpParameter<int64_t>(node);
    else
      return false;
    switch (ArchOpcodeField::decode(opcode)) {
      case kMips64Shl:
      case kMips64Sar:
      case kMips64Shr:
        // 32-bit shifts take a 5-bit shift amount.
        return is_uint5(value);
      case kMips64Dshl:
      case kMips64Dsar:
      case kMips64Dshr:
        // 64-bit shifts take a 6-bit shift amount.
        return is_uint6(value);
      case kMips64Xor:
        // xori zero-extends a 16-bit immediate.
        return is_uint16(value);
      case kMips64Ldc1:
      case kMips64Sdc1:
        // NOTE(review): both the offset and offset + kIntSize must fit in
        // int16 — presumably because the doubleword FPU access may be
        // synthesized as two word-sized accesses; confirm against the
        // macro-assembler.
        return is_int16(value + kIntSize);
      default:
        // Most MIPS immediates are sign-extended 16-bit fields.
        return is_int16(value);
    }
  }

 private:
  bool ImmediateFitsAddrMode1Instruction(int32_t imm) const {
    TRACE_UNIMPL();
    return false;
  }
};
67
68
69static void VisitRR(InstructionSelector* selector, ArchOpcode opcode,
70 Node* node) {
71 Mips64OperandGenerator g(selector);
72 selector->Emit(opcode, g.DefineAsRegister(node),
73 g.UseRegister(node->InputAt(0)));
74}
75
76
77static void VisitRRR(InstructionSelector* selector, ArchOpcode opcode,
78 Node* node) {
79 Mips64OperandGenerator g(selector);
80 selector->Emit(opcode, g.DefineAsRegister(node),
81 g.UseRegister(node->InputAt(0)),
82 g.UseRegister(node->InputAt(1)));
83}
84
85
86static void VisitRRO(InstructionSelector* selector, ArchOpcode opcode,
87 Node* node) {
88 Mips64OperandGenerator g(selector);
89 selector->Emit(opcode, g.DefineAsRegister(node),
90 g.UseRegister(node->InputAt(0)),
91 g.UseOperand(node->InputAt(1), opcode));
92}
93
94
// Shared code for binary operations combined with a flags continuation:
// emits |opcode| with the left input in a register and the right input as a
// register or folded immediate, appending branch labels (IsBranch), a
// materialized flag result (IsSet), or deopt state (IsDeoptimize) as
// dictated by |cont|.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode, FlagsContinuation* cont) {
  Mips64OperandGenerator g(selector);
  Int32BinopMatcher m(node);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  // The right operand may become an immediate if it fits |opcode|'s field.
  inputs[input_count++] = g.UseOperand(m.right().node(), opcode);

  if (cont->IsBranch()) {
    // Branch continuations append the true/false target labels as inputs.
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    // Also define a register holding the boolean comparison result.
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  // Fold the continuation's condition/flags mode into the opcode.
  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}
130
131
// Shared code for binary operations that need no flags continuation.
static void VisitBinop(InstructionSelector* selector, Node* node,
                       InstructionCode opcode) {
  FlagsContinuation cont;
  VisitBinop(selector, node, opcode, &cont);
}
137
138
void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  Mips64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);

  // Pick the load opcode from the machine representation being loaded.
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kMips64Lwc1;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kMips64Ldc1;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsUnsigned() ? kMips64Lbu : kMips64Lb;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsUnsigned() ? kMips64Lhu : kMips64Lh;
      break;
    case MachineRepresentation::kWord32:
      opcode = kMips64Lw;
      break;
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kMips64Ld;
      break;
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }

  if (g.CanBeImmediate(index, opcode)) {
    // base + constant offset fits the immediate field: one instruction.
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(index));
  } else {
    // Otherwise compute base + index into a temp register first.
    InstructionOperand addr_reg = g.TempRegister();
    Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg,
         g.UseRegister(index), g.UseRegister(base));
    // Emit desired load opcode, using temp addr_reg.
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), addr_reg, g.TempImmediate(0));
  }
}
185
186
void InstructionSelector::VisitStore(Node* node) {
  Mips64OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  // TODO(mips): I guess this could be done in a better way.
  if (write_barrier_kind != kNoWriteBarrier) {
    // Stores needing a write barrier go through the generic
    // kArchStoreWithWriteBarrier stub; only tagged values get barriers.
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    InstructionOperand inputs[3];
    size_t input_count = 0;
    // Unique registers: the barrier code clobbers/reuses these, so they must
    // not alias other operands.
    inputs[input_count++] = g.UseUniqueRegister(base);
    inputs[input_count++] = g.UseUniqueRegister(index);
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    // Plain store: pick the store opcode from the representation.
    ArchOpcode opcode = kArchNop;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kMips64Swc1;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kMips64Sdc1;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kMips64Sb;
        break;
      case MachineRepresentation::kWord16:
        opcode = kMips64Sh;
        break;
      case MachineRepresentation::kWord32:
        opcode = kMips64Sw;
        break;
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kMips64Sd;
        break;
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }

    if (g.CanBeImmediate(index, opcode)) {
      // base + constant offset fits the immediate field: one instruction.
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(base), g.UseImmediate(index), g.UseRegister(value));
    } else {
      // Otherwise compute base + index into a temp register first.
      InstructionOperand addr_reg = g.TempRegister();
      Emit(kMips64Dadd | AddressingModeField::encode(kMode_None), addr_reg,
           g.UseRegister(index), g.UseRegister(base));
      // Emit desired store opcode, using temp addr_reg.
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           addr_reg, g.TempImmediate(0), g.UseRegister(value));
    }
  }
}
267
268
// Word32 AND. Tries to strength-reduce to the MIPS bitfield instructions:
// Ext for And(Shr(x, imm), contiguous-low-mask), Ins for masks that clear a
// contiguous low range; falls back to a plain And otherwise.
void InstructionSelector::VisitWord32And(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32Shr() && CanCover(node, m.left().node()) &&
      m.right().HasValue()) {
    uint32_t mask = m.right().Value();
    uint32_t mask_width = base::bits::CountPopulation32(mask);
    uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
    if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
      // The mask must be contiguous, and occupy the least-significant bits.
      DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));

      // Select Ext for And(Shr(x, imm), mask) where the mask is in the least
      // significant bits.
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().HasValue()) {
        // Any shift value can match; int32 shifts use `value % 32`.
        uint32_t lsb = mleft.right().Value() & 0x1f;

        // Ext cannot extract bits past the register size, however since
        // shifting the original value would have introduced some zeros we can
        // still use Ext with a smaller mask and the remaining bits will be
        // zeros.
        if (lsb + mask_width > 32) mask_width = 32 - lsb;

        Emit(kMips64Ext, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
             g.TempImmediate(mask_width));
        return;
      }
      // Other cases fall through to the normal And operation.
    }
  }
  if (m.right().HasValue()) {
    uint32_t mask = m.right().Value();
    uint32_t shift = base::bits::CountPopulation32(~mask);
    uint32_t msb = base::bits::CountLeadingZeros32(~mask);
    if (shift != 0 && shift != 32 && msb + shift == 32) {
      // Insert zeros for (x >> K) << K => x & ~(2^K - 1) expression reduction
      // and remove constant loading of inverted mask.
      Emit(kMips64Ins, g.DefineSameAsFirst(node),
           g.UseRegister(m.left().node()), g.TempImmediate(0),
           g.TempImmediate(shift));
      return;
    }
  }
  VisitBinop(this, node, kMips64And);
}
317
318
// Word64 AND. 64-bit analogue of VisitWord32And: selects Dext/Dins for
// contiguous masks, otherwise a plain And.
void InstructionSelector::VisitWord64And(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64Shr() && CanCover(node, m.left().node()) &&
      m.right().HasValue()) {
    uint64_t mask = m.right().Value();
    uint32_t mask_width = base::bits::CountPopulation64(mask);
    uint32_t mask_msb = base::bits::CountLeadingZeros64(mask);
    if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
      // The mask must be contiguous, and occupy the least-significant bits.
      DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));

      // Select Dext for And(Shr(x, imm), mask) where the mask is in the least
      // significant bits.
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().HasValue()) {
        // Any shift value can match; int64 shifts use `value % 64`.
        uint32_t lsb = static_cast<uint32_t>(mleft.right().Value() & 0x3f);

        // Dext cannot extract bits past the register size, however since
        // shifting the original value would have introduced some zeros we can
        // still use Dext with a smaller mask and the remaining bits will be
        // zeros.
        if (lsb + mask_width > 64) mask_width = 64 - lsb;

        Emit(kMips64Dext, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
             g.TempImmediate(static_cast<int32_t>(mask_width)));
        return;
      }
      // Other cases fall through to the normal And operation.
    }
  }
  if (m.right().HasValue()) {
    uint64_t mask = m.right().Value();
    uint32_t shift = base::bits::CountPopulation64(~mask);
    uint32_t msb = base::bits::CountLeadingZeros64(~mask);
    if (shift != 0 && shift < 32 && msb + shift == 64) {
      // Insert zeros for (x >> K) << K => x & ~(2^K - 1) expression reduction
      // and remove constant loading of inverted mask. Dins cannot insert bits
      // past word size, so shifts smaller than 32 are covered.
      Emit(kMips64Dins, g.DefineSameAsFirst(node),
           g.UseRegister(m.left().node()), g.TempImmediate(0),
           g.TempImmediate(shift));
      return;
    }
  }
  VisitBinop(this, node, kMips64And);
}
368
369
// Word32 OR: plain binop; right operand may fold as an immediate.
void InstructionSelector::VisitWord32Or(Node* node) {
  VisitBinop(this, node, kMips64Or);
}
373
374
// Word64 OR: same opcode as the 32-bit case (kMips64Or).
void InstructionSelector::VisitWord64Or(Node* node) {
  VisitBinop(this, node, kMips64Or);
}
378
379
// Word32 XOR. Matches Xor(Or(a, b), -1) => Nor(a, b) and Xor(x, -1) =>
// Nor(x, 0) to avoid materializing the all-ones constant.
void InstructionSelector::VisitWord32Xor(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32Or() && CanCover(node, m.left().node()) &&
      m.right().Is(-1)) {
    Int32BinopMatcher mleft(m.left().node());
    if (!mleft.right().HasValue()) {
      Mips64OperandGenerator g(this);
      Emit(kMips64Nor, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()));
      return;
    }
  }
  if (m.right().Is(-1)) {
    // Use Nor for bit negation and eliminate constant loading for xori.
    Mips64OperandGenerator g(this);
    Emit(kMips64Nor, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.TempImmediate(0));
    return;
  }
  VisitBinop(this, node, kMips64Xor);
}
402
403
// Word64 XOR. 64-bit analogue of VisitWord32Xor: folds xor-with-minus-one
// into Nor.
void InstructionSelector::VisitWord64Xor(Node* node) {
  Int64BinopMatcher m(node);
  if (m.left().IsWord64Or() && CanCover(node, m.left().node()) &&
      m.right().Is(-1)) {
    Int64BinopMatcher mleft(m.left().node());
    if (!mleft.right().HasValue()) {
      Mips64OperandGenerator g(this);
      Emit(kMips64Nor, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()),
           g.UseRegister(mleft.right().node()));
      return;
    }
  }
  if (m.right().Is(-1)) {
    // Use Nor for bit negation and eliminate constant loading for xori.
    Mips64OperandGenerator g(this);
    Emit(kMips64Nor, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
         g.TempImmediate(0));
    return;
  }
  VisitBinop(this, node, kMips64Xor);
}
426
427
// Word32 SHL. If the shifted value is And(x, mask) with a contiguous
// low-bit mask that the shift pushes off the top, the mask is redundant and
// only the shift is emitted.
void InstructionSelector::VisitWord32Shl(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && CanCover(node, m.left().node()) &&
      m.right().IsInRange(1, 31)) {
    Mips64OperandGenerator g(this);
    Int32BinopMatcher mleft(m.left().node());
    // Match Word32Shl(Word32And(x, mask), imm) to Shl where the mask is
    // contiguous, and the shift immediate non-zero.
    if (mleft.right().HasValue()) {
      uint32_t mask = mleft.right().Value();
      uint32_t mask_width = base::bits::CountPopulation32(mask);
      uint32_t mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_width != 0) && (mask_msb + mask_width == 32)) {
        uint32_t shift = m.right().Value();
        DCHECK_EQ(0u, base::bits::CountTrailingZeros32(mask));
        DCHECK_NE(0u, shift);
        if ((shift + mask_width) >= 32) {
          // If the mask is contiguous and reaches or extends beyond the top
          // bit, only the shift is needed.
          Emit(kMips64Shl, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()));
          return;
        }
      }
    }
  }
  VisitRRO(this, kMips64Shl, node);
}
457
458
// Word32 SHR. Matches Shr(And(x, mask), imm) where the mask selects a
// contiguous field that the shift moves to the low bits; emits Ext.
void InstructionSelector::VisitWord32Shr(Node* node) {
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x1f;
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Ext for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      uint32_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation32(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros32(mask);
      if ((mask_msb + mask_width + lsb) == 32) {
        Mips64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros32(mask));
        Emit(kMips64Ext, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  }
  VisitRRO(this, kMips64Shr, node);
}
482
483
// Word32 arithmetic shift right; shift amount may fold as an immediate.
void InstructionSelector::VisitWord32Sar(Node* node) {
  VisitRRO(this, kMips64Sar, node);
}
487
488
// Word64 SHL. Skips a preceding 32->64 sign/zero extension when the shift
// discards the upper 32 bits anyway, and drops a redundant contiguous mask
// as in the 32-bit case.
void InstructionSelector::VisitWord64Shl(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if ((m.left().IsChangeInt32ToInt64() || m.left().IsChangeUint32ToUint64()) &&
      m.right().IsInRange(32, 63)) {
    // There's no need to sign/zero-extend to 64-bit if we shift out the upper
    // 32 bits anyway.
    Emit(kMips64Dshl, g.DefineSameAsFirst(node),
         g.UseRegister(m.left().node()->InputAt(0)),
         g.UseImmediate(m.right().node()));
    return;
  }
  if (m.left().IsWord64And() && CanCover(node, m.left().node()) &&
      m.right().IsInRange(1, 63)) {
    // Match Word64Shl(Word64And(x, mask), imm) to Dshl where the mask is
    // contiguous, and the shift immediate non-zero.
    Int64BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      uint64_t mask = mleft.right().Value();
      uint32_t mask_width = base::bits::CountPopulation64(mask);
      uint32_t mask_msb = base::bits::CountLeadingZeros64(mask);
      if ((mask_width != 0) && (mask_msb + mask_width == 64)) {
        uint64_t shift = m.right().Value();
        DCHECK_EQ(0u, base::bits::CountTrailingZeros64(mask));
        DCHECK_NE(0u, shift);

        if ((shift + mask_width) >= 64) {
          // If the mask is contiguous and reaches or extends beyond the top
          // bit, only the shift is needed.
          Emit(kMips64Dshl, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()),
               g.UseImmediate(m.right().node()));
          return;
        }
      }
    }
  }
  VisitRRO(this, kMips64Dshl, node);
}
528
529
// Word64 SHR. 64-bit analogue of VisitWord32Shr: matches a masked field
// shifted to the low bits and emits Dext.
void InstructionSelector::VisitWord64Shr(Node* node) {
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().HasValue()) {
    uint32_t lsb = m.right().Value() & 0x3f;
    Int64BinopMatcher mleft(m.left().node());
    if (mleft.right().HasValue()) {
      // Select Dext for Shr(And(x, mask), imm) where the result of the mask is
      // shifted into the least-significant bits.
      uint64_t mask = (mleft.right().Value() >> lsb) << lsb;
      unsigned mask_width = base::bits::CountPopulation64(mask);
      unsigned mask_msb = base::bits::CountLeadingZeros64(mask);
      if ((mask_msb + mask_width + lsb) == 64) {
        Mips64OperandGenerator g(this);
        DCHECK_EQ(lsb, base::bits::CountTrailingZeros64(mask));
        Emit(kMips64Dext, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(lsb),
             g.TempImmediate(mask_width));
        return;
      }
    }
  }
  VisitRRO(this, kMips64Dshr, node);
}
553
554
// Word64 arithmetic shift right; shift amount may fold as an immediate.
void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitRRO(this, kMips64Dsar, node);
}
558
559
// Word32 rotate right; rotate amount may fold as an immediate.
void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kMips64Ror, node);
}
563
564
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000565void InstructionSelector::VisitWord32Clz(Node* node) {
566 VisitRR(this, kMips64Clz, node);
567}
568
569
Ben Murdoch097c5b22016-05-18 11:27:45 +0100570void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000571
572
Ben Murdoch097c5b22016-05-18 11:27:45 +0100573void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000574
575
Ben Murdoch097c5b22016-05-18 11:27:45 +0100576void InstructionSelector::VisitWord32Ctz(Node* node) {
577 Mips64OperandGenerator g(this);
578 Emit(kMips64Ctz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
579}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000580
581
Ben Murdoch097c5b22016-05-18 11:27:45 +0100582void InstructionSelector::VisitWord64Ctz(Node* node) {
583 Mips64OperandGenerator g(this);
584 Emit(kMips64Dctz, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)));
585}
586
587
588void InstructionSelector::VisitWord32Popcnt(Node* node) {
589 Mips64OperandGenerator g(this);
590 Emit(kMips64Popcnt, g.DefineAsRegister(node),
591 g.UseRegister(node->InputAt(0)));
592}
593
594
595void InstructionSelector::VisitWord64Popcnt(Node* node) {
596 Mips64OperandGenerator g(this);
597 Emit(kMips64Dpopcnt, g.DefineAsRegister(node),
598 g.UseRegister(node->InputAt(0)));
599}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000600
601
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400602void InstructionSelector::VisitWord64Ror(Node* node) {
603 VisitRRO(this, kMips64Dror, node);
604}
605
606
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000607void InstructionSelector::VisitWord64Clz(Node* node) {
608 VisitRR(this, kMips64Dclz, node);
609}
610
611
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400612void InstructionSelector::VisitInt32Add(Node* node) {
613 Mips64OperandGenerator g(this);
614 // TODO(plind): Consider multiply & add optimization from arm port.
615 VisitBinop(this, node, kMips64Add);
616}
617
618
619void InstructionSelector::VisitInt64Add(Node* node) {
620 Mips64OperandGenerator g(this);
621 // TODO(plind): Consider multiply & add optimization from arm port.
622 VisitBinop(this, node, kMips64Dadd);
623}
624
625
// Int32 subtraction: plain binop.
void InstructionSelector::VisitInt32Sub(Node* node) {
  VisitBinop(this, node, kMips64Sub);
}
629
630
// Int64 subtraction: plain binop.
void InstructionSelector::VisitInt64Sub(Node* node) {
  VisitBinop(this, node, kMips64Dsub);
}
634
635
// Int32 multiplication. Strength-reduces multiplication by 2^k, 2^k + 1 and
// 2^k - 1 into shift (+/- add/sub), combines untagging shifts (x >> 32) on
// both operands into DMulHigh, otherwise emits a plain Mul.
void InstructionSelector::VisitInt32Mul(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().HasValue() && m.right().Value() > 0) {
    int32_t value = m.right().Value();
    if (base::bits::IsPowerOfTwo32(value)) {
      // x * 2^k => x << k.
      Emit(kMips64Shl | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value)));
      return;
    }
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      // x * (2^k + 1) => (x << k) + x.
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      Emit(kMips64Add | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
      return;
    }
    if (base::bits::IsPowerOfTwo32(value + 1)) {
      // x * (2^k - 1) => (x << k) - x.
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Shl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      Emit(kMips64Sub | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
      return;
    }
  }
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (CanCover(node, left) && CanCover(node, right)) {
    if (left->opcode() == IrOpcode::kWord64Sar &&
        right->opcode() == IrOpcode::kWord64Sar) {
      Int64BinopMatcher leftInput(left), rightInput(right);
      if (leftInput.right().Is(32) && rightInput.right().Is(32)) {
        // Combine untagging shifts with Dmul high.
        Emit(kMips64DMulHigh, g.DefineSameAsFirst(node),
             g.UseRegister(leftInput.left().node()),
             g.UseRegister(rightInput.left().node()));
        return;
      }
    }
  }
  VisitRRR(this, kMips64Mul, node);
}
683
684
// Signed 32-bit multiply-high: all operands in registers.
void InstructionSelector::VisitInt32MulHigh(Node* node) {
  VisitRRR(this, kMips64MulHigh, node);
}
688
689
// Unsigned 32-bit multiply-high: all operands in registers.
void InstructionSelector::VisitUint32MulHigh(Node* node) {
  VisitRRR(this, kMips64MulHighU, node);
}
693
694
// Int64 multiplication. Same power-of-two strength reduction as the 32-bit
// case, using the 64-bit shift/add/sub opcodes; otherwise a plain Dmul.
void InstructionSelector::VisitInt64Mul(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  // TODO(dusmil): Add optimization for shifts larger than 32.
  if (m.right().HasValue() && m.right().Value() > 0) {
    int32_t value = static_cast<int32_t>(m.right().Value());
    if (base::bits::IsPowerOfTwo32(value)) {
      // x * 2^k => x << k.
      Emit(kMips64Dshl | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value)));
      return;
    }
    if (base::bits::IsPowerOfTwo32(value - 1)) {
      // x * (2^k + 1) => (x << k) + x.
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value - 1)));
      Emit(kMips64Dadd | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), g.UseRegister(m.left().node()), temp);
      return;
    }
    if (base::bits::IsPowerOfTwo32(value + 1)) {
      // x * (2^k - 1) => (x << k) - x.
      InstructionOperand temp = g.TempRegister();
      Emit(kMips64Dshl | AddressingModeField::encode(kMode_None), temp,
           g.UseRegister(m.left().node()),
           g.TempImmediate(WhichPowerOf2(value + 1)));
      Emit(kMips64Dsub | AddressingModeField::encode(kMode_None),
           g.DefineAsRegister(node), temp, g.UseRegister(m.left().node()));
      return;
    }
  }
  Emit(kMips64Dmul, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
729
730
// Int32 division. If both operands are untagging shifts (x >> 32), combines
// them into a single Ddiv on the unshifted values; otherwise emits Div.
void InstructionSelector::VisitInt32Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (CanCover(node, left) && CanCover(node, right)) {
    if (left->opcode() == IrOpcode::kWord64Sar &&
        right->opcode() == IrOpcode::kWord64Sar) {
      Int64BinopMatcher rightInput(right), leftInput(left);
      if (rightInput.right().Is(32) && leftInput.right().Is(32)) {
        // Combine both shifted operands with Ddiv.
        Emit(kMips64Ddiv, g.DefineSameAsFirst(node),
             g.UseRegister(leftInput.left().node()),
             g.UseRegister(rightInput.left().node()));
        return;
      }
    }
  }
  Emit(kMips64Div, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
752
753
// Unsigned 32-bit division: both operands in registers.
void InstructionSelector::VisitUint32Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMips64DivU, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
760
761
// Int32 modulus. If both operands are untagging shifts (x >> 32), combines
// them into a single Dmod on the unshifted values; otherwise emits Mod.
void InstructionSelector::VisitInt32Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (CanCover(node, left) && CanCover(node, right)) {
    if (left->opcode() == IrOpcode::kWord64Sar &&
        right->opcode() == IrOpcode::kWord64Sar) {
      Int64BinopMatcher rightInput(right), leftInput(left);
      if (rightInput.right().Is(32) && leftInput.right().Is(32)) {
        // Combine both shifted operands with Dmod.
        Emit(kMips64Dmod, g.DefineSameAsFirst(node),
             g.UseRegister(leftInput.left().node()),
             g.UseRegister(rightInput.left().node()));
        return;
      }
    }
  }
  Emit(kMips64Mod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
783
784
// Unsigned 32-bit modulus: both operands in registers.
void InstructionSelector::VisitUint32Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Int32BinopMatcher m(node);
  Emit(kMips64ModU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
791
792
// Signed 64-bit division: both operands in registers.
void InstructionSelector::VisitInt64Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  Emit(kMips64Ddiv, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
799
800
// Unsigned 64-bit division: both operands in registers.
void InstructionSelector::VisitUint64Div(Node* node) {
  Mips64OperandGenerator g(this);
  Int64BinopMatcher m(node);
  Emit(kMips64DdivU, g.DefineSameAsFirst(node), g.UseRegister(m.left().node()),
       g.UseRegister(m.right().node()));
}
807
808
809void InstructionSelector::VisitInt64Mod(Node* node) {
810 Mips64OperandGenerator g(this);
811 Int64BinopMatcher m(node);
812 Emit(kMips64Dmod, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
813 g.UseRegister(m.right().node()));
814}
815
816
817void InstructionSelector::VisitUint64Mod(Node* node) {
818 Mips64OperandGenerator g(this);
819 Int64BinopMatcher m(node);
820 Emit(kMips64DmodU, g.DefineAsRegister(node), g.UseRegister(m.left().node()),
821 g.UseRegister(m.right().node()));
822}
823
824
// f32 -> f64 widening conversion (cvt.d.s).
void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kMips64CvtDS, node);
}


// Signed i32 -> f32 conversion (cvt.s.w).
void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kMips64CvtSW, node);
}


// Unsigned i32 -> f32 conversion.
void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kMips64CvtSUw, node);
}


// Signed i32 -> f64 conversion (cvt.d.w).
void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kMips64CvtDW, node);
}


// Unsigned i32 -> f64 conversion.
void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kMips64CvtDUw, node);
}


// f32 -> signed i32, rounding toward zero (trunc.w.s).
void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kMips64TruncWS, node);
}


// f32 -> unsigned i32, rounding toward zero.
void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kMips64TruncUwS, node);
}
858
859
// Selects instructions for f64 -> i32 conversion. Folds an immediately
// preceding rounding operation (and, one level deeper, an f32 -> f64 widen of
// a rounded f32) into a single round-and-convert instruction; falls back to a
// plain truncating conversion otherwise.
void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  Mips64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  // Match ChangeFloat64ToInt32(Float64Round##OP) to corresponding instruction
  // which does rounding and conversion to integer format.
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kFloat64RoundDown:
        Emit(kMips64FloorWD, g.DefineAsRegister(node),
             g.UseRegister(value->InputAt(0)));
        return;
      case IrOpcode::kFloat64RoundUp:
        Emit(kMips64CeilWD, g.DefineAsRegister(node),
             g.UseRegister(value->InputAt(0)));
        return;
      case IrOpcode::kFloat64RoundTiesEven:
        Emit(kMips64RoundWD, g.DefineAsRegister(node),
             g.UseRegister(value->InputAt(0)));
        return;
      case IrOpcode::kFloat64RoundTruncate:
        Emit(kMips64TruncWD, g.DefineAsRegister(node),
             g.UseRegister(value->InputAt(0)));
        return;
      default:
        break;
    }
    if (value->opcode() == IrOpcode::kChangeFloat32ToFloat64) {
      Node* next = value->InputAt(0);
      if (CanCover(value, next)) {
        // Match ChangeFloat64ToInt32(ChangeFloat32ToFloat64(Float64Round##OP))
        switch (next->opcode()) {
          case IrOpcode::kFloat32RoundDown:
            Emit(kMips64FloorWS, g.DefineAsRegister(node),
                 g.UseRegister(next->InputAt(0)));
            return;
          case IrOpcode::kFloat32RoundUp:
            Emit(kMips64CeilWS, g.DefineAsRegister(node),
                 g.UseRegister(next->InputAt(0)));
            return;
          case IrOpcode::kFloat32RoundTiesEven:
            Emit(kMips64RoundWS, g.DefineAsRegister(node),
                 g.UseRegister(next->InputAt(0)));
            return;
          case IrOpcode::kFloat32RoundTruncate:
            Emit(kMips64TruncWS, g.DefineAsRegister(node),
                 g.UseRegister(next->InputAt(0)));
            return;
          default:
            // No rounding op to fold: truncate the f32 input directly,
            // skipping the intermediate f64 widen.
            Emit(kMips64TruncWS, g.DefineAsRegister(node),
                 g.UseRegister(value->InputAt(0)));
            return;
        }
      } else {
        // Match float32 -> float64 -> int32 representation change path.
        Emit(kMips64TruncWS, g.DefineAsRegister(node),
             g.UseRegister(value->InputAt(0)));
        return;
      }
    }
  }
  // Generic path: truncating f64 -> i32 conversion.
  VisitRR(this, kMips64TruncWD, node);
}
922
923
// f64 -> unsigned i32, rounding toward zero.
void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kMips64TruncUwD, node);
}

// Same lowering as ChangeFloat64ToUint32: truncating f64 -> u32.
void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kMips64TruncUwD, node);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000931
932void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400933 Mips64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000934 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
935 InstructionOperand outputs[2];
936 size_t output_count = 0;
937 outputs[output_count++] = g.DefineAsRegister(node);
938
939 Node* success_output = NodeProperties::FindProjection(node, 1);
940 if (success_output) {
941 outputs[output_count++] = g.DefineAsRegister(success_output);
942 }
943
944 this->Emit(kMips64TruncLS, output_count, outputs, 1, inputs);
945}
946
947
948void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
949 Mips64OperandGenerator g(this);
950 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
951 InstructionOperand outputs[2];
952 size_t output_count = 0;
953 outputs[output_count++] = g.DefineAsRegister(node);
954
955 Node* success_output = NodeProperties::FindProjection(node, 1);
956 if (success_output) {
957 outputs[output_count++] = g.DefineAsRegister(success_output);
958 }
959
960 Emit(kMips64TruncLD, output_count, outputs, 1, inputs);
961}
962
963
964void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
965 Mips64OperandGenerator g(this);
966 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
967 InstructionOperand outputs[2];
968 size_t output_count = 0;
969 outputs[output_count++] = g.DefineAsRegister(node);
970
971 Node* success_output = NodeProperties::FindProjection(node, 1);
972 if (success_output) {
973 outputs[output_count++] = g.DefineAsRegister(success_output);
974 }
975
976 Emit(kMips64TruncUlS, output_count, outputs, 1, inputs);
977}
978
979
980void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
981 Mips64OperandGenerator g(this);
982
983 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
984 InstructionOperand outputs[2];
985 size_t output_count = 0;
986 outputs[output_count++] = g.DefineAsRegister(node);
987
988 Node* success_output = NodeProperties::FindProjection(node, 1);
989 if (success_output) {
990 outputs[output_count++] = g.DefineAsRegister(success_output);
991 }
992
993 Emit(kMips64TruncUlD, output_count, outputs, 1, inputs);
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400994}
995
996
// Sign-extends i32 to i64 with a 32-bit shift by zero (on MIPS64, 32-bit
// ALU results are kept sign-extended in 64-bit registers).
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Shl, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.TempImmediate(0));
}


// Zero-extends u32 to u64 via Dext(pos=0, size=32), which extracts the low
// 32 bits and clears the upper word.
void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Dext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.TempImmediate(0), g.TempImmediate(32));
}
1009
1010
// Truncates i64 to i32. A covered 64-bit arithmetic right shift by 32..63
// already yields a properly sign-extended 32-bit value, so the shift is
// emitted directly and the truncation folded away; otherwise the low 32 bits
// are extracted (sign state per kMips64Ext semantics).
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  Mips64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  if (CanCover(node, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord64Sar: {
        Int64BinopMatcher m(value);
        if (m.right().IsInRange(32, 63)) {
          // After smi untagging no need for truncate. Combine sequence.
          Emit(kMips64Dsar, g.DefineSameAsFirst(node),
               g.UseRegister(m.left().node()),
               g.UseImmediate(m.right().node()));
          return;
        }
        break;
      }
      default:
        break;
    }
  }
  Emit(kMips64Ext, g.DefineAsRegister(node), g.UseRegister(node->InputAt(0)),
       g.TempImmediate(0), g.TempImmediate(32));
}
1034
1035
// Narrows f64 to f32. When the input is a covered i32 -> f64 conversion, the
// pair is collapsed into a single i32 -> f32 conversion (cvt.s.w).
void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  Mips64OperandGenerator g(this);
  Node* value = node->InputAt(0);
  // Match TruncateFloat64ToFloat32(ChangeInt32ToFloat64) to corresponding
  // instruction.
  if (CanCover(node, value) &&
      value->opcode() == IrOpcode::kChangeInt32ToFloat64) {
    Emit(kMips64CvtSW, g.DefineAsRegister(node),
         g.UseRegister(value->InputAt(0)));
    return;
  }
  VisitRR(this, kMips64CvtSD, node);
}
1049
1050
// f64 -> i32 truncation, dispatched on the operator's truncation mode:
// JavaScript semantics (modular, via the architecture-neutral helper) versus
// plain round-to-zero.
void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
  switch (TruncationModeOf(node->op())) {
    case TruncationMode::kJavaScript:
      return VisitRR(this, kArchTruncateDoubleToI, node);
    case TruncationMode::kRoundToZero:
      return VisitRR(this, kMips64TruncWD, node);
  }
  UNREACHABLE();
}
1060
1061
// Signed i64 -> f32 conversion (cvt.s.l).
void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kMips64CvtSL, node);
}


// Signed i64 -> f64 conversion (cvt.d.l).
void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kMips64CvtDL, node);
}


// Unsigned i64 -> f32 conversion.
void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kMips64CvtSUl, node);
}


// Unsigned i64 -> f64 conversion.
void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kMips64CvtDUl, node);
}


// Bit-pattern move f32 -> i32; reuses the f64 low-word extract opcode to
// copy the FP register bits into a general register.
void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kMips64Float64ExtractLowWord32, node);
}


// Bit-pattern move f64 -> i64.
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kMips64BitcastDL, node);
}
1090
1091
// Bit-pattern move i32 -> f32, implemented as an insert into the low word of
// an FP register. The first operand is a raw inline zero immediate
// (constructed directly rather than via g.TempImmediate).
void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64Float64InsertLowWord32, g.DefineAsRegister(node),
       ImmediateOperand(ImmediateOperand::INLINE, 0),
       g.UseRegister(node->InputAt(0)));
}


// Bit-pattern move i64 -> f64.
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kMips64BitcastLD, node);
}
1103
1104
// f32 addition (add.s).
void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kMips64AddS, node);
}


// f64 addition (add.d).
void InstructionSelector::VisitFloat64Add(Node* node) {
  VisitRRR(this, kMips64AddD, node);
}


// f32 subtraction (sub.s).
void InstructionSelector::VisitFloat32Sub(Node* node) {
  VisitRRR(this, kMips64SubS, node);
}
1118
1119
// f64 subtraction. Recognizes the pattern
//   -0.0 - Float64RoundDown(-0.0 - x)
// (when both inner nodes are covered) and lowers it to Float64RoundUp(x),
// i.e. ceil expressed through floor; otherwise emits a plain sub.d.
void InstructionSelector::VisitFloat64Sub(Node* node) {
  Mips64OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero() && m.right().IsFloat64RoundDown() &&
      CanCover(m.node(), m.right().node())) {
    if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
        CanCover(m.right().node(), m.right().InputAt(0))) {
      Float64BinopMatcher mright0(m.right().InputAt(0));
      if (mright0.left().IsMinusZero()) {
        Emit(kMips64Float64RoundUp, g.DefineAsRegister(node),
             g.UseRegister(mright0.right().node()));
        return;
      }
    }
  }
  VisitRRR(this, kMips64SubD, node);
}
1137
1138
// f32 multiplication (mul.s).
void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kMips64MulS, node);
}


// f64 multiplication (mul.d).
void InstructionSelector::VisitFloat64Mul(Node* node) {
  VisitRRR(this, kMips64MulD, node);
}


// f32 division (div.s).
void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kMips64DivS, node);
}


// f64 division (div.d).
void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kMips64DivD, node);
}
1157
1158
// f64 modulus, lowered as a call (MarkAsCall) with the C-call FP register
// convention: arguments fixed in f12/f14, result fixed in f0.
void InstructionSelector::VisitFloat64Mod(Node* node) {
  Mips64OperandGenerator g(this);
  Emit(kMips64ModD, g.DefineAsFixed(node, f0),
       g.UseFixed(node->InputAt(0), f12),
       g.UseFixed(node->InputAt(1), f14))->MarkAsCall();
}
1165
1166
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001167void InstructionSelector::VisitFloat32Max(Node* node) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001168 Mips64OperandGenerator g(this);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001169 if (kArchVariant == kMips64r6) {
1170 Emit(kMips64Float32Max, g.DefineAsRegister(node),
1171 g.UseUniqueRegister(node->InputAt(0)),
1172 g.UseUniqueRegister(node->InputAt(1)));
1173
1174 } else {
1175 // Reverse operands, and use same reg. for result and right operand.
1176 Emit(kMips64Float32Max, g.DefineSameAsFirst(node),
1177 g.UseRegister(node->InputAt(1)), g.UseRegister(node->InputAt(0)));
1178 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001179}
1180
1181
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001182void InstructionSelector::VisitFloat64Max(Node* node) {
1183 Mips64OperandGenerator g(this);
1184 if (kArchVariant == kMips64r6) {
1185 Emit(kMips64Float64Max, g.DefineAsRegister(node),
1186 g.UseUniqueRegister(node->InputAt(0)),
1187 g.UseUniqueRegister(node->InputAt(1)));
1188
1189 } else {
1190 // Reverse operands, and use same reg. for result and right operand.
1191 Emit(kMips64Float64Max, g.DefineSameAsFirst(node),
1192 g.UseRegister(node->InputAt(1)), g.UseRegister(node->InputAt(0)));
1193 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001194}
1195
1196
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001197void InstructionSelector::VisitFloat32Min(Node* node) {
1198 Mips64OperandGenerator g(this);
1199 if (kArchVariant == kMips64r6) {
1200 Emit(kMips64Float32Min, g.DefineAsRegister(node),
1201 g.UseUniqueRegister(node->InputAt(0)),
1202 g.UseUniqueRegister(node->InputAt(1)));
1203
1204 } else {
1205 // Reverse operands, and use same reg. for result and right operand.
1206 Emit(kMips64Float32Min, g.DefineSameAsFirst(node),
1207 g.UseRegister(node->InputAt(1)), g.UseRegister(node->InputAt(0)));
1208 }
1209}
1210
1211
1212void InstructionSelector::VisitFloat64Min(Node* node) {
1213 Mips64OperandGenerator g(this);
1214 if (kArchVariant == kMips64r6) {
1215 Emit(kMips64Float64Min, g.DefineAsRegister(node),
1216 g.UseUniqueRegister(node->InputAt(0)),
1217 g.UseUniqueRegister(node->InputAt(1)));
1218
1219 } else {
1220 // Reverse operands, and use same reg. for result and right operand.
1221 Emit(kMips64Float64Min, g.DefineSameAsFirst(node),
1222 g.UseRegister(node->InputAt(1)), g.UseRegister(node->InputAt(0)));
1223 }
1224}
1225
1226
// f32 absolute value (abs.s).
void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kMips64AbsS, node);
}


// f64 absolute value (abs.d).
void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kMips64AbsD, node);
}


// f32 square root (sqrt.s).
void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kMips64SqrtS, node);
}


// f64 square root (sqrt.d).
void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kMips64SqrtD, node);
}


// f32 round toward -infinity (floor).
void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kMips64Float32RoundDown, node);
}


// f64 round toward -infinity (floor).
void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kMips64Float64RoundDown, node);
}


// f32 round toward +infinity (ceil).
void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kMips64Float32RoundUp, node);
}


// f64 round toward +infinity (ceil).
void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kMips64Float64RoundUp, node);
}


// f32 round toward zero (truncate).
void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kMips64Float32RoundTruncate, node);
}


// f64 round toward zero (truncate).
void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kMips64Float64RoundTruncate, node);
}


// Round-ties-away is not supported on this backend.
void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  UNREACHABLE();
}


// f32 round to nearest, ties to even.
void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  VisitRR(this, kMips64Float32RoundTiesEven, node);
}


// f64 round to nearest, ties to even.
void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  VisitRR(this, kMips64Float64RoundTiesEven, node);
}
1290
1291
// Emits the instructions that place call arguments on the stack before a
// call. C calls first reserve the outgoing area (kArchPrepareCallCFunction)
// and poke arguments starting after the architectural argument-slot area;
// JS-style calls claim the exact number of stack-parameter slots up front and
// store each argument at its slot index.
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  Mips64OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kCArgSlotCount;
    for (PushParameter input : (*arguments)) {
      Emit(kMips64StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
           g.TempImmediate(slot << kPointerSizeLog2));
      ++slot;
    }
  } else {
    int push_count = static_cast<int>(descriptor->StackParameterCount());
    if (push_count > 0) {
      // Claim all stack-parameter slots with a single adjustment.
      Emit(kMips64StackClaim, g.NoOutput(),
           g.TempImmediate(push_count << kPointerSizeLog2));
    }
    for (size_t n = 0; n < arguments->size(); ++n) {
      PushParameter input = (*arguments)[n];
      // Skip holes (null nodes) in the argument list.
      if (input.node()) {
        Emit(kMips64StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
             g.TempImmediate(static_cast<int>(n << kPointerSizeLog2)));
      }
    }
  }
}
1325
1326
// Tail-call targets must be materialized in a register on this backend.
bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

// Number of temporaries reserved for a tail call from a JS function.
int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001330
// Emits a bounds-checked load: inputs are (buffer, offset, length). The
// opcode is chosen from the loaded representation; offset and length may be
// encoded as immediates when the opcode accepts them.
void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  Mips64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kSimd128:
    case MachineRepresentation::kNone:
      // These representations are never checked-loaded.
      UNREACHABLE();
      return;
  }
  InstructionOperand offset_operand = g.CanBeImmediate(offset, opcode)
                                          ? g.UseImmediate(offset)
                                          : g.UseRegister(offset);

  // The length may only be an immediate when the offset is not one; an
  // immediate offset forces the length into a register.
  InstructionOperand length_operand = (!g.CanBeImmediate(offset, opcode))
                                          ? g.CanBeImmediate(length, opcode)
                                                ? g.UseImmediate(length)
                                                : g.UseRegister(length)
                                          : g.UseRegister(length);

  Emit(opcode | AddressingModeField::encode(kMode_MRI),
       g.DefineAsRegister(node), offset_operand, length_operand,
       g.UseRegister(buffer));
}
1378
1379
// Emits a bounds-checked store: inputs are (buffer, offset, length, value).
// The opcode is chosen from the stored representation; offset and length may
// be encoded as immediates when the opcode accepts them.
void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  Mips64OperandGenerator g(this);
  Node* const buffer = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:
    case MachineRepresentation::kTagged:
    case MachineRepresentation::kSimd128:
    case MachineRepresentation::kNone:
      // These representations are never checked-stored.
      UNREACHABLE();
      return;
  }
  InstructionOperand offset_operand = g.CanBeImmediate(offset, opcode)
                                          ? g.UseImmediate(offset)
                                          : g.UseRegister(offset);

  // The length may only be an immediate when the offset is not one; an
  // immediate offset forces the length into a register.
  InstructionOperand length_operand = (!g.CanBeImmediate(offset, opcode))
                                          ? g.CanBeImmediate(length, opcode)
                                                ? g.UseImmediate(length)
                                                : g.UseRegister(length)
                                          : g.UseRegister(length);

  Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
       offset_operand, length_operand, g.UseRegister(value),
       g.UseRegister(buffer));
}
1428
1429
1430namespace {
1431
1432// Shared routine for multiple compare operations.
1433static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001434 InstructionOperand left, InstructionOperand right,
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001435 FlagsContinuation* cont) {
1436 Mips64OperandGenerator g(selector);
1437 opcode = cont->Encode(opcode);
1438 if (cont->IsBranch()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001439 selector->Emit(opcode, g.NoOutput(), left, right,
1440 g.Label(cont->true_block()), g.Label(cont->false_block()));
Ben Murdochda12d292016-06-02 14:46:10 +01001441 } else if (cont->IsDeoptimize()) {
1442 selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
1443 cont->frame_state());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001444 } else {
1445 DCHECK(cont->IsSet());
1446 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1447 }
1448}
1449
1450
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001451// Shared routine for multiple float32 compare operations.
1452void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1453 FlagsContinuation* cont) {
1454 Mips64OperandGenerator g(selector);
1455 Float32BinopMatcher m(node);
1456 InstructionOperand lhs, rhs;
1457
1458 lhs = m.left().IsZero() ? g.UseImmediate(m.left().node())
1459 : g.UseRegister(m.left().node());
1460 rhs = m.right().IsZero() ? g.UseImmediate(m.right().node())
1461 : g.UseRegister(m.right().node());
1462 VisitCompare(selector, kMips64CmpS, lhs, rhs, cont);
1463}
1464
1465
1466// Shared routine for multiple float64 compare operations.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001467void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1468 FlagsContinuation* cont) {
1469 Mips64OperandGenerator g(selector);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001470 Float64BinopMatcher m(node);
1471 InstructionOperand lhs, rhs;
1472
1473 lhs = m.left().IsZero() ? g.UseImmediate(m.left().node())
1474 : g.UseRegister(m.left().node());
1475 rhs = m.right().IsZero() ? g.UseImmediate(m.right().node())
1476 : g.UseRegister(m.right().node());
1477 VisitCompare(selector, kMips64CmpD, lhs, rhs, cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001478}
1479
1480
// Shared routine for multiple word compare operations. Tries to encode one
// side as an immediate; the rules depend on the continuation's condition:
// ordered comparisons always take the immediate, equality takes it only for
// set-continuations (NOTE(review): branch-form equality presumably requires
// two registers in the code generator — confirm there), and all other
// conditions force registers. If only the left side can be an immediate, the
// continuation is commuted for non-commutative opcodes before swapping.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative) {
  Mips64OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, opcode)) {
    switch (cont->condition()) {
      case kEqual:
      case kNotEqual:
        if (cont->IsSet()) {
          VisitCompare(selector, opcode, g.UseRegister(left),
                       g.UseImmediate(right), cont);
        } else {
          VisitCompare(selector, opcode, g.UseRegister(left),
                       g.UseRegister(right), cont);
        }
        break;
      case kSignedLessThan:
      case kSignedGreaterThanOrEqual:
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
        VisitCompare(selector, opcode, g.UseRegister(left),
                     g.UseImmediate(right), cont);
        break;
      default:
        VisitCompare(selector, opcode, g.UseRegister(left),
                     g.UseRegister(right), cont);
    }
  } else if (g.CanBeImmediate(left, opcode)) {
    if (!commutative) cont->Commute();
    switch (cont->condition()) {
      case kEqual:
      case kNotEqual:
        if (cont->IsSet()) {
          VisitCompare(selector, opcode, g.UseRegister(right),
                       g.UseImmediate(left), cont);
        } else {
          VisitCompare(selector, opcode, g.UseRegister(right),
                       g.UseRegister(left), cont);
        }
        break;
      case kSignedLessThan:
      case kSignedGreaterThanOrEqual:
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
        VisitCompare(selector, opcode, g.UseRegister(right),
                     g.UseImmediate(left), cont);
        break;
      default:
        VisitCompare(selector, opcode, g.UseRegister(right),
                     g.UseRegister(left), cont);
    }
  } else {
    // Neither side fits an immediate: register/register compare.
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
                 cont);
  }
}
1542
1543
// 32-bit word compare. NOTE(review): this reuses the 64-bit kMips64Cmp, so
// it relies on 32-bit values being held sign-extended in registers — confirm
// against the code generator.
void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kMips64Cmp, cont, false);
}


// 64-bit word compare.
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  VisitWordCompare(selector, node, kMips64Cmp, cont, false);
}
1554
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001555
1556
// Emits a comparison of `value` against the zero immediate, dispatched on
// the continuation kind (branch / deoptimize / set-boolean), mirroring
// VisitCompare.
void EmitWordCompareZero(InstructionSelector* selector, Node* value,
                         FlagsContinuation* cont) {
  Mips64OperandGenerator g(selector);
  InstructionCode opcode = cont->Encode(kMips64Cmp);
  InstructionOperand const value_operand = g.UseRegister(value);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), value_operand, g.TempImmediate(0),
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), value_operand,
                             g.TempImmediate(0), cont->frame_state());
  } else {
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), value_operand,
                   g.TempImmediate(0));
  }
}
1573
1574
1575// Shared routine for word comparisons against zero.
1576void VisitWordCompareZero(InstructionSelector* selector, Node* user,
1577 Node* value, FlagsContinuation* cont) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001578 while (selector->CanCover(user, value)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001579 switch (value->opcode()) {
1580 case IrOpcode::kWord32Equal: {
1581 // Combine with comparisons against 0 by simply inverting the
1582 // continuation.
1583 Int32BinopMatcher m(value);
1584 if (m.right().Is(0)) {
1585 user = value;
1586 value = m.left().node();
1587 cont->Negate();
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001588 continue;
1589 }
1590 cont->OverwriteAndNegateIfEqual(kEqual);
1591 return VisitWord32Compare(selector, value, cont);
1592 }
1593 case IrOpcode::kInt32LessThan:
1594 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1595 return VisitWord32Compare(selector, value, cont);
1596 case IrOpcode::kInt32LessThanOrEqual:
1597 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1598 return VisitWord32Compare(selector, value, cont);
1599 case IrOpcode::kUint32LessThan:
1600 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1601 return VisitWord32Compare(selector, value, cont);
1602 case IrOpcode::kUint32LessThanOrEqual:
1603 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1604 return VisitWord32Compare(selector, value, cont);
1605 case IrOpcode::kWord64Equal: {
1606 // Combine with comparisons against 0 by simply inverting the
1607 // continuation.
1608 Int64BinopMatcher m(value);
1609 if (m.right().Is(0)) {
1610 user = value;
1611 value = m.left().node();
1612 cont->Negate();
1613 continue;
1614 }
1615 cont->OverwriteAndNegateIfEqual(kEqual);
1616 return VisitWord64Compare(selector, value, cont);
1617 }
1618 case IrOpcode::kInt64LessThan:
1619 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1620 return VisitWord64Compare(selector, value, cont);
1621 case IrOpcode::kInt64LessThanOrEqual:
1622 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1623 return VisitWord64Compare(selector, value, cont);
1624 case IrOpcode::kUint64LessThan:
1625 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1626 return VisitWord64Compare(selector, value, cont);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001627 case IrOpcode::kUint64LessThanOrEqual:
1628 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1629 return VisitWord64Compare(selector, value, cont);
1630 case IrOpcode::kFloat32Equal:
1631 cont->OverwriteAndNegateIfEqual(kEqual);
1632 return VisitFloat32Compare(selector, value, cont);
1633 case IrOpcode::kFloat32LessThan:
1634 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1635 return VisitFloat32Compare(selector, value, cont);
1636 case IrOpcode::kFloat32LessThanOrEqual:
1637 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1638 return VisitFloat32Compare(selector, value, cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001639 case IrOpcode::kFloat64Equal:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001640 cont->OverwriteAndNegateIfEqual(kEqual);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001641 return VisitFloat64Compare(selector, value, cont);
1642 case IrOpcode::kFloat64LessThan:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001643 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001644 return VisitFloat64Compare(selector, value, cont);
1645 case IrOpcode::kFloat64LessThanOrEqual:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001646 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001647 return VisitFloat64Compare(selector, value, cont);
1648 case IrOpcode::kProjection:
1649 // Check if this is the overflow output projection of an
1650 // <Operation>WithOverflow node.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001651 if (ProjectionIndexOf(value->op()) == 1u) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001652 // We cannot combine the <Operation>WithOverflow with this branch
1653 // unless the 0th projection (the use of the actual value of the
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001654 // <Operation> is either nullptr, which means there's no use of the
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001655 // actual value, or was already defined, which means it is scheduled
1656 // *AFTER* this branch).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001657 Node* const node = value->InputAt(0);
1658 Node* const result = NodeProperties::FindProjection(node, 0);
1659 if (result == nullptr || selector->IsDefined(result)) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001660 switch (node->opcode()) {
1661 case IrOpcode::kInt32AddWithOverflow:
1662 cont->OverwriteAndNegateIfEqual(kOverflow);
1663 return VisitBinop(selector, node, kMips64Dadd, cont);
1664 case IrOpcode::kInt32SubWithOverflow:
1665 cont->OverwriteAndNegateIfEqual(kOverflow);
1666 return VisitBinop(selector, node, kMips64Dsub, cont);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001667 case IrOpcode::kInt64AddWithOverflow:
1668 cont->OverwriteAndNegateIfEqual(kOverflow);
1669 return VisitBinop(selector, node, kMips64DaddOvf, cont);
1670 case IrOpcode::kInt64SubWithOverflow:
1671 cont->OverwriteAndNegateIfEqual(kOverflow);
1672 return VisitBinop(selector, node, kMips64DsubOvf, cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001673 default:
1674 break;
1675 }
1676 }
1677 }
1678 break;
1679 case IrOpcode::kWord32And:
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001680 case IrOpcode::kWord64And:
1681 return VisitWordCompare(selector, value, kMips64Tst, cont, true);
1682 default:
1683 break;
1684 }
1685 break;
1686 }
1687
1688 // Continuation could not be combined with a compare, emit compare against 0.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001689 EmitWordCompareZero(selector, value, cont);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001690}
1691
Ben Murdochda12d292016-06-02 14:46:10 +01001692} // namespace
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001693
1694void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1695 BasicBlock* fbranch) {
1696 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1697 VisitWordCompareZero(this, branch, branch->InputAt(0), &cont);
1698}
1699
Ben Murdochda12d292016-06-02 14:46:10 +01001700void InstructionSelector::VisitDeoptimizeIf(Node* node) {
1701 FlagsContinuation cont =
1702 FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
1703 VisitWordCompareZero(this, node, node->InputAt(0), &cont);
1704}
1705
1706void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
1707 FlagsContinuation cont =
1708 FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
1709 VisitWordCompareZero(this, node, node->InputAt(0), &cont);
1710}
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001711
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001712void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1713 Mips64OperandGenerator g(this);
1714 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1715
1716 // Emit either ArchTableSwitch or ArchLookupSwitch.
1717 size_t table_space_cost = 10 + 2 * sw.value_range;
1718 size_t table_time_cost = 3;
1719 size_t lookup_space_cost = 2 + 2 * sw.case_count;
1720 size_t lookup_time_cost = sw.case_count;
1721 if (sw.case_count > 0 &&
1722 table_space_cost + 3 * table_time_cost <=
1723 lookup_space_cost + 3 * lookup_time_cost &&
1724 sw.min_value > std::numeric_limits<int32_t>::min()) {
1725 InstructionOperand index_operand = value_operand;
1726 if (sw.min_value) {
1727 index_operand = g.TempRegister();
1728 Emit(kMips64Sub, index_operand, value_operand,
1729 g.TempImmediate(sw.min_value));
1730 }
1731 // Generate a table lookup.
1732 return EmitTableSwitch(sw, index_operand);
1733 }
1734
1735 // Generate a sequence of conditional jumps.
1736 return EmitLookupSwitch(sw, value_operand);
1737}
1738
1739
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001740void InstructionSelector::VisitWord32Equal(Node* const node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001741 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001742 Int32BinopMatcher m(node);
1743 if (m.right().Is(0)) {
1744 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
1745 }
1746
1747 VisitWord32Compare(this, node, &cont);
1748}
1749
1750
1751void InstructionSelector::VisitInt32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001752 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001753 VisitWord32Compare(this, node, &cont);
1754}
1755
1756
1757void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001758 FlagsContinuation cont =
1759 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001760 VisitWord32Compare(this, node, &cont);
1761}
1762
1763
1764void InstructionSelector::VisitUint32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001765 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001766 VisitWord32Compare(this, node, &cont);
1767}
1768
1769
1770void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001771 FlagsContinuation cont =
1772 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001773 VisitWord32Compare(this, node, &cont);
1774}
1775
1776
1777void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001778 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01001779 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001780 return VisitBinop(this, node, kMips64Dadd, &cont);
1781 }
1782 FlagsContinuation cont;
1783 VisitBinop(this, node, kMips64Dadd, &cont);
1784}
1785
1786
1787void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001788 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01001789 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001790 return VisitBinop(this, node, kMips64Dsub, &cont);
1791 }
1792 FlagsContinuation cont;
1793 VisitBinop(this, node, kMips64Dsub, &cont);
1794}
1795
1796
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001797void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1798 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01001799 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001800 return VisitBinop(this, node, kMips64DaddOvf, &cont);
1801 }
1802 FlagsContinuation cont;
1803 VisitBinop(this, node, kMips64DaddOvf, &cont);
1804}
1805
1806
1807void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1808 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
Ben Murdochda12d292016-06-02 14:46:10 +01001809 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001810 return VisitBinop(this, node, kMips64DsubOvf, &cont);
1811 }
1812 FlagsContinuation cont;
1813 VisitBinop(this, node, kMips64DsubOvf, &cont);
1814}
1815
1816
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001817void InstructionSelector::VisitWord64Equal(Node* const node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001818 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001819 Int64BinopMatcher m(node);
1820 if (m.right().Is(0)) {
1821 return VisitWordCompareZero(this, m.node(), m.left().node(), &cont);
1822 }
1823
1824 VisitWord64Compare(this, node, &cont);
1825}
1826
1827
1828void InstructionSelector::VisitInt64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001829 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001830 VisitWord64Compare(this, node, &cont);
1831}
1832
1833
1834void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001835 FlagsContinuation cont =
1836 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001837 VisitWord64Compare(this, node, &cont);
1838}
1839
1840
1841void InstructionSelector::VisitUint64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001842 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001843 VisitWord64Compare(this, node, &cont);
1844}
1845
1846
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001847void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001848 FlagsContinuation cont =
1849 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001850 VisitWord64Compare(this, node, &cont);
1851}
1852
1853
1854void InstructionSelector::VisitFloat32Equal(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001855 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001856 VisitFloat32Compare(this, node, &cont);
1857}
1858
1859
1860void InstructionSelector::VisitFloat32LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001861 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001862 VisitFloat32Compare(this, node, &cont);
1863}
1864
1865
1866void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001867 FlagsContinuation cont =
1868 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001869 VisitFloat32Compare(this, node, &cont);
1870}
1871
1872
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001873void InstructionSelector::VisitFloat64Equal(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001874 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001875 VisitFloat64Compare(this, node, &cont);
1876}
1877
1878
1879void InstructionSelector::VisitFloat64LessThan(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001880 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001881 VisitFloat64Compare(this, node, &cont);
1882}
1883
1884
1885void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
Ben Murdochda12d292016-06-02 14:46:10 +01001886 FlagsContinuation cont =
1887 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001888 VisitFloat64Compare(this, node, &cont);
1889}
1890
1891
// Extracts the low 32 bits of a float64 into a word32 (single
// register-to-register instruction).
void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  VisitRR(this, kMips64Float64ExtractLowWord32, node);
}
1895
1896
// Extracts the high 32 bits of a float64 into a word32 (single
// register-to-register instruction).
void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  VisitRR(this, kMips64Float64ExtractHighWord32, node);
}
1900
1901
1902void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1903 Mips64OperandGenerator g(this);
1904 Node* left = node->InputAt(0);
1905 Node* right = node->InputAt(1);
1906 Emit(kMips64Float64InsertLowWord32, g.DefineSameAsFirst(node),
1907 g.UseRegister(left), g.UseRegister(right));
1908}
1909
1910
1911void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1912 Mips64OperandGenerator g(this);
1913 Node* left = node->InputAt(0);
1914 Node* right = node->InputAt(1);
1915 Emit(kMips64Float64InsertHighWord32, g.DefineSameAsFirst(node),
1916 g.UseRegister(left), g.UseRegister(right));
1917}
1918
1919
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001920// static
1921MachineOperatorBuilder::Flags
1922InstructionSelector::SupportedMachineOperatorFlags() {
Ben Murdoch097c5b22016-05-18 11:27:45 +01001923 return MachineOperatorBuilder::kWord32Ctz |
1924 MachineOperatorBuilder::kWord64Ctz |
1925 MachineOperatorBuilder::kWord32Popcnt |
1926 MachineOperatorBuilder::kWord64Popcnt |
1927 MachineOperatorBuilder::kWord32ShiftIsSafe |
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001928 MachineOperatorBuilder::kInt32DivIsSafe |
1929 MachineOperatorBuilder::kUint32DivIsSafe |
1930 MachineOperatorBuilder::kFloat64Min |
1931 MachineOperatorBuilder::kFloat64Max |
1932 MachineOperatorBuilder::kFloat32Min |
1933 MachineOperatorBuilder::kFloat32Max |
1934 MachineOperatorBuilder::kFloat64RoundDown |
1935 MachineOperatorBuilder::kFloat32RoundDown |
1936 MachineOperatorBuilder::kFloat64RoundUp |
1937 MachineOperatorBuilder::kFloat32RoundUp |
1938 MachineOperatorBuilder::kFloat64RoundTruncate |
1939 MachineOperatorBuilder::kFloat32RoundTruncate |
1940 MachineOperatorBuilder::kFloat64RoundTiesEven |
1941 MachineOperatorBuilder::kFloat32RoundTiesEven;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001942}
1943
1944} // namespace compiler
1945} // namespace internal
1946} // namespace v8