// Copyright 2015 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/base/adapters.h"
#include "src/compiler/instruction-selector-impl.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/node-properties.h"
#include "src/s390/frames-s390.h"

namespace v8 {
namespace internal {
namespace compiler {

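// Immediate operand classes used by CanBeImmediate() below. Each mode names
// the value range (and, for kInt16Imm_4ByteAligned, the alignment) that the
// selected instruction form is expected to accept.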
enum ImmediateMode {
  kInt16Imm,
  kInt16Imm_Unsigned,
  kInt16Imm_Negate,
  kInt16Imm_4ByteAligned,
  kShift32Imm,
  kShift64Imm,
  kNoImmediate
};

// Adds S390-specific methods for generating operands.
class S390OperandGenerator final : public OperandGenerator {
 public:
  explicit S390OperandGenerator(InstructionSelector* selector)
      : OperandGenerator(selector) {}

  InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
    if (CanBeImmediate(node, mode)) {
      return UseImmediate(node);
    }
    return UseRegister(node);
  }

  bool CanBeImmediate(Node* node, ImmediateMode mode) {
    int64_t value;
    if (node->opcode() == IrOpcode::kInt32Constant)
      value = OpParameter<int32_t>(node);
    else if (node->opcode() == IrOpcode::kInt64Constant)
      value = OpParameter<int64_t>(node);
    else
      return false;
    return CanBeImmediate(value, mode);
  }

  bool CanBeImmediate(int64_t value, ImmediateMode mode) {
    switch (mode) {
      case kInt16Imm:
        return is_int16(value);
      case kInt16Imm_Unsigned:
        return is_uint16(value);
      case kInt16Imm_Negate:
        return is_int16(-value);
      case kInt16Imm_4ByteAligned:
        return is_int16(value) && !(value & 3);
      case kShift32Imm:
        return 0 <= value && value < 32;
      case kShift64Imm:
        return 0 <= value && value < 64;
      case kNoImmediate:
        return false;
    }
    return false;
  }
};

namespace {

void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
  S390OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)));
}

void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
  S390OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseRegister(node->InputAt(1)));
}

void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
              ImmediateMode operand_mode) {
  S390OperandGenerator g(selector);
  selector->Emit(opcode, g.DefineAsRegister(node),
                 g.UseRegister(node->InputAt(0)),
                 g.UseOperand(node->InputAt(1), operand_mode));
}

#if V8_TARGET_ARCH_S390X
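// Emits a checked float-to-integer truncation. If the node has a projection 1
// (the "success" output), it is defined as a second register output; otherwise
// only the truncated value is produced.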
void VisitTryTruncateDouble(InstructionSelector* selector, ArchOpcode opcode,
                            Node* node) {
  S390OperandGenerator g(selector);
  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
  InstructionOperand outputs[2];
  size_t output_count = 0;
  outputs[output_count++] = g.DefineAsRegister(node);

  Node* success_output = NodeProperties::FindProjection(node, 1);
  if (success_output) {
    outputs[output_count++] = g.DefineAsRegister(success_output);
  }

  selector->Emit(opcode, output_count, outputs, 1, inputs);
}
#endif

// Shared routine for multiple binary operations.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node,
                InstructionCode opcode, ImmediateMode operand_mode,
                FlagsContinuation* cont) {
  S390OperandGenerator g(selector);
  Matcher m(node);
  InstructionOperand inputs[4];
  size_t input_count = 0;
  InstructionOperand outputs[2];
  size_t output_count = 0;

  inputs[input_count++] = g.UseRegister(m.left().node());
  inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);

  if (cont->IsBranch()) {
    inputs[input_count++] = g.Label(cont->true_block());
    inputs[input_count++] = g.Label(cont->false_block());
  }

  outputs[output_count++] = g.DefineAsRegister(node);
  if (cont->IsSet()) {
    outputs[output_count++] = g.DefineAsRegister(cont->result());
  }

  DCHECK_NE(0u, input_count);
  DCHECK_NE(0u, output_count);
  DCHECK_GE(arraysize(inputs), input_count);
  DCHECK_GE(arraysize(outputs), output_count);

  opcode = cont->Encode(opcode);
  if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
                             cont->frame_state());
  } else {
    selector->Emit(opcode, output_count, outputs, input_count, inputs);
  }
}

// Shared routine for multiple binary operations.
template <typename Matcher>
void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
                ImmediateMode operand_mode) {
  FlagsContinuation cont;
  VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
}

}  // namespace

void InstructionSelector::VisitLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  S390OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  ArchOpcode opcode = kArchNop;
  ImmediateMode mode = kInt16Imm;
  switch (load_rep.representation()) {
    case MachineRepresentation::kFloat32:
      opcode = kS390_LoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kS390_LoadDouble;
      break;
    case MachineRepresentation::kBit:  // Fall through.
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kS390_LoadWordS8 : kS390_LoadWordU8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kS390_LoadWordS16 : kS390_LoadWordU16;
      break;
#if !V8_TARGET_ARCH_S390X
    case MachineRepresentation::kTagged:  // Fall through.
#endif
    case MachineRepresentation::kWord32:
      opcode = kS390_LoadWordS32;
#if V8_TARGET_ARCH_S390X
      // TODO(john.yan): Remove this mode since s390 does not have this
      // restriction.
      mode = kInt16Imm_4ByteAligned;
#endif
      break;
#if V8_TARGET_ARCH_S390X
    case MachineRepresentation::kTagged:  // Fall through.
    case MachineRepresentation::kWord64:
      opcode = kS390_LoadWord64;
      mode = kInt16Imm_4ByteAligned;
      break;
#else
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  if (g.CanBeImmediate(offset, mode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
  } else if (g.CanBeImmediate(base, mode)) {
    Emit(opcode | AddressingModeField::encode(kMode_MRI),
         g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
  } else {
    Emit(opcode | AddressingModeField::encode(kMode_MRR),
         g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
  }
}

void InstructionSelector::VisitStore(Node* node) {
  S390OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* offset = node->InputAt(1);
  Node* value = node->InputAt(2);

  StoreRepresentation store_rep = StoreRepresentationOf(node->op());
  WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
  MachineRepresentation rep = store_rep.representation();

  if (write_barrier_kind != kNoWriteBarrier) {
    DCHECK_EQ(MachineRepresentation::kTagged, rep);
    AddressingMode addressing_mode;
    InstructionOperand inputs[3];
    size_t input_count = 0;
    inputs[input_count++] = g.UseUniqueRegister(base);
    // OutOfLineRecordWrite uses the offset in an 'AddP' instruction as well as
    // for the store itself, so we must check compatibility with both.
    if (g.CanBeImmediate(offset, kInt16Imm)
#if V8_TARGET_ARCH_S390X
        && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
#endif
        ) {
      inputs[input_count++] = g.UseImmediate(offset);
      addressing_mode = kMode_MRI;
    } else {
      inputs[input_count++] = g.UseUniqueRegister(offset);
      addressing_mode = kMode_MRR;
    }
    inputs[input_count++] = g.UseUniqueRegister(value);
    RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
    switch (write_barrier_kind) {
      case kNoWriteBarrier:
        UNREACHABLE();
        break;
      case kMapWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsMap;
        break;
      case kPointerWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsPointer;
        break;
      case kFullWriteBarrier:
        record_write_mode = RecordWriteMode::kValueIsAny;
        break;
    }
    InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
    size_t const temp_count = arraysize(temps);
    InstructionCode code = kArchStoreWithWriteBarrier;
    code |= AddressingModeField::encode(addressing_mode);
    code |= MiscField::encode(static_cast<int>(record_write_mode));
    Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
  } else {
    ArchOpcode opcode = kArchNop;
    ImmediateMode mode = kInt16Imm;
    switch (rep) {
      case MachineRepresentation::kFloat32:
        opcode = kS390_StoreFloat32;
        break;
      case MachineRepresentation::kFloat64:
        opcode = kS390_StoreDouble;
        break;
      case MachineRepresentation::kBit:  // Fall through.
      case MachineRepresentation::kWord8:
        opcode = kS390_StoreWord8;
        break;
      case MachineRepresentation::kWord16:
        opcode = kS390_StoreWord16;
        break;
#if !V8_TARGET_ARCH_S390X
      case MachineRepresentation::kTagged:  // Fall through.
#endif
      case MachineRepresentation::kWord32:
        opcode = kS390_StoreWord32;
        break;
#if V8_TARGET_ARCH_S390X
      case MachineRepresentation::kTagged:  // Fall through.
      case MachineRepresentation::kWord64:
        opcode = kS390_StoreWord64;
        mode = kInt16Imm_4ByteAligned;
        break;
#else
      case MachineRepresentation::kWord64:  // Fall through.
#endif
      case MachineRepresentation::kSimd128:  // Fall through.
      case MachineRepresentation::kNone:
        UNREACHABLE();
        return;
    }
    if (g.CanBeImmediate(offset, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
    } else if (g.CanBeImmediate(base, mode)) {
      Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
           g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
    } else {
      Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
           g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
    }
  }
}

void InstructionSelector::VisitCheckedLoad(Node* node) {
  CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
  S390OperandGenerator g(this);
  Node* const base = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedLoadWord32;
      break;
#if V8_TARGET_ARCH_S390X
    case MachineRepresentation::kWord64:
      opcode = kCheckedLoadWord64;
      break;
#endif
    case MachineRepresentation::kFloat32:
      opcode = kCheckedLoadFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedLoadFloat64;
      break;
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#if !V8_TARGET_ARCH_S390X
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  AddressingMode addressingMode = kMode_MRR;
  Emit(opcode | AddressingModeField::encode(addressingMode),
       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
       g.UseOperand(length, kInt16Imm_Unsigned));
}

void InstructionSelector::VisitCheckedStore(Node* node) {
  MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
  S390OperandGenerator g(this);
  Node* const base = node->InputAt(0);
  Node* const offset = node->InputAt(1);
  Node* const length = node->InputAt(2);
  Node* const value = node->InputAt(3);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kCheckedStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kCheckedStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kCheckedStoreWord32;
      break;
#if V8_TARGET_ARCH_S390X
    case MachineRepresentation::kWord64:
      opcode = kCheckedStoreWord64;
      break;
#endif
    case MachineRepresentation::kFloat32:
      opcode = kCheckedStoreFloat32;
      break;
    case MachineRepresentation::kFloat64:
      opcode = kCheckedStoreFloat64;
      break;
    case MachineRepresentation::kBit:     // Fall through.
    case MachineRepresentation::kTagged:  // Fall through.
#if !V8_TARGET_ARCH_S390X
    case MachineRepresentation::kWord64:  // Fall through.
#endif
    case MachineRepresentation::kSimd128:  // Fall through.
    case MachineRepresentation::kNone:
      UNREACHABLE();
      return;
  }
  AddressingMode addressingMode = kMode_MRR;
  Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
       g.UseRegister(base), g.UseRegister(offset),
       g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
}

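// Shared routine for And/Or. If either operand is an Xor with -1 (a bitwise
// NOT) that this node can cover, the complement form of the opcode is emitted
// instead; otherwise selection falls back to VisitBinop.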
template <typename Matcher>
static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
                         ArchOpcode opcode, bool left_can_cover,
                         bool right_can_cover, ImmediateMode imm_mode) {
  S390OperandGenerator g(selector);

  // Map instruction to equivalent operation with inverted right input.
  ArchOpcode inv_opcode = opcode;
  switch (opcode) {
    case kS390_And:
      inv_opcode = kS390_AndComplement;
      break;
    case kS390_Or:
      inv_opcode = kS390_OrComplement;
      break;
    default:
      UNREACHABLE();
  }

  // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
  if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
    Matcher mleft(m->left().node());
    if (mleft.right().Is(-1)) {
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->right().node()),
                     g.UseRegister(mleft.left().node()));
      return;
    }
  }

  // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
  if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
      right_can_cover) {
    Matcher mright(m->right().node());
    if (mright.right().Is(-1)) {
      // TODO(all): support shifted operand on right.
      selector->Emit(inv_opcode, g.DefineAsRegister(node),
                     g.UseRegister(m->left().node()),
                     g.UseRegister(mright.left().node()));
      return;
    }
  }

  VisitBinop<Matcher>(selector, node, opcode, imm_mode);
}

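// Returns true if |value| consists of a single contiguous run of set bits.
// On success, *mb receives the bit index of the most significant set bit and
// *me the index of the least significant set bit (bit 0 being the LSB).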
static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
  int mask_width = base::bits::CountPopulation32(value);
  int mask_msb = base::bits::CountLeadingZeros32(value);
  int mask_lsb = base::bits::CountTrailingZeros32(value);
  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
    return false;
  *mb = mask_lsb + mask_width - 1;
  *me = mask_lsb;
  return true;
}

#if V8_TARGET_ARCH_S390X
static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
  int mask_width = base::bits::CountPopulation64(value);
  int mask_msb = base::bits::CountLeadingZeros64(value);
  int mask_lsb = base::bits::CountTrailingZeros64(value);
  if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
    return false;
  *mb = mask_lsb + mask_width - 1;
  *me = mask_lsb;
  return true;
}
#endif

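// Tries to combine an And whose mask is a contiguous bit run with a covered
// shift of its left operand into a single rotate-then-mask instruction
// (kS390_RotLeftAndMask32); otherwise defers to the generic logical path.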
void InstructionSelector::VisitWord32And(Node* node) {
  S390OperandGenerator g(this);
  Int32BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
        CanCover(node, left)) {
      Int32BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 31)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord32Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 31 - sh) mb = 31 - sh;
          sh = (32 - sh) & 0x1f;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
           g.UseRegister(left), g.TempImmediate(sh), g.TempImmediate(mb),
           g.TempImmediate(me));
      return;
    }
  }
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kS390_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64And(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  int mb = 0;
  int me = 0;
  if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
    int sh = 0;
    Node* left = m.left().node();
    if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
        CanCover(node, left)) {
      Int64BinopMatcher mleft(m.left().node());
      if (mleft.right().IsInRange(0, 63)) {
        left = mleft.left().node();
        sh = mleft.right().Value();
        if (m.left().IsWord64Shr()) {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (mb > 63 - sh) mb = 63 - sh;
          sh = (64 - sh) & 0x3f;
        } else {
          // Adjust the mask such that it doesn't include any rotated bits.
          if (me < sh) me = sh;
        }
      }
    }
    if (mb >= me) {
      bool match = false;
      ArchOpcode opcode;
      int mask;
      if (me == 0) {
        match = true;
        opcode = kS390_RotLeftAndClearLeft64;
        mask = mb;
      } else if (mb == 63) {
        match = true;
        opcode = kS390_RotLeftAndClearRight64;
        mask = me;
      } else if (sh && me <= sh && m.left().IsWord64Shl()) {
        match = true;
        opcode = kS390_RotLeftAndClear64;
        mask = mb;
      }
      if (match) {
        Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
             g.TempImmediate(sh), g.TempImmediate(mask));
        return;
      }
    }
  }
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kS390_And, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
#endif

void InstructionSelector::VisitWord32Or(Node* node) {
  Int32BinopMatcher m(node);
  VisitLogical<Int32BinopMatcher>(
      this, node, &m, kS390_Or, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Or(Node* node) {
  Int64BinopMatcher m(node);
  VisitLogical<Int64BinopMatcher>(
      this, node, &m, kS390_Or, CanCover(node, m.left().node()),
      CanCover(node, m.right().node()), kInt16Imm_Unsigned);
}
#endif

void InstructionSelector::VisitWord32Xor(Node* node) {
  S390OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kS390_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop<Int32BinopMatcher>(this, node, kS390_Xor, kInt16Imm_Unsigned);
  }
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Xor(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.right().Is(-1)) {
    Emit(kS390_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
  } else {
    VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor, kInt16Imm_Unsigned);
  }
}
#endif

void InstructionSelector::VisitWord32Shl(Node* node) {
  S390OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  VisitRRO(this, kS390_ShiftLeft32, node, kShift32Imm);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Shl(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  // TODO(mbrandy): eliminate left sign extension if right >= 32
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (me < sh) me = sh;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kS390_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kS390_RotLeftAndClearRight64;
          mask = me;
        } else if (sh && me <= sh) {
          match = true;
          opcode = kS390_RotLeftAndClear64;
          mask = mb;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitRRO(this, kS390_ShiftLeft64, node, kShift64Imm);
}
#endif

void InstructionSelector::VisitWord32Shr(Node* node) {
  S390OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
    Int32BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 31 - sh) mb = 31 - sh;
      sh = (32 - sh) & 0x1f;
      if (mb >= me) {
        Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
             g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
             g.TempImmediate(mb), g.TempImmediate(me));
        return;
      }
    }
  }
  VisitRRO(this, kS390_ShiftRight32, node, kShift32Imm);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Shr(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
    Int64BinopMatcher mleft(m.left().node());
    int sh = m.right().Value();
    int mb;
    int me;
    if (mleft.right().HasValue() &&
        IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
      // Adjust the mask such that it doesn't include any rotated bits.
      if (mb > 63 - sh) mb = 63 - sh;
      sh = (64 - sh) & 0x3f;
      if (mb >= me) {
        bool match = false;
        ArchOpcode opcode;
        int mask;
        if (me == 0) {
          match = true;
          opcode = kS390_RotLeftAndClearLeft64;
          mask = mb;
        } else if (mb == 63) {
          match = true;
          opcode = kS390_RotLeftAndClearRight64;
          mask = me;
        }
        if (match) {
          Emit(opcode, g.DefineAsRegister(node),
               g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
               g.TempImmediate(mask));
          return;
        }
      }
    }
  }
  VisitRRO(this, kS390_ShiftRight64, node, kShift64Imm);
}
#endif

void InstructionSelector::VisitWord32Sar(Node* node) {
  S390OperandGenerator g(this);
  Int32BinopMatcher m(node);
  // Replace with sign extension for (x << K) >> K where K is 16 or 24.
  if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
    Int32BinopMatcher mleft(m.left().node());
    if (mleft.right().Is(16) && m.right().Is(16)) {
      Emit(kS390_ExtendSignWord16, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    } else if (mleft.right().Is(24) && m.right().Is(24)) {
      Emit(kS390_ExtendSignWord8, g.DefineAsRegister(node),
           g.UseRegister(mleft.left().node()));
      return;
    }
  }
  VisitRRO(this, kS390_ShiftRightArith32, node, kShift32Imm);
}

#if !V8_TARGET_ARCH_S390X
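// On 32-bit builds, word64 operations are lowered to pairs of 32-bit values.
// Each pair visitor below consumes two inputs per 64-bit operand and defines
// two outputs: the node itself and its projection 1.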
void VisitPairBinop(InstructionSelector* selector, ArchOpcode opcode,
                    Node* node) {
  S390OperandGenerator g(selector);

  // We use UseUniqueRegister here to avoid register sharing with the output
  // registers.
  InstructionOperand inputs[] = {
      g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
      g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};

  InstructionOperand outputs[] = {
      g.DefineAsRegister(node),
      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

  selector->Emit(opcode, 2, outputs, 4, inputs);
}

void InstructionSelector::VisitInt32PairAdd(Node* node) {
  VisitPairBinop(this, kS390_AddPair, node);
}

void InstructionSelector::VisitInt32PairSub(Node* node) {
  VisitPairBinop(this, kS390_SubPair, node);
}

void InstructionSelector::VisitInt32PairMul(Node* node) {
  S390OperandGenerator g(this);
  InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
                                 g.UseUniqueRegister(node->InputAt(1)),
                                 g.UseUniqueRegister(node->InputAt(2)),
                                 g.UseUniqueRegister(node->InputAt(3))};

  InstructionOperand outputs[] = {
      g.DefineAsRegister(node),
      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

  Emit(kS390_MulPair, 2, outputs, 4, inputs);
}

void VisitPairShift(InstructionSelector* selector, ArchOpcode opcode,
                    Node* node) {
  S390OperandGenerator g(selector);
  Int32Matcher m(node->InputAt(2));
  InstructionOperand shift_operand;
  if (m.HasValue()) {
    shift_operand = g.UseImmediate(m.node());
  } else {
    shift_operand = g.UseUniqueRegister(m.node());
  }

  InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0)),
                                 g.UseRegister(node->InputAt(1)),
                                 shift_operand};

  InstructionOperand outputs[] = {
      g.DefineSameAsFirst(node),
      g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};

  selector->Emit(opcode, 2, outputs, 3, inputs);
}

void InstructionSelector::VisitWord32PairShl(Node* node) {
  VisitPairShift(this, kS390_ShiftLeftPair, node);
}

void InstructionSelector::VisitWord32PairShr(Node* node) {
  VisitPairShift(this, kS390_ShiftRightPair, node);
}

void InstructionSelector::VisitWord32PairSar(Node* node) {
  VisitPairShift(this, kS390_ShiftRightArithPair, node);
}
#endif

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Sar(Node* node) {
  VisitRRO(this, kS390_ShiftRightArith64, node, kShift64Imm);
}
#endif

void InstructionSelector::VisitWord32Ror(Node* node) {
  VisitRRO(this, kS390_RotRight32, node, kShift32Imm);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Ror(Node* node) {
  VisitRRO(this, kS390_RotRight64, node, kShift64Imm);
}
#endif

void InstructionSelector::VisitWord32Clz(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_Cntlz32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Clz(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_Cntlz64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
#endif

void InstructionSelector::VisitWord32Popcnt(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_Popcnt32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Popcnt(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_Popcnt64, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}
#endif

void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
#endif

void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
#endif

void InstructionSelector::VisitInt32Add(Node* node) {
  VisitBinop<Int32BinopMatcher>(this, node, kS390_Add, kInt16Imm);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitInt64Add(Node* node) {
  VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm);
}
#endif

void InstructionSelector::VisitInt32Sub(Node* node) {
  S390OperandGenerator g(this);
  Int32BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kS390_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
  } else {
    VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate);
  }
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitInt64Sub(Node* node) {
  S390OperandGenerator g(this);
  Int64BinopMatcher m(node);
  if (m.left().Is(0)) {
    Emit(kS390_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
  } else {
    VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate);
  }
}
#endif

void InstructionSelector::VisitInt32Mul(Node* node) {
  VisitRRR(this, kS390_Mul32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitInt64Mul(Node* node) {
  VisitRRR(this, kS390_Mul64, node);
}
#endif

void InstructionSelector::VisitInt32MulHigh(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_MulHigh32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}

void InstructionSelector::VisitUint32MulHigh(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_MulHighU32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
}

void InstructionSelector::VisitInt32Div(Node* node) {
  VisitRRR(this, kS390_Div32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitInt64Div(Node* node) {
  VisitRRR(this, kS390_Div64, node);
}
#endif

void InstructionSelector::VisitUint32Div(Node* node) {
  VisitRRR(this, kS390_DivU32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitUint64Div(Node* node) {
  VisitRRR(this, kS390_DivU64, node);
}
#endif

void InstructionSelector::VisitInt32Mod(Node* node) {
  VisitRRR(this, kS390_Mod32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitInt64Mod(Node* node) {
  VisitRRR(this, kS390_Mod64, node);
}
#endif

void InstructionSelector::VisitUint32Mod(Node* node) {
  VisitRRR(this, kS390_ModU32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitUint64Mod(Node* node) {
  VisitRRR(this, kS390_ModU64, node);
}
#endif

void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
  VisitRR(this, kS390_Float32ToDouble, node);
}

void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
  VisitRR(this, kS390_Int32ToFloat32, node);
}

void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
  VisitRR(this, kS390_Uint32ToFloat32, node);
}

void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
  VisitRR(this, kS390_Int32ToDouble, node);
}

void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
  VisitRR(this, kS390_Uint32ToDouble, node);
}

void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
  VisitRR(this, kS390_DoubleToInt32, node);
}

void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
  VisitRR(this, kS390_DoubleToUint32, node);
}

void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
  VisitRR(this, kS390_DoubleToUint32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kS390_Float32ToInt64, node);
}

void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
  VisitTryTruncateDouble(this, kS390_DoubleToInt64, node);
}

void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kS390_Float32ToUint64, node);
}

void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
  VisitTryTruncateDouble(this, kS390_DoubleToUint64, node);
}

void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kS390_ExtendSignWord32, node);
}

void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kS390_Uint32ToUint64, node);
}
#endif

void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
  VisitRR(this, kS390_DoubleToFloat32, node);
}

void InstructionSelector::VisitTruncateFloat64ToInt32(Node* node) {
  switch (TruncationModeOf(node->op())) {
    case TruncationMode::kJavaScript:
      return VisitRR(this, kArchTruncateDoubleToI, node);
    case TruncationMode::kRoundToZero:
      return VisitRR(this, kS390_DoubleToInt32, node);
  }
  UNREACHABLE();
}

void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
  VisitRR(this, kS390_Float32ToInt32, node);
}

void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
  VisitRR(this, kS390_Float32ToUint32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
  // TODO(mbrandy): inspect input to see if nop is appropriate.
  VisitRR(this, kS390_Int64ToInt32, node);
}

void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
  VisitRR(this, kS390_Int64ToFloat32, node);
}

void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
  VisitRR(this, kS390_Int64ToDouble, node);
}

void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
  VisitRR(this, kS390_Uint64ToFloat32, node);
}

void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
  VisitRR(this, kS390_Uint64ToDouble, node);
}
#endif

void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
  VisitRR(this, kS390_BitcastFloat32ToInt32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
  VisitRR(this, kS390_BitcastDoubleToInt64, node);
}
#endif

void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
  VisitRR(this, kS390_BitcastInt32ToFloat32, node);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
  VisitRR(this, kS390_BitcastInt64ToDouble, node);
}
#endif

void InstructionSelector::VisitFloat32Add(Node* node) {
  VisitRRR(this, kS390_AddFloat, node);
}

void InstructionSelector::VisitFloat64Add(Node* node) {
  // TODO(mbrandy): detect multiply-add
  VisitRRR(this, kS390_AddDouble, node);
}

void InstructionSelector::VisitFloat32Sub(Node* node) {
  S390OperandGenerator g(this);
  Float32BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    Emit(kS390_NegDouble, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  VisitRRR(this, kS390_SubFloat, node);
}

void InstructionSelector::VisitFloat64Sub(Node* node) {
  // TODO(mbrandy): detect multiply-subtract
  S390OperandGenerator g(this);
  Float64BinopMatcher m(node);
  if (m.left().IsMinusZero()) {
    if (m.right().IsFloat64RoundDown() &&
        CanCover(m.node(), m.right().node())) {
      if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
          CanCover(m.right().node(), m.right().InputAt(0))) {
        Float64BinopMatcher mright0(m.right().InputAt(0));
        if (mright0.left().IsMinusZero()) {
          // -floor(-x) = ceil(x)
          Emit(kS390_CeilDouble, g.DefineAsRegister(node),
               g.UseRegister(mright0.right().node()));
          return;
        }
      }
    }
    Emit(kS390_NegDouble, g.DefineAsRegister(node),
         g.UseRegister(m.right().node()));
    return;
  }
  VisitRRR(this, kS390_SubDouble, node);
}

void InstructionSelector::VisitFloat32Mul(Node* node) {
  VisitRRR(this, kS390_MulFloat, node);
}

void InstructionSelector::VisitFloat64Mul(Node* node) {
  // TODO(mbrandy): detect negate
  VisitRRR(this, kS390_MulDouble, node);
}

void InstructionSelector::VisitFloat32Div(Node* node) {
  VisitRRR(this, kS390_DivFloat, node);
}

void InstructionSelector::VisitFloat64Div(Node* node) {
  VisitRRR(this, kS390_DivDouble, node);
}

void InstructionSelector::VisitFloat64Mod(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_ModDouble, g.DefineAsFixed(node, d1),
       g.UseFixed(node->InputAt(0), d1), g.UseFixed(node->InputAt(1), d2))
      ->MarkAsCall();
}

void InstructionSelector::VisitFloat32Max(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat32Min(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }

void InstructionSelector::VisitFloat32Abs(Node* node) {
  VisitRR(this, kS390_AbsFloat, node);
}

void InstructionSelector::VisitFloat64Abs(Node* node) {
  VisitRR(this, kS390_AbsDouble, node);
}

void InstructionSelector::VisitFloat32Sqrt(Node* node) {
  VisitRR(this, kS390_SqrtFloat, node);
}

void InstructionSelector::VisitFloat64Sqrt(Node* node) {
  VisitRR(this, kS390_SqrtDouble, node);
}

void InstructionSelector::VisitFloat32RoundDown(Node* node) {
  VisitRR(this, kS390_FloorFloat, node);
}

void InstructionSelector::VisitFloat64RoundDown(Node* node) {
  VisitRR(this, kS390_FloorDouble, node);
}

void InstructionSelector::VisitFloat32RoundUp(Node* node) {
  VisitRR(this, kS390_CeilFloat, node);
}

void InstructionSelector::VisitFloat64RoundUp(Node* node) {
  VisitRR(this, kS390_CeilDouble, node);
}

void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
  VisitRR(this, kS390_TruncateFloat, node);
}

void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
  VisitRR(this, kS390_TruncateDouble, node);
}

void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
  VisitRR(this, kS390_RoundDouble, node);
}

void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
  UNREACHABLE();
}

void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kS390_AddWithOverflow32,
                                         kInt16Imm, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kS390_AddWithOverflow32, kInt16Imm,
                                &cont);
}

void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int32BinopMatcher>(this, node, kS390_SubWithOverflow32,
                                         kInt16Imm_Negate, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int32BinopMatcher>(this, node, kS390_SubWithOverflow32,
                                kInt16Imm_Negate, &cont);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm,
                                         &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm, &cont);
}

void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
  if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
    FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
    return VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub,
                                         kInt16Imm_Negate, &cont);
  }
  FlagsContinuation cont;
  VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate, &cont);
}
#endif

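// Returns true if the continuation's condition is an unsigned comparison, in
// which case the callers below select the unsigned immediate mode for the
// compare.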
static bool CompareLogical(FlagsContinuation* cont) {
  switch (cont->condition()) {
    case kUnsignedLessThan:
    case kUnsignedGreaterThanOrEqual:
    case kUnsignedLessThanOrEqual:
    case kUnsignedGreaterThan:
      return true;
    default:
      return false;
  }
  UNREACHABLE();
  return false;
}

namespace {

// Shared routine for multiple compare operations.
void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
                  InstructionOperand left, InstructionOperand right,
                  FlagsContinuation* cont) {
  S390OperandGenerator g(selector);
  opcode = cont->Encode(opcode);
  if (cont->IsBranch()) {
    selector->Emit(opcode, g.NoOutput(), left, right,
                   g.Label(cont->true_block()), g.Label(cont->false_block()));
  } else if (cont->IsDeoptimize()) {
    selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
                             cont->frame_state());
  } else {
    DCHECK(cont->IsSet());
    selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
  }
}

// Shared routine for multiple word compare operations.
void VisitWordCompare(InstructionSelector* selector, Node* node,
                      InstructionCode opcode, FlagsContinuation* cont,
                      bool commutative, ImmediateMode immediate_mode) {
  S390OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);

  // Match immediates on left or right side of comparison.
  if (g.CanBeImmediate(right, immediate_mode)) {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
                 cont);
  } else if (g.CanBeImmediate(left, immediate_mode)) {
    if (!commutative) cont->Commute();
    VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
                 cont);
  } else {
    VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
                 cont);
  }
}

void VisitWord32Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kS390_Cmp32, cont, false, mode);
}

#if V8_TARGET_ARCH_S390X
void VisitWord64Compare(InstructionSelector* selector, Node* node,
                        FlagsContinuation* cont) {
  ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
  VisitWordCompare(selector, node, kS390_Cmp64, cont, false, mode);
}
#endif

// Shared routine for multiple float32 compare operations.
void VisitFloat32Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  S390OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kS390_CmpFloat, g.UseRegister(left),
               g.UseRegister(right), cont);
}

// Shared routine for multiple float64 compare operations.
void VisitFloat64Compare(InstructionSelector* selector, Node* node,
                         FlagsContinuation* cont) {
  S390OperandGenerator g(selector);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  VisitCompare(selector, kS390_CmpDouble, g.UseRegister(left),
               g.UseRegister(right), cont);
}

// Shared routine for word comparisons against zero.
void VisitWordCompareZero(InstructionSelector* selector, Node* user,
                          Node* value, InstructionCode opcode,
                          FlagsContinuation* cont) {
  while (selector->CanCover(user, value)) {
    switch (value->opcode()) {
      case IrOpcode::kWord32Equal: {
        // Combine with comparisons against 0 by simply inverting the
        // continuation.
        Int32BinopMatcher m(value);
        if (m.right().Is(0)) {
          user = value;
          value = m.left().node();
          cont->Negate();
          continue;
        }
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord32Compare(selector, value, cont);
      }
      case IrOpcode::kInt32LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kInt32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kUint32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord32Compare(selector, value, cont);
#if V8_TARGET_ARCH_S390X
      case IrOpcode::kWord64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThan:
        cont->OverwriteAndNegateIfEqual(kSignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kInt64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kUint64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitWord64Compare(selector, value, cont);
#endif
      case IrOpcode::kFloat32Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat32LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat32Compare(selector, value, cont);
      case IrOpcode::kFloat64Equal:
        cont->OverwriteAndNegateIfEqual(kEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThan:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kFloat64LessThanOrEqual:
        cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
        return VisitFloat64Compare(selector, value, cont);
      case IrOpcode::kProjection:
        // Check if this is the overflow output projection of an
        // <Operation>WithOverflow node.
        if (ProjectionIndexOf(value->op()) == 1u) {
          // We cannot combine the <Operation>WithOverflow with this branch
          // unless the 0th projection (the use of the actual value of the
          // <Operation>) is either nullptr (which means there's no use of the
          // actual value) or was already defined, which means it is scheduled
          // *AFTER* this branch.
          Node* const node = value->InputAt(0);
          Node* const result = NodeProperties::FindProjection(node, 0);
          if (result == nullptr || selector->IsDefined(result)) {
            switch (node->opcode()) {
              case IrOpcode::kInt32AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(
                    selector, node, kS390_AddWithOverflow32, kInt16Imm, cont);
              case IrOpcode::kInt32SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int32BinopMatcher>(selector, node,
                                                     kS390_SubWithOverflow32,
                                                     kInt16Imm_Negate, cont);
#if V8_TARGET_ARCH_S390X
              case IrOpcode::kInt64AddWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kS390_Add,
                                                     kInt16Imm, cont);
              case IrOpcode::kInt64SubWithOverflow:
                cont->OverwriteAndNegateIfEqual(kOverflow);
                return VisitBinop<Int64BinopMatcher>(selector, node, kS390_Sub,
                                                     kInt16Imm_Negate, cont);
#endif
              default:
                break;
            }
          }
        }
        break;
      case IrOpcode::kInt32Sub:
        return VisitWord32Compare(selector, value, cont);
      case IrOpcode::kWord32And:
        return VisitWordCompare(selector, value, kS390_Tst32, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt32Add:
// case IrOpcode::kWord32Or:
// case IrOpcode::kWord32Xor:
// case IrOpcode::kWord32Sar:
// case IrOpcode::kWord32Shl:
// case IrOpcode::kWord32Shr:
// case IrOpcode::kWord32Ror:
#if V8_TARGET_ARCH_S390X
      case IrOpcode::kInt64Sub:
        return VisitWord64Compare(selector, value, cont);
      case IrOpcode::kWord64And:
        return VisitWordCompare(selector, value, kS390_Tst64, cont, true,
                                kInt16Imm_Unsigned);
// TODO(mbrandy): Handle?
// case IrOpcode::kInt64Add:
// case IrOpcode::kWord64Or:
// case IrOpcode::kWord64Xor:
// case IrOpcode::kWord64Sar:
// case IrOpcode::kWord64Shl:
// case IrOpcode::kWord64Shr:
// case IrOpcode::kWord64Ror:
#endif
      default:
        break;
    }
    break;
  }

  // Branch could not be combined with a compare, emit compare against 0.
  S390OperandGenerator g(selector);
  VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
               cont);
}

void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
                            Node* value, FlagsContinuation* cont) {
  VisitWordCompareZero(selector, user, value, kS390_Cmp32, cont);
}

#if V8_TARGET_ARCH_S390X
void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
                            Node* value, FlagsContinuation* cont) {
  VisitWordCompareZero(selector, user, value, kS390_Cmp64, cont);
}
#endif

}  // namespace

void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
                                      BasicBlock* fbranch) {
  FlagsContinuation cont(kNotEqual, tbranch, fbranch);
  VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeIf(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
  VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
}

void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
  VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
}

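// Chooses between a jump table (ArchTableSwitch) and a chain of compares
// (ArchLookupSwitch) using a rough size/speed estimate of the two forms.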
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
  S390OperandGenerator g(this);
  InstructionOperand value_operand = g.UseRegister(node->InputAt(0));

  // Emit either ArchTableSwitch or ArchLookupSwitch.
  size_t table_space_cost = 4 + sw.value_range;
  size_t table_time_cost = 3;
  size_t lookup_space_cost = 3 + 2 * sw.case_count;
  size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      index_operand = g.TempRegister();
      Emit(kS390_Sub, index_operand, value_operand,
           g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}

void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif

void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  S390OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      Emit(kS390_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    // Push any stack arguments.
    int num_slots = static_cast<int>(descriptor->StackParameterCount());
    int slot = 0;
    for (PushParameter input : (*arguments)) {
      if (slot == 0) {
        DCHECK(input.node());
        Emit(kS390_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
             g.TempImmediate(num_slots));
      } else {
        // Skip any alignment holes in pushed nodes.
        if (input.node()) {
          Emit(kS390_StoreToStackSlot, g.NoOutput(),
               g.UseRegister(input.node()), g.TempImmediate(slot));
        }
      }
      ++slot;
    }
  }
}

bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }

void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_DoubleExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_DoubleExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  S390OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kS390_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  Emit(kS390_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  S390OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kS390_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  Emit(kS390_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kWord64Popcnt;
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8