1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/base/adapters.h"
6#include "src/compiler/instruction-selector-impl.h"
7#include "src/compiler/node-matchers.h"
8#include "src/compiler/node-properties.h"
9#include "src/s390/frames-s390.h"
10
11namespace v8 {
12namespace internal {
13namespace compiler {
14
15enum ImmediateMode {
16 kInt16Imm,
17 kInt16Imm_Unsigned,
18 kInt16Imm_Negate,
19 kInt16Imm_4ByteAligned,
20 kShift32Imm,
21 kShift64Imm,
22 kNoImmediate
23};
24
25// Adds S390-specific methods for generating operands.
26class S390OperandGenerator final : public OperandGenerator {
27 public:
28 explicit S390OperandGenerator(InstructionSelector* selector)
29 : OperandGenerator(selector) {}
30
31 InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
32 if (CanBeImmediate(node, mode)) {
33 return UseImmediate(node);
34 }
35 return UseRegister(node);
36 }
37
38 bool CanBeImmediate(Node* node, ImmediateMode mode) {
39 int64_t value;
40 if (node->opcode() == IrOpcode::kInt32Constant)
41 value = OpParameter<int32_t>(node);
42 else if (node->opcode() == IrOpcode::kInt64Constant)
43 value = OpParameter<int64_t>(node);
44 else
45 return false;
46 return CanBeImmediate(value, mode);
47 }
48
49 bool CanBeImmediate(int64_t value, ImmediateMode mode) {
50 switch (mode) {
51 case kInt16Imm:
52 return is_int16(value);
53 case kInt16Imm_Unsigned:
54 return is_uint16(value);
55 case kInt16Imm_Negate:
56 return is_int16(-value);
57 case kInt16Imm_4ByteAligned:
58 return is_int16(value) && !(value & 3);
59 case kShift32Imm:
60 return 0 <= value && value < 32;
61 case kShift64Imm:
62 return 0 <= value && value < 64;
63 case kNoImmediate:
64 return false;
65 }
66 return false;
67 }
68};
69
70namespace {
71
72void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
73 S390OperandGenerator g(selector);
74 selector->Emit(opcode, g.DefineAsRegister(node),
75 g.UseRegister(node->InputAt(0)));
76}
77
78void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
79 S390OperandGenerator g(selector);
80 selector->Emit(opcode, g.DefineAsRegister(node),
81 g.UseRegister(node->InputAt(0)),
82 g.UseRegister(node->InputAt(1)));
83}
84
85void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
86 ImmediateMode operand_mode) {
87 S390OperandGenerator g(selector);
88 selector->Emit(opcode, g.DefineAsRegister(node),
89 g.UseRegister(node->InputAt(0)),
90 g.UseOperand(node->InputAt(1), operand_mode));
91}
92
93#if V8_TARGET_ARCH_S390X
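// Emits a float-to-integer truncation whose optional second output (the
// Projection(1) "success" value) is defined only when that projection exists.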
94void VisitTryTruncateDouble(InstructionSelector* selector, ArchOpcode opcode,
95 Node* node) {
96 S390OperandGenerator g(selector);
97 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
98 InstructionOperand outputs[2];
99 size_t output_count = 0;
100 outputs[output_count++] = g.DefineAsRegister(node);
101
102 Node* success_output = NodeProperties::FindProjection(node, 1);
103 if (success_output) {
104 outputs[output_count++] = g.DefineAsRegister(success_output);
105 }
106
107 selector->Emit(opcode, output_count, outputs, 1, inputs);
108}
109#endif
110
111// Shared routine for multiple binary operations.
112template <typename Matcher>
113void VisitBinop(InstructionSelector* selector, Node* node,
114 InstructionCode opcode, ImmediateMode operand_mode,
115 FlagsContinuation* cont) {
116 S390OperandGenerator g(selector);
117 Matcher m(node);
118 InstructionOperand inputs[4];
119 size_t input_count = 0;
120 InstructionOperand outputs[2];
121 size_t output_count = 0;
122
123 inputs[input_count++] = g.UseRegister(m.left().node());
124 inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);
125
126 if (cont->IsBranch()) {
127 inputs[input_count++] = g.Label(cont->true_block());
128 inputs[input_count++] = g.Label(cont->false_block());
129 }
130
131 outputs[output_count++] = g.DefineAsRegister(node);
132 if (cont->IsSet()) {
133 outputs[output_count++] = g.DefineAsRegister(cont->result());
134 }
135
136 DCHECK_NE(0u, input_count);
137 DCHECK_NE(0u, output_count);
138 DCHECK_GE(arraysize(inputs), input_count);
139 DCHECK_GE(arraysize(outputs), output_count);
140
141 opcode = cont->Encode(opcode);
142 if (cont->IsDeoptimize()) {
143 selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
144 cont->frame_state());
145 } else {
146 selector->Emit(opcode, output_count, outputs, input_count, inputs);
147 }
148}
149
150// Shared routine for multiple binary operations.
151template <typename Matcher>
152void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
153 ImmediateMode operand_mode) {
154 FlagsContinuation cont;
155 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
156}
157
158} // namespace
159
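// Selects the load opcode from the loaded representation, then prefers the
// register+immediate (MRI) addressing mode when either the offset or the base
// fits the immediate range, falling back to register+register (MRR).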
160void InstructionSelector::VisitLoad(Node* node) {
161 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
162 S390OperandGenerator g(this);
163 Node* base = node->InputAt(0);
164 Node* offset = node->InputAt(1);
165 ArchOpcode opcode = kArchNop;
166 ImmediateMode mode = kInt16Imm;
167 switch (load_rep.representation()) {
168 case MachineRepresentation::kFloat32:
169 opcode = kS390_LoadFloat32;
170 break;
171 case MachineRepresentation::kFloat64:
172 opcode = kS390_LoadDouble;
173 break;
174 case MachineRepresentation::kBit: // Fall through.
175 case MachineRepresentation::kWord8:
176 opcode = load_rep.IsSigned() ? kS390_LoadWordS8 : kS390_LoadWordU8;
177 break;
178 case MachineRepresentation::kWord16:
179 opcode = load_rep.IsSigned() ? kS390_LoadWordS16 : kS390_LoadWordU16;
180 break;
181#if !V8_TARGET_ARCH_S390X
182 case MachineRepresentation::kTagged: // Fall through.
183#endif
184 case MachineRepresentation::kWord32:
185 opcode = kS390_LoadWordU32;
186 break;
187#if V8_TARGET_ARCH_S390X
188 case MachineRepresentation::kTagged: // Fall through.
189 case MachineRepresentation::kWord64:
190 opcode = kS390_LoadWord64;
191 mode = kInt16Imm_4ByteAligned;
192 break;
193#else
194 case MachineRepresentation::kWord64: // Fall through.
195#endif
196 case MachineRepresentation::kSimd128: // Fall through.
197 case MachineRepresentation::kNone:
198 UNREACHABLE();
199 return;
200 }
201 if (g.CanBeImmediate(offset, mode)) {
202 Emit(opcode | AddressingModeField::encode(kMode_MRI),
203 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
204 } else if (g.CanBeImmediate(base, mode)) {
205 Emit(opcode | AddressingModeField::encode(kMode_MRI),
206 g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
207 } else {
208 Emit(opcode | AddressingModeField::encode(kMode_MRR),
209 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
210 }
211}
212
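// Tagged stores that need a write barrier are emitted as
// kArchStoreWithWriteBarrier with unique registers so the out-of-line
// write-barrier code can reuse them; all other stores pick an opcode from the
// representation and use MRI or MRR addressing like VisitLoad.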
213void InstructionSelector::VisitStore(Node* node) {
214 S390OperandGenerator g(this);
215 Node* base = node->InputAt(0);
216 Node* offset = node->InputAt(1);
217 Node* value = node->InputAt(2);
218
219 StoreRepresentation store_rep = StoreRepresentationOf(node->op());
220 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
221 MachineRepresentation rep = store_rep.representation();
222
223 if (write_barrier_kind != kNoWriteBarrier) {
224 DCHECK_EQ(MachineRepresentation::kTagged, rep);
225 AddressingMode addressing_mode;
226 InstructionOperand inputs[3];
227 size_t input_count = 0;
228 inputs[input_count++] = g.UseUniqueRegister(base);
229 // OutOfLineRecordWrite uses the offset in an 'AddP' instruction as well as
230 // for the store itself, so we must check compatibility with both.
231 if (g.CanBeImmediate(offset, kInt16Imm)
232#if V8_TARGET_ARCH_S390X
233 && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
234#endif
235 ) {
236 inputs[input_count++] = g.UseImmediate(offset);
237 addressing_mode = kMode_MRI;
238 } else {
239 inputs[input_count++] = g.UseUniqueRegister(offset);
240 addressing_mode = kMode_MRR;
241 }
242 inputs[input_count++] = g.UseUniqueRegister(value);
243 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
244 switch (write_barrier_kind) {
245 case kNoWriteBarrier:
246 UNREACHABLE();
247 break;
248 case kMapWriteBarrier:
249 record_write_mode = RecordWriteMode::kValueIsMap;
250 break;
251 case kPointerWriteBarrier:
252 record_write_mode = RecordWriteMode::kValueIsPointer;
253 break;
254 case kFullWriteBarrier:
255 record_write_mode = RecordWriteMode::kValueIsAny;
256 break;
257 }
258 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
259 size_t const temp_count = arraysize(temps);
260 InstructionCode code = kArchStoreWithWriteBarrier;
261 code |= AddressingModeField::encode(addressing_mode);
262 code |= MiscField::encode(static_cast<int>(record_write_mode));
263 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
264 } else {
265 ArchOpcode opcode = kArchNop;
266 ImmediateMode mode = kInt16Imm;
267 switch (rep) {
268 case MachineRepresentation::kFloat32:
269 opcode = kS390_StoreFloat32;
270 break;
271 case MachineRepresentation::kFloat64:
272 opcode = kS390_StoreDouble;
273 break;
274 case MachineRepresentation::kBit: // Fall through.
275 case MachineRepresentation::kWord8:
276 opcode = kS390_StoreWord8;
277 break;
278 case MachineRepresentation::kWord16:
279 opcode = kS390_StoreWord16;
280 break;
281#if !V8_TARGET_ARCH_S390X
282 case MachineRepresentation::kTagged: // Fall through.
283#endif
284 case MachineRepresentation::kWord32:
285 opcode = kS390_StoreWord32;
286 break;
287#if V8_TARGET_ARCH_S390X
288 case MachineRepresentation::kTagged: // Fall through.
289 case MachineRepresentation::kWord64:
290 opcode = kS390_StoreWord64;
291 mode = kInt16Imm_4ByteAligned;
292 break;
293#else
294 case MachineRepresentation::kWord64: // Fall through.
295#endif
296 case MachineRepresentation::kSimd128: // Fall through.
297 case MachineRepresentation::kNone:
298 UNREACHABLE();
299 return;
300 }
301 if (g.CanBeImmediate(offset, mode)) {
302 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
303 g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
304 } else if (g.CanBeImmediate(base, mode)) {
305 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
306 g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
307 } else {
308 Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
309 g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
310 }
311 }
312}
313
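// Checked loads take the buffer length as a third input; it is passed through
// (as an immediate when it fits) so the access can be bounds-checked against
// it before the load.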
314void InstructionSelector::VisitCheckedLoad(Node* node) {
315 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
316 S390OperandGenerator g(this);
317 Node* const base = node->InputAt(0);
318 Node* const offset = node->InputAt(1);
319 Node* const length = node->InputAt(2);
320 ArchOpcode opcode = kArchNop;
321 switch (load_rep.representation()) {
322 case MachineRepresentation::kWord8:
323 opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
324 break;
325 case MachineRepresentation::kWord16:
326 opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
327 break;
328 case MachineRepresentation::kWord32:
329 opcode = kCheckedLoadWord32;
330 break;
331#if V8_TARGET_ARCH_S390X
332 case MachineRepresentation::kWord64:
333 opcode = kCheckedLoadWord64;
334 break;
335#endif
336 case MachineRepresentation::kFloat32:
337 opcode = kCheckedLoadFloat32;
338 break;
339 case MachineRepresentation::kFloat64:
340 opcode = kCheckedLoadFloat64;
341 break;
342 case MachineRepresentation::kBit: // Fall through.
343 case MachineRepresentation::kTagged: // Fall through.
344#if !V8_TARGET_ARCH_S390X
345 case MachineRepresentation::kWord64: // Fall through.
346#endif
347 case MachineRepresentation::kSimd128: // Fall through.
348 case MachineRepresentation::kNone:
349 UNREACHABLE();
350 return;
351 }
352 AddressingMode addressingMode = kMode_MRR;
353 Emit(opcode | AddressingModeField::encode(addressingMode),
354 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
355 g.UseOperand(length, kInt16Imm_Unsigned));
356}
357
358void InstructionSelector::VisitCheckedStore(Node* node) {
359 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
360 S390OperandGenerator g(this);
361 Node* const base = node->InputAt(0);
362 Node* const offset = node->InputAt(1);
363 Node* const length = node->InputAt(2);
364 Node* const value = node->InputAt(3);
365 ArchOpcode opcode = kArchNop;
366 switch (rep) {
367 case MachineRepresentation::kWord8:
368 opcode = kCheckedStoreWord8;
369 break;
370 case MachineRepresentation::kWord16:
371 opcode = kCheckedStoreWord16;
372 break;
373 case MachineRepresentation::kWord32:
374 opcode = kCheckedStoreWord32;
375 break;
376#if V8_TARGET_ARCH_S390X
377 case MachineRepresentation::kWord64:
378 opcode = kCheckedStoreWord64;
379 break;
380#endif
381 case MachineRepresentation::kFloat32:
382 opcode = kCheckedStoreFloat32;
383 break;
384 case MachineRepresentation::kFloat64:
385 opcode = kCheckedStoreFloat64;
386 break;
387 case MachineRepresentation::kBit: // Fall through.
388 case MachineRepresentation::kTagged: // Fall through.
389#if !V8_TARGET_ARCH_S390X
390 case MachineRepresentation::kWord64: // Fall through.
391#endif
392 case MachineRepresentation::kSimd128: // Fall through.
393 case MachineRepresentation::kNone:
394 UNREACHABLE();
395 return;
396 }
397 AddressingMode addressingMode = kMode_MRR;
398 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
399 g.UseRegister(base), g.UseRegister(offset),
400 g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
401}
402
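// Shared routine for And/Or: if either operand is Xor(x, -1) and can be
// covered, fold the negation into the complement form of the instruction
// (e.g. kS390_AndComplement) instead of emitting a separate Not.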
403template <typename Matcher>
404static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
405 ArchOpcode opcode, bool left_can_cover,
406 bool right_can_cover, ImmediateMode imm_mode) {
407 S390OperandGenerator g(selector);
408
409 // Map instruction to equivalent operation with inverted right input.
410 ArchOpcode inv_opcode = opcode;
411 switch (opcode) {
412 case kS390_And:
413 inv_opcode = kS390_AndComplement;
414 break;
415 case kS390_Or:
416 inv_opcode = kS390_OrComplement;
417 break;
418 default:
419 UNREACHABLE();
420 }
421
422 // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
423 if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
424 Matcher mleft(m->left().node());
425 if (mleft.right().Is(-1)) {
426 selector->Emit(inv_opcode, g.DefineAsRegister(node),
427 g.UseRegister(m->right().node()),
428 g.UseRegister(mleft.left().node()));
429 return;
430 }
431 }
432
433 // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
434 if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
435 right_can_cover) {
436 Matcher mright(m->right().node());
437 if (mright.right().Is(-1)) {
438 // TODO(all): support shifted operand on right.
439 selector->Emit(inv_opcode, g.DefineAsRegister(node),
440 g.UseRegister(m->left().node()),
441 g.UseRegister(mright.left().node()));
442 return;
443 }
444 }
445
446 VisitBinop<Matcher>(selector, node, opcode, imm_mode);
447}
448
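// Returns true if value is a single contiguous run of 1-bits; *mb receives the
// bit index of the run's highest set bit and *me the index of its lowest
// (counting from bit 0 at the least-significant end).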
449static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
450 int mask_width = base::bits::CountPopulation32(value);
451 int mask_msb = base::bits::CountLeadingZeros32(value);
452 int mask_lsb = base::bits::CountTrailingZeros32(value);
453 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
454 return false;
455 *mb = mask_lsb + mask_width - 1;
456 *me = mask_lsb;
457 return true;
458}
459
460#if V8_TARGET_ARCH_S390X
461static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
462 int mask_width = base::bits::CountPopulation64(value);
463 int mask_msb = base::bits::CountLeadingZeros64(value);
464 int mask_lsb = base::bits::CountTrailingZeros64(value);
465 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
466 return false;
467 *mb = mask_lsb + mask_width - 1;
468 *me = mask_lsb;
469 return true;
470}
471#endif
472
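// And with a contiguous mask is turned into a single rotate-and-mask
// instruction; a covered Shl/Shr feeding the And is folded into the rotate
// amount, with the mask adjusted so no rotated-in bits survive.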
473void InstructionSelector::VisitWord32And(Node* node) {
474 S390OperandGenerator g(this);
475 Int32BinopMatcher m(node);
476 int mb = 0;
477 int me = 0;
478 if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
479 int sh = 0;
480 Node* left = m.left().node();
481 if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
482 CanCover(node, left)) {
483 Int32BinopMatcher mleft(m.left().node());
484 if (mleft.right().IsInRange(0, 31)) {
485 left = mleft.left().node();
486 sh = mleft.right().Value();
487 if (m.left().IsWord32Shr()) {
488 // Adjust the mask such that it doesn't include any rotated bits.
489 if (mb > 31 - sh) mb = 31 - sh;
490 sh = (32 - sh) & 0x1f;
491 } else {
492 // Adjust the mask such that it doesn't include any rotated bits.
493 if (me < sh) me = sh;
494 }
495 }
496 }
497 if (mb >= me) {
498 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
499 g.UseRegister(left), g.TempImmediate(sh), g.TempImmediate(mb),
500 g.TempImmediate(me));
501 return;
502 }
503 }
504 VisitLogical<Int32BinopMatcher>(
505 this, node, &m, kS390_And, CanCover(node, m.left().node()),
506 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
507}
508
509#if V8_TARGET_ARCH_S390X
510void InstructionSelector::VisitWord64And(Node* node) {
511 S390OperandGenerator g(this);
512 Int64BinopMatcher m(node);
513 int mb = 0;
514 int me = 0;
515 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
516 int sh = 0;
517 Node* left = m.left().node();
518 if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
519 CanCover(node, left)) {
520 Int64BinopMatcher mleft(m.left().node());
521 if (mleft.right().IsInRange(0, 63)) {
522 left = mleft.left().node();
523 sh = mleft.right().Value();
524 if (m.left().IsWord64Shr()) {
525 // Adjust the mask such that it doesn't include any rotated bits.
526 if (mb > 63 - sh) mb = 63 - sh;
527 sh = (64 - sh) & 0x3f;
528 } else {
529 // Adjust the mask such that it doesn't include any rotated bits.
530 if (me < sh) me = sh;
531 }
532 }
533 }
534 if (mb >= me) {
535 bool match = false;
536 ArchOpcode opcode;
537 int mask;
538 if (me == 0) {
539 match = true;
540 opcode = kS390_RotLeftAndClearLeft64;
541 mask = mb;
542 } else if (mb == 63) {
543 match = true;
544 opcode = kS390_RotLeftAndClearRight64;
545 mask = me;
546 } else if (sh && me <= sh && m.left().IsWord64Shl()) {
547 match = true;
548 opcode = kS390_RotLeftAndClear64;
549 mask = mb;
550 }
551 if (match) {
552 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
553 g.TempImmediate(sh), g.TempImmediate(mask));
554 return;
555 }
556 }
557 }
558 VisitLogical<Int64BinopMatcher>(
559 this, node, &m, kS390_And, CanCover(node, m.left().node()),
560 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
561}
562#endif
563
564void InstructionSelector::VisitWord32Or(Node* node) {
565 Int32BinopMatcher m(node);
566 VisitLogical<Int32BinopMatcher>(
567 this, node, &m, kS390_Or, CanCover(node, m.left().node()),
568 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
569}
570
571#if V8_TARGET_ARCH_S390X
572void InstructionSelector::VisitWord64Or(Node* node) {
573 Int64BinopMatcher m(node);
574 VisitLogical<Int64BinopMatcher>(
575 this, node, &m, kS390_Or, CanCover(node, m.left().node()),
576 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
577}
578#endif
579
580void InstructionSelector::VisitWord32Xor(Node* node) {
581 S390OperandGenerator g(this);
582 Int32BinopMatcher m(node);
583 if (m.right().Is(-1)) {
584 Emit(kS390_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
585 } else {
586 VisitBinop<Int32BinopMatcher>(this, node, kS390_Xor, kInt16Imm_Unsigned);
587 }
588}
589
590#if V8_TARGET_ARCH_S390X
591void InstructionSelector::VisitWord64Xor(Node* node) {
592 S390OperandGenerator g(this);
593 Int64BinopMatcher m(node);
594 if (m.right().Is(-1)) {
595 Emit(kS390_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
596 } else {
597 VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor, kInt16Imm_Unsigned);
598 }
599}
600#endif
601
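// Shl of an And with a contiguous mask folds into kS390_RotLeftAndMask32 when
// the shifted mask is still contiguous; otherwise a plain shift is emitted.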
602void InstructionSelector::VisitWord32Shl(Node* node) {
603 S390OperandGenerator g(this);
604 Int32BinopMatcher m(node);
605 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
606 Int32BinopMatcher mleft(m.left().node());
607 int sh = m.right().Value();
608 int mb;
609 int me;
610 if (mleft.right().HasValue() &&
611 IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
612 // Adjust the mask such that it doesn't include any rotated bits.
613 if (me < sh) me = sh;
614 if (mb >= me) {
615 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
616 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
617 g.TempImmediate(mb), g.TempImmediate(me));
618 return;
619 }
620 }
621 }
622 VisitRRO(this, kS390_ShiftLeft32, node, kShift32Imm);
623}
624
625#if V8_TARGET_ARCH_S390X
626void InstructionSelector::VisitWord64Shl(Node* node) {
627 S390OperandGenerator g(this);
628 Int64BinopMatcher m(node);
629 // TODO(mbrandy): eliminate left sign extension if right >= 32
630 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
631 Int64BinopMatcher mleft(m.left().node());
632 int sh = m.right().Value();
633 int mb;
634 int me;
635 if (mleft.right().HasValue() &&
636 IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
637 // Adjust the mask such that it doesn't include any rotated bits.
638 if (me < sh) me = sh;
639 if (mb >= me) {
640 bool match = false;
641 ArchOpcode opcode;
642 int mask;
643 if (me == 0) {
644 match = true;
645 opcode = kS390_RotLeftAndClearLeft64;
646 mask = mb;
647 } else if (mb == 63) {
648 match = true;
649 opcode = kS390_RotLeftAndClearRight64;
650 mask = me;
651 } else if (sh && me <= sh) {
652 match = true;
653 opcode = kS390_RotLeftAndClear64;
654 mask = mb;
655 }
656 if (match) {
657 Emit(opcode, g.DefineAsRegister(node),
658 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
659 g.TempImmediate(mask));
660 return;
661 }
662 }
663 }
664 }
665 VisitRRO(this, kS390_ShiftLeft64, node, kShift64Imm);
666}
667#endif
668
669void InstructionSelector::VisitWord32Shr(Node* node) {
670 S390OperandGenerator g(this);
671 Int32BinopMatcher m(node);
672 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
673 Int32BinopMatcher mleft(m.left().node());
674 int sh = m.right().Value();
675 int mb;
676 int me;
677 if (mleft.right().HasValue() &&
678 IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
679 // Adjust the mask such that it doesn't include any rotated bits.
680 if (mb > 31 - sh) mb = 31 - sh;
681 sh = (32 - sh) & 0x1f;
682 if (mb >= me) {
683 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
684 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
685 g.TempImmediate(mb), g.TempImmediate(me));
686 return;
687 }
688 }
689 }
690 VisitRRO(this, kS390_ShiftRight32, node, kShift32Imm);
691}
692
693#if V8_TARGET_ARCH_S390X
694void InstructionSelector::VisitWord64Shr(Node* node) {
695 S390OperandGenerator g(this);
696 Int64BinopMatcher m(node);
697 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
698 Int64BinopMatcher mleft(m.left().node());
699 int sh = m.right().Value();
700 int mb;
701 int me;
702 if (mleft.right().HasValue() &&
703 IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
704 // Adjust the mask such that it doesn't include any rotated bits.
705 if (mb > 63 - sh) mb = 63 - sh;
706 sh = (64 - sh) & 0x3f;
707 if (mb >= me) {
708 bool match = false;
709 ArchOpcode opcode;
710 int mask;
711 if (me == 0) {
712 match = true;
713 opcode = kS390_RotLeftAndClearLeft64;
714 mask = mb;
715 } else if (mb == 63) {
716 match = true;
717 opcode = kS390_RotLeftAndClearRight64;
718 mask = me;
719 }
720 if (match) {
721 Emit(opcode, g.DefineAsRegister(node),
722 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
723 g.TempImmediate(mask));
724 return;
725 }
726 }
727 }
728 }
729 VisitRRO(this, kS390_ShiftRight64, node, kShift64Imm);
730}
731#endif
732
733void InstructionSelector::VisitWord32Sar(Node* node) {
734 S390OperandGenerator g(this);
735 Int32BinopMatcher m(node);
736 // Replace with sign extension for (x << K) >> K where K is 16 or 24.
737 if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
738 Int32BinopMatcher mleft(m.left().node());
739 if (mleft.right().Is(16) && m.right().Is(16)) {
740 Emit(kS390_ExtendSignWord16, g.DefineAsRegister(node),
741 g.UseRegister(mleft.left().node()));
742 return;
743 } else if (mleft.right().Is(24) && m.right().Is(24)) {
744 Emit(kS390_ExtendSignWord8, g.DefineAsRegister(node),
745 g.UseRegister(mleft.left().node()));
746 return;
747 }
748 }
749 VisitRRO(this, kS390_ShiftRightArith32, node, kShift32Imm);
750}
751
752#if !V8_TARGET_ARCH_S390X
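// On 32-bit targets, 64-bit values are split into (low, high) register pairs,
// so pair operations take four inputs and define two outputs (the second via
// Projection(1)).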
753void VisitPairBinop(InstructionSelector* selector, ArchOpcode opcode,
754 Node* node) {
755 S390OperandGenerator g(selector);
756
757 // We use UseUniqueRegister here to avoid register sharing with the output
758 // registers.
759 InstructionOperand inputs[] = {
760 g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
761 g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
762
763 InstructionOperand outputs[] = {
764 g.DefineAsRegister(node),
765 g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
766
767 selector->Emit(opcode, 2, outputs, 4, inputs);
768}
769
770void InstructionSelector::VisitInt32PairAdd(Node* node) {
771 VisitPairBinop(this, kS390_AddPair, node);
772}
773
774void InstructionSelector::VisitInt32PairSub(Node* node) {
775 VisitPairBinop(this, kS390_SubPair, node);
776}
777
778void InstructionSelector::VisitInt32PairMul(Node* node) {
779 S390OperandGenerator g(this);
780 InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
781 g.UseUniqueRegister(node->InputAt(1)),
782 g.UseUniqueRegister(node->InputAt(2)),
783 g.UseUniqueRegister(node->InputAt(3))};
784
785 InstructionOperand outputs[] = {
786 g.DefineAsRegister(node),
787 g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
788
789 Emit(kS390_MulPair, 2, outputs, 4, inputs);
790}
791
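// Pair shifts take the shift amount as a third input: a constant amount is
// encoded as an immediate, otherwise a unique register is used. The low-word
// output is constrained to reuse the first input's register.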
792void VisitPairShift(InstructionSelector* selector, ArchOpcode opcode,
793 Node* node) {
794 S390OperandGenerator g(selector);
795 Int32Matcher m(node->InputAt(2));
796 InstructionOperand shift_operand;
797 if (m.HasValue()) {
798 shift_operand = g.UseImmediate(m.node());
799 } else {
800 shift_operand = g.UseUniqueRegister(m.node());
801 }
802
803 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0)),
804 g.UseRegister(node->InputAt(1)),
805 shift_operand};
806
807 InstructionOperand outputs[] = {
808 g.DefineSameAsFirst(node),
809 g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
810
811 selector->Emit(opcode, 2, outputs, 3, inputs);
812}
813
814void InstructionSelector::VisitWord32PairShl(Node* node) {
815 VisitPairShift(this, kS390_ShiftLeftPair, node);
816}
817
818void InstructionSelector::VisitWord32PairShr(Node* node) {
819 VisitPairShift(this, kS390_ShiftRightPair, node);
820}
821
822void InstructionSelector::VisitWord32PairSar(Node* node) {
823 VisitPairShift(this, kS390_ShiftRightArithPair, node);
824}
825#endif
826
827#if V8_TARGET_ARCH_S390X
828void InstructionSelector::VisitWord64Sar(Node* node) {
829 VisitRRO(this, kS390_ShiftRightArith64, node, kShift64Imm);
830}
831#endif
832
833void InstructionSelector::VisitWord32Ror(Node* node) {
834 VisitRRO(this, kS390_RotRight32, node, kShift32Imm);
835}
836
837#if V8_TARGET_ARCH_S390X
838void InstructionSelector::VisitWord64Ror(Node* node) {
839 VisitRRO(this, kS390_RotRight64, node, kShift64Imm);
840}
841#endif
842
843void InstructionSelector::VisitWord32Clz(Node* node) {
844 S390OperandGenerator g(this);
845 Emit(kS390_Cntlz32, g.DefineAsRegister(node),
846 g.UseRegister(node->InputAt(0)));
847}
848
849#if V8_TARGET_ARCH_S390X
850void InstructionSelector::VisitWord64Clz(Node* node) {
851 S390OperandGenerator g(this);
852 Emit(kS390_Cntlz64, g.DefineAsRegister(node),
853 g.UseRegister(node->InputAt(0)));
854}
855#endif
856
857void InstructionSelector::VisitWord32Popcnt(Node* node) {
858 S390OperandGenerator g(this);
859 Emit(kS390_Popcnt32, g.DefineAsRegister(node),
860 g.UseRegister(node->InputAt(0)));
861}
862
863#if V8_TARGET_ARCH_S390X
864void InstructionSelector::VisitWord64Popcnt(Node* node) {
865 S390OperandGenerator g(this);
866 Emit(kS390_Popcnt64, g.DefineAsRegister(node),
867 g.UseRegister(node->InputAt(0)));
868}
869#endif
870
871void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
872
873#if V8_TARGET_ARCH_S390X
874void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
875#endif
876
877void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
878
879#if V8_TARGET_ARCH_S390X
880void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
881#endif
882
883void InstructionSelector::VisitInt32Add(Node* node) {
884 VisitBinop<Int32BinopMatcher>(this, node, kS390_Add, kInt16Imm);
885}
886
887#if V8_TARGET_ARCH_S390X
888void InstructionSelector::VisitInt64Add(Node* node) {
889 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm);
890}
891#endif
892
893void InstructionSelector::VisitInt32Sub(Node* node) {
894 S390OperandGenerator g(this);
895 Int32BinopMatcher m(node);
896 if (m.left().Is(0)) {
897 Emit(kS390_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
898 } else {
899 VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate);
900 }
901}
902
903#if V8_TARGET_ARCH_S390X
904void InstructionSelector::VisitInt64Sub(Node* node) {
905 S390OperandGenerator g(this);
906 Int64BinopMatcher m(node);
907 if (m.left().Is(0)) {
908 Emit(kS390_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
909 } else {
910 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate);
911 }
912}
913#endif
914
915void InstructionSelector::VisitInt32Mul(Node* node) {
916 VisitRRR(this, kS390_Mul32, node);
917}
918
919#if V8_TARGET_ARCH_S390X
920void InstructionSelector::VisitInt64Mul(Node* node) {
921 VisitRRR(this, kS390_Mul64, node);
922}
923#endif
924
925void InstructionSelector::VisitInt32MulHigh(Node* node) {
926 S390OperandGenerator g(this);
927 Emit(kS390_MulHigh32, g.DefineAsRegister(node),
928 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
929}
930
931void InstructionSelector::VisitUint32MulHigh(Node* node) {
932 S390OperandGenerator g(this);
933 Emit(kS390_MulHighU32, g.DefineAsRegister(node),
934 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
935}
936
937void InstructionSelector::VisitInt32Div(Node* node) {
938 VisitRRR(this, kS390_Div32, node);
939}
940
941#if V8_TARGET_ARCH_S390X
942void InstructionSelector::VisitInt64Div(Node* node) {
943 VisitRRR(this, kS390_Div64, node);
944}
945#endif
946
947void InstructionSelector::VisitUint32Div(Node* node) {
948 VisitRRR(this, kS390_DivU32, node);
949}
950
951#if V8_TARGET_ARCH_S390X
952void InstructionSelector::VisitUint64Div(Node* node) {
953 VisitRRR(this, kS390_DivU64, node);
954}
955#endif
956
957void InstructionSelector::VisitInt32Mod(Node* node) {
958 VisitRRR(this, kS390_Mod32, node);
959}
960
961#if V8_TARGET_ARCH_S390X
962void InstructionSelector::VisitInt64Mod(Node* node) {
963 VisitRRR(this, kS390_Mod64, node);
964}
965#endif
966
967void InstructionSelector::VisitUint32Mod(Node* node) {
968 VisitRRR(this, kS390_ModU32, node);
969}
970
971#if V8_TARGET_ARCH_S390X
972void InstructionSelector::VisitUint64Mod(Node* node) {
973 VisitRRR(this, kS390_ModU64, node);
974}
975#endif
976
977void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
978 VisitRR(this, kS390_Float32ToDouble, node);
979}
980
981void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
982 VisitRR(this, kS390_Int32ToFloat32, node);
983}
984
985void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
986 VisitRR(this, kS390_Uint32ToFloat32, node);
987}
988
989void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
990 VisitRR(this, kS390_Int32ToDouble, node);
991}
992
993void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
994 VisitRR(this, kS390_Uint32ToDouble, node);
995}
996
997void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
998 VisitRR(this, kS390_DoubleToInt32, node);
999}
1000
1001void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
1002 VisitRR(this, kS390_DoubleToUint32, node);
1003}
1004
1005void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
1006 VisitRR(this, kS390_DoubleToUint32, node);
1007}
1008
1009#if V8_TARGET_ARCH_S390X
1010void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1011 VisitTryTruncateDouble(this, kS390_Float32ToInt64, node);
1012}
1013
1014void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1015 VisitTryTruncateDouble(this, kS390_DoubleToInt64, node);
1016}
1017
1018void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1019 VisitTryTruncateDouble(this, kS390_Float32ToUint64, node);
1020}
1021
1022void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1023 VisitTryTruncateDouble(this, kS390_DoubleToUint64, node);
1024}
1025
1026void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1027 // TODO(mbrandy): inspect input to see if nop is appropriate.
1028 VisitRR(this, kS390_ExtendSignWord32, node);
1029}
1030
1031void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1032 // TODO(mbrandy): inspect input to see if nop is appropriate.
1033 VisitRR(this, kS390_Uint32ToUint64, node);
1034}
1035#endif
1036
1037void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
1038 VisitRR(this, kS390_DoubleToFloat32, node);
1039}
1040
1041void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
1042 VisitRR(this, kArchTruncateDoubleToI, node);
1043}
1044
1045void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
1046 VisitRR(this, kS390_DoubleToInt32, node);
1047}
1048
1049void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
1050 VisitRR(this, kS390_Float32ToInt32, node);
1051}
1052
1053void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
1054 VisitRR(this, kS390_Float32ToUint32, node);
1055}
1056
1057#if V8_TARGET_ARCH_S390X
1058void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1059 // TODO(mbrandy): inspect input to see if nop is appropriate.
1060 VisitRR(this, kS390_Int64ToInt32, node);
1061}
1062
1063void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1064 VisitRR(this, kS390_Int64ToFloat32, node);
1065}
1066
1067void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1068 VisitRR(this, kS390_Int64ToDouble, node);
1069}
1070
1071void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1072 VisitRR(this, kS390_Uint64ToFloat32, node);
1073}
1074
1075void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1076 VisitRR(this, kS390_Uint64ToDouble, node);
1077}
1078#endif
1079
1080void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1081 VisitRR(this, kS390_BitcastFloat32ToInt32, node);
1082}
1083
1084#if V8_TARGET_ARCH_S390X
1085void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1086 VisitRR(this, kS390_BitcastDoubleToInt64, node);
1087}
1088#endif
1089
1090void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1091 VisitRR(this, kS390_BitcastInt32ToFloat32, node);
1092}
1093
1094#if V8_TARGET_ARCH_S390X
1095void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1096 VisitRR(this, kS390_BitcastInt64ToDouble, node);
1097}
1098#endif
1099
1100void InstructionSelector::VisitFloat32Add(Node* node) {
1101 VisitRRR(this, kS390_AddFloat, node);
1102}
1103
1104void InstructionSelector::VisitFloat64Add(Node* node) {
1105 // TODO(mbrandy): detect multiply-add
1106 VisitRRR(this, kS390_AddDouble, node);
1107}
1108
1109void InstructionSelector::VisitFloat32Sub(Node* node) {
1110 S390OperandGenerator g(this);
1111 Float32BinopMatcher m(node);
1112 if (m.left().IsMinusZero()) {
1113 Emit(kS390_NegDouble, g.DefineAsRegister(node),
1114 g.UseRegister(m.right().node()));
1115 return;
1116 }
1117 VisitRRR(this, kS390_SubFloat, node);
1118}
1119
1120void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
1121 S390OperandGenerator g(this);
1122 VisitRRR(this, kS390_SubFloat, node);
1123}
1124
1125void InstructionSelector::VisitFloat64Sub(Node* node) {
1126 // TODO(mbrandy): detect multiply-subtract
1127 S390OperandGenerator g(this);
1128 Float64BinopMatcher m(node);
1129 if (m.left().IsMinusZero()) {
1130 if (m.right().IsFloat64RoundDown() &&
1131 CanCover(m.node(), m.right().node())) {
1132 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
1133 CanCover(m.right().node(), m.right().InputAt(0))) {
1134 Float64BinopMatcher mright0(m.right().InputAt(0));
1135 if (mright0.left().IsMinusZero()) {
1136 // -floor(-x) = ceil(x)
1137 Emit(kS390_CeilDouble, g.DefineAsRegister(node),
1138 g.UseRegister(mright0.right().node()));
1139 return;
1140 }
1141 }
1142 }
1143 Emit(kS390_NegDouble, g.DefineAsRegister(node),
1144 g.UseRegister(m.right().node()));
1145 return;
1146 }
1147 VisitRRR(this, kS390_SubDouble, node);
1148}
1149
1150void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
1151 VisitRRR(this, kS390_SubDouble, node);
1152}
1153
1154void InstructionSelector::VisitFloat32Mul(Node* node) {
1155 VisitRRR(this, kS390_MulFloat, node);
1156}
1157
1158void InstructionSelector::VisitFloat64Mul(Node* node) {
1159 // TODO(mbrandy): detect negate
1160 VisitRRR(this, kS390_MulDouble, node);
1161}
1162
1163void InstructionSelector::VisitFloat32Div(Node* node) {
1164 VisitRRR(this, kS390_DivFloat, node);
1165}
1166
1167void InstructionSelector::VisitFloat64Div(Node* node) {
1168 VisitRRR(this, kS390_DivDouble, node);
1169}
1170
1171void InstructionSelector::VisitFloat64Mod(Node* node) {
1172 S390OperandGenerator g(this);
1173 Emit(kS390_ModDouble, g.DefineAsFixed(node, d1),
1174 g.UseFixed(node->InputAt(0), d1), g.UseFixed(node->InputAt(1), d2))
1175 ->MarkAsCall();
1176}
1177
1178void InstructionSelector::VisitFloat32Max(Node* node) { UNREACHABLE(); }
1179
1180void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }
1181
1182void InstructionSelector::VisitFloat32Min(Node* node) { UNREACHABLE(); }
1183
1184void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }
1185
1186void InstructionSelector::VisitFloat32Abs(Node* node) {
1187 VisitRR(this, kS390_AbsFloat, node);
1188}
1189
1190void InstructionSelector::VisitFloat64Abs(Node* node) {
1191 VisitRR(this, kS390_AbsDouble, node);
1192}
1193
1194void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1195 VisitRR(this, kS390_SqrtFloat, node);
1196}
1197
1198void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1199 VisitRR(this, kS390_SqrtDouble, node);
1200}
1201
1202void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1203 VisitRR(this, kS390_FloorFloat, node);
1204}
1205
1206void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1207 VisitRR(this, kS390_FloorDouble, node);
1208}
1209
1210void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1211 VisitRR(this, kS390_CeilFloat, node);
1212}
1213
1214void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1215 VisitRR(this, kS390_CeilDouble, node);
1216}
1217
1218void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1219 VisitRR(this, kS390_TruncateFloat, node);
1220}
1221
1222void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1223 VisitRR(this, kS390_TruncateDouble, node);
1224}
1225
1226void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1227 VisitRR(this, kS390_RoundDouble, node);
1228}
1229
1230void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1231 UNREACHABLE();
1232}
1233
1234void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1235 UNREACHABLE();
1236}
1237
1238void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1239 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1240 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1241 return VisitBinop<Int32BinopMatcher>(this, node, kS390_AddWithOverflow32,
1242 kInt16Imm, &cont);
1243 }
1244 FlagsContinuation cont;
1245 VisitBinop<Int32BinopMatcher>(this, node, kS390_AddWithOverflow32, kInt16Imm,
1246 &cont);
1247}
1248
1249void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1250 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1251 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1252 return VisitBinop<Int32BinopMatcher>(this, node, kS390_SubWithOverflow32,
1253 kInt16Imm_Negate, &cont);
1254 }
1255 FlagsContinuation cont;
1256 VisitBinop<Int32BinopMatcher>(this, node, kS390_SubWithOverflow32,
1257 kInt16Imm_Negate, &cont);
1258}
1259
1260#if V8_TARGET_ARCH_S390X
1261void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1262 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1263 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1264 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm,
1265 &cont);
1266 }
1267 FlagsContinuation cont;
1268 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm, &cont);
1269}
1270
1271void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1272 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1273 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1274 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub,
1275 kInt16Imm_Negate, &cont);
1276 }
1277 FlagsContinuation cont;
1278 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate, &cont);
1279}
1280#endif
1281
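// Returns true when the continuation's condition is an unsigned comparison,
// so callers select the unsigned immediate mode for the compare.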
1282static bool CompareLogical(FlagsContinuation* cont) {
1283 switch (cont->condition()) {
1284 case kUnsignedLessThan:
1285 case kUnsignedGreaterThanOrEqual:
1286 case kUnsignedLessThanOrEqual:
1287 case kUnsignedGreaterThan:
1288 return true;
1289 default:
1290 return false;
1291 }
1292 UNREACHABLE();
1293 return false;
1294}
1295
1296namespace {
1297
1298// Shared routine for multiple compare operations.
1299void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1300 InstructionOperand left, InstructionOperand right,
1301 FlagsContinuation* cont) {
1302 S390OperandGenerator g(selector);
1303 opcode = cont->Encode(opcode);
1304 if (cont->IsBranch()) {
1305 selector->Emit(opcode, g.NoOutput(), left, right,
1306 g.Label(cont->true_block()), g.Label(cont->false_block()));
1307 } else if (cont->IsDeoptimize()) {
1308 selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
1309 cont->frame_state());
1310 } else {
1311 DCHECK(cont->IsSet());
1312 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1313 }
1314}
1315
1316// Shared routine for multiple word compare operations.
1317void VisitWordCompare(InstructionSelector* selector, Node* node,
1318 InstructionCode opcode, FlagsContinuation* cont,
1319 bool commutative, ImmediateMode immediate_mode) {
1320 S390OperandGenerator g(selector);
1321 Node* left = node->InputAt(0);
1322 Node* right = node->InputAt(1);
1323
1324 // Match immediates on left or right side of comparison.
1325 if (g.CanBeImmediate(right, immediate_mode)) {
1326 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1327 cont);
1328 } else if (g.CanBeImmediate(left, immediate_mode)) {
1329 if (!commutative) cont->Commute();
1330 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1331 cont);
1332 } else {
1333 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1334 cont);
1335 }
1336}
1337
1338void VisitWord32Compare(InstructionSelector* selector, Node* node,
1339 FlagsContinuation* cont) {
1340 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1341 VisitWordCompare(selector, node, kS390_Cmp32, cont, false, mode);
1342}
1343
1344#if V8_TARGET_ARCH_S390X
1345void VisitWord64Compare(InstructionSelector* selector, Node* node,
1346 FlagsContinuation* cont) {
1347 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1348 VisitWordCompare(selector, node, kS390_Cmp64, cont, false, mode);
1349}
1350#endif
1351
1352// Shared routine for multiple float32 compare operations.
1353void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1354 FlagsContinuation* cont) {
1355 S390OperandGenerator g(selector);
1356 Node* left = node->InputAt(0);
1357 Node* right = node->InputAt(1);
1358 VisitCompare(selector, kS390_CmpFloat, g.UseRegister(left),
1359 g.UseRegister(right), cont);
1360}
1361
1362// Shared routine for multiple float64 compare operations.
1363void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1364 FlagsContinuation* cont) {
1365 S390OperandGenerator g(selector);
1366 Node* left = node->InputAt(0);
1367 Node* right = node->InputAt(1);
1368 VisitCompare(selector, kS390_CmpDouble, g.UseRegister(left),
1369 g.UseRegister(right), cont);
1370}
1371
1372// Shared routine for word comparisons against zero.
1373void VisitWordCompareZero(InstructionSelector* selector, Node* user,
1374 Node* value, InstructionCode opcode,
1375 FlagsContinuation* cont) {
1376 while (selector->CanCover(user, value)) {
1377 switch (value->opcode()) {
1378 case IrOpcode::kWord32Equal: {
1379 // Combine with comparisons against 0 by simply inverting the
1380 // continuation.
1381 Int32BinopMatcher m(value);
1382 if (m.right().Is(0)) {
1383 user = value;
1384 value = m.left().node();
1385 cont->Negate();
1386 continue;
1387 }
1388 cont->OverwriteAndNegateIfEqual(kEqual);
1389 return VisitWord32Compare(selector, value, cont);
1390 }
1391 case IrOpcode::kInt32LessThan:
1392 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1393 return VisitWord32Compare(selector, value, cont);
1394 case IrOpcode::kInt32LessThanOrEqual:
1395 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1396 return VisitWord32Compare(selector, value, cont);
1397 case IrOpcode::kUint32LessThan:
1398 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1399 return VisitWord32Compare(selector, value, cont);
1400 case IrOpcode::kUint32LessThanOrEqual:
1401 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1402 return VisitWord32Compare(selector, value, cont);
1403#if V8_TARGET_ARCH_S390X
1404 case IrOpcode::kWord64Equal:
1405 cont->OverwriteAndNegateIfEqual(kEqual);
1406 return VisitWord64Compare(selector, value, cont);
1407 case IrOpcode::kInt64LessThan:
1408 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1409 return VisitWord64Compare(selector, value, cont);
1410 case IrOpcode::kInt64LessThanOrEqual:
1411 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1412 return VisitWord64Compare(selector, value, cont);
1413 case IrOpcode::kUint64LessThan:
1414 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1415 return VisitWord64Compare(selector, value, cont);
1416 case IrOpcode::kUint64LessThanOrEqual:
1417 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1418 return VisitWord64Compare(selector, value, cont);
1419#endif
1420 case IrOpcode::kFloat32Equal:
1421 cont->OverwriteAndNegateIfEqual(kEqual);
1422 return VisitFloat32Compare(selector, value, cont);
1423 case IrOpcode::kFloat32LessThan:
1424 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1425 return VisitFloat32Compare(selector, value, cont);
1426 case IrOpcode::kFloat32LessThanOrEqual:
1427 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1428 return VisitFloat32Compare(selector, value, cont);
1429 case IrOpcode::kFloat64Equal:
1430 cont->OverwriteAndNegateIfEqual(kEqual);
1431 return VisitFloat64Compare(selector, value, cont);
1432 case IrOpcode::kFloat64LessThan:
1433 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1434 return VisitFloat64Compare(selector, value, cont);
1435 case IrOpcode::kFloat64LessThanOrEqual:
1436 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1437 return VisitFloat64Compare(selector, value, cont);
1438 case IrOpcode::kProjection:
1439 // Check if this is the overflow output projection of an
1440 // <Operation>WithOverflow node.
1441 if (ProjectionIndexOf(value->op()) == 1u) {
1442 // We cannot combine the <Operation>WithOverflow with this branch
1443 // unless the 0th projection (the use of the actual value of the
 1444 // <Operation>) is either nullptr, which means there's no use of the
1445 // actual value, or was already defined, which means it is scheduled
1446 // *AFTER* this branch).
1447 Node* const node = value->InputAt(0);
1448 Node* const result = NodeProperties::FindProjection(node, 0);
1449 if (result == nullptr || selector->IsDefined(result)) {
1450 switch (node->opcode()) {
1451 case IrOpcode::kInt32AddWithOverflow:
1452 cont->OverwriteAndNegateIfEqual(kOverflow);
1453 return VisitBinop<Int32BinopMatcher>(
1454 selector, node, kS390_AddWithOverflow32, kInt16Imm, cont);
1455 case IrOpcode::kInt32SubWithOverflow:
1456 cont->OverwriteAndNegateIfEqual(kOverflow);
1457 return VisitBinop<Int32BinopMatcher>(selector, node,
1458 kS390_SubWithOverflow32,
1459 kInt16Imm_Negate, cont);
1460#if V8_TARGET_ARCH_S390X
1461 case IrOpcode::kInt64AddWithOverflow:
1462 cont->OverwriteAndNegateIfEqual(kOverflow);
1463 return VisitBinop<Int64BinopMatcher>(selector, node, kS390_Add,
1464 kInt16Imm, cont);
1465 case IrOpcode::kInt64SubWithOverflow:
1466 cont->OverwriteAndNegateIfEqual(kOverflow);
1467 return VisitBinop<Int64BinopMatcher>(selector, node, kS390_Sub,
1468 kInt16Imm_Negate, cont);
1469#endif
1470 default:
1471 break;
1472 }
1473 }
1474 }
1475 break;
1476 case IrOpcode::kInt32Sub:
1477 return VisitWord32Compare(selector, value, cont);
1478 case IrOpcode::kWord32And:
1479 return VisitWordCompare(selector, value, kS390_Tst32, cont, true,
1480 kInt16Imm_Unsigned);
1481// TODO(mbrandy): Handle?
1482// case IrOpcode::kInt32Add:
1483// case IrOpcode::kWord32Or:
1484// case IrOpcode::kWord32Xor:
1485// case IrOpcode::kWord32Sar:
1486// case IrOpcode::kWord32Shl:
1487// case IrOpcode::kWord32Shr:
1488// case IrOpcode::kWord32Ror:
1489#if V8_TARGET_ARCH_S390X
1490 case IrOpcode::kInt64Sub:
1491 return VisitWord64Compare(selector, value, cont);
1492 case IrOpcode::kWord64And:
1493 return VisitWordCompare(selector, value, kS390_Tst64, cont, true,
1494 kInt16Imm_Unsigned);
1495// TODO(mbrandy): Handle?
1496// case IrOpcode::kInt64Add:
1497// case IrOpcode::kWord64Or:
1498// case IrOpcode::kWord64Xor:
1499// case IrOpcode::kWord64Sar:
1500// case IrOpcode::kWord64Shl:
1501// case IrOpcode::kWord64Shr:
1502// case IrOpcode::kWord64Ror:
1503#endif
1504 default:
1505 break;
1506 }
1507 break;
1508 }
1509
1510 // Branch could not be combined with a compare, emit compare against 0.
1511 S390OperandGenerator g(selector);
1512 VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
1513 cont);
1514}
1515
1516void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
1517 Node* value, FlagsContinuation* cont) {
1518 VisitWordCompareZero(selector, user, value, kS390_Cmp32, cont);
1519}
1520
1521#if V8_TARGET_ARCH_S390X
1522void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
1523 Node* value, FlagsContinuation* cont) {
1524 VisitWordCompareZero(selector, user, value, kS390_Cmp64, cont);
1525}
1526#endif
1527
1528} // namespace
1529
1530void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1531 BasicBlock* fbranch) {
1532 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1533 VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
1534}
1535
1536void InstructionSelector::VisitDeoptimizeIf(Node* node) {
1537 FlagsContinuation cont =
1538 FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
1539 VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
1540}
1541
1542void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
1543 FlagsContinuation cont =
1544 FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
1545 VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
1546}
1547
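// Chooses between ArchTableSwitch and ArchLookupSwitch by comparing rough
// space/time cost estimates; the table form first biases the switch value by
// sw.min_value when that minimum is non-zero.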
1548void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1549 S390OperandGenerator g(this);
1550 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1551
1552 // Emit either ArchTableSwitch or ArchLookupSwitch.
1553 size_t table_space_cost = 4 + sw.value_range;
1554 size_t table_time_cost = 3;
1555 size_t lookup_space_cost = 3 + 2 * sw.case_count;
1556 size_t lookup_time_cost = sw.case_count;
1557 if (sw.case_count > 0 &&
1558 table_space_cost + 3 * table_time_cost <=
1559 lookup_space_cost + 3 * lookup_time_cost &&
1560 sw.min_value > std::numeric_limits<int32_t>::min()) {
1561 InstructionOperand index_operand = value_operand;
1562 if (sw.min_value) {
1563 index_operand = g.TempRegister();
1564 Emit(kS390_Sub, index_operand, value_operand,
1565 g.TempImmediate(sw.min_value));
1566 }
1567 // Generate a table lookup.
1568 return EmitTableSwitch(sw, index_operand);
1569 }
1570
1571 // Generate a sequence of conditional jumps.
1572 return EmitLookupSwitch(sw, value_operand);
1573}
1574
1575void InstructionSelector::VisitWord32Equal(Node* const node) {
1576 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1577 Int32BinopMatcher m(node);
1578 if (m.right().Is(0)) {
1579 return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
1580 }
1581 VisitWord32Compare(this, node, &cont);
1582}
1583
1584void InstructionSelector::VisitInt32LessThan(Node* node) {
1585 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
1586 VisitWord32Compare(this, node, &cont);
1587}
1588
1589void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
1590 FlagsContinuation cont =
1591 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
1592 VisitWord32Compare(this, node, &cont);
1593}
1594
1595void InstructionSelector::VisitUint32LessThan(Node* node) {
1596 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1597 VisitWord32Compare(this, node, &cont);
1598}
1599
1600void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
1601 FlagsContinuation cont =
1602 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1603 VisitWord32Compare(this, node, &cont);
1604}
1605
1606#if V8_TARGET_ARCH_S390X
1607void InstructionSelector::VisitWord64Equal(Node* const node) {
1608 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1609 Int64BinopMatcher m(node);
1610 if (m.right().Is(0)) {
1611 return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
1612 }
1613 VisitWord64Compare(this, node, &cont);
1614}
1615
1616void InstructionSelector::VisitInt64LessThan(Node* node) {
1617 FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
1618 VisitWord64Compare(this, node, &cont);
1619}
1620
1621void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
1622 FlagsContinuation cont =
1623 FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
1624 VisitWord64Compare(this, node, &cont);
1625}
1626
1627void InstructionSelector::VisitUint64LessThan(Node* node) {
1628 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1629 VisitWord64Compare(this, node, &cont);
1630}
1631
1632void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
1633 FlagsContinuation cont =
1634 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1635 VisitWord64Compare(this, node, &cont);
1636}
1637#endif
1638
1639void InstructionSelector::VisitFloat32Equal(Node* node) {
1640 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1641 VisitFloat32Compare(this, node, &cont);
1642}
1643
1644void InstructionSelector::VisitFloat32LessThan(Node* node) {
1645 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1646 VisitFloat32Compare(this, node, &cont);
1647}
1648
1649void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
1650 FlagsContinuation cont =
1651 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1652 VisitFloat32Compare(this, node, &cont);
1653}
1654
1655void InstructionSelector::VisitFloat64Equal(Node* node) {
1656 FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
1657 VisitFloat64Compare(this, node, &cont);
1658}
1659
1660void InstructionSelector::VisitFloat64LessThan(Node* node) {
1661 FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
1662 VisitFloat64Compare(this, node, &cont);
1663}
1664
1665void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
1666 FlagsContinuation cont =
1667 FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
1668 VisitFloat64Compare(this, node, &cont);
1669}
1670
1671void InstructionSelector::EmitPrepareArguments(
1672 ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
1673 Node* node) {
1674 S390OperandGenerator g(this);
1675
1676 // Prepare for C function call.
1677 if (descriptor->IsCFunctionCall()) {
1678 Emit(kArchPrepareCallCFunction |
1679 MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
1680 0, nullptr, 0, nullptr);
1681
1682 // Poke any stack arguments.
1683 int slot = kStackFrameExtraParamSlot;
1684 for (PushParameter input : (*arguments)) {
1685 Emit(kS390_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
1686 g.TempImmediate(slot));
1687 ++slot;
1688 }
1689 } else {
1690 // Push any stack arguments.
1691 int num_slots = static_cast<int>(descriptor->StackParameterCount());
1692 int slot = 0;
1693 for (PushParameter input : (*arguments)) {
1694 if (slot == 0) {
1695 DCHECK(input.node());
1696 Emit(kS390_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
1697 g.TempImmediate(num_slots));
1698 } else {
1699 // Skip any alignment holes in pushed nodes.
1700 if (input.node()) {
1701 Emit(kS390_StoreToStackSlot, g.NoOutput(),
1702 g.UseRegister(input.node()), g.TempImmediate(slot));
1703 }
1704 }
1705 ++slot;
1706 }
1707 }
1708}
1709
1710bool InstructionSelector::IsTailCallAddressImmediate() { return false; }
1711
1712int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }
1713
1714void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
1715 S390OperandGenerator g(this);
1716 Emit(kS390_DoubleExtractLowWord32, g.DefineAsRegister(node),
1717 g.UseRegister(node->InputAt(0)));
1718}
1719
1720void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
1721 S390OperandGenerator g(this);
1722 Emit(kS390_DoubleExtractHighWord32, g.DefineAsRegister(node),
1723 g.UseRegister(node->InputAt(0)));
1724}
1725
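// If the high word was just inserted by a covered Float64InsertHighWord32,
// the two inserts combine into a single kS390_DoubleConstruct (the symmetric
// case is handled in VisitFloat64InsertHighWord32 below).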
1726void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
1727 S390OperandGenerator g(this);
1728 Node* left = node->InputAt(0);
1729 Node* right = node->InputAt(1);
1730 if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
1731 CanCover(node, left)) {
1732 left = left->InputAt(1);
1733 Emit(kS390_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
1734 g.UseRegister(right));
1735 return;
1736 }
1737 Emit(kS390_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
1738 g.UseRegister(left), g.UseRegister(right));
1739}
1740
1741void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
1742 S390OperandGenerator g(this);
1743 Node* left = node->InputAt(0);
1744 Node* right = node->InputAt(1);
1745 if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
1746 CanCover(node, left)) {
1747 left = left->InputAt(1);
1748 Emit(kS390_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
1749 g.UseRegister(left));
1750 return;
1751 }
1752 Emit(kS390_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
1753 g.UseRegister(left), g.UseRegister(right));
1754}
1755
1756void InstructionSelector::VisitAtomicLoad(Node* node) {
1757 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
1758 S390OperandGenerator g(this);
1759 Node* base = node->InputAt(0);
1760 Node* index = node->InputAt(1);
1761 ArchOpcode opcode = kArchNop;
1762 switch (load_rep.representation()) {
1763 case MachineRepresentation::kWord8:
1764 opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
1765 break;
1766 case MachineRepresentation::kWord16:
1767 opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
1768 break;
1769 case MachineRepresentation::kWord32:
1770 opcode = kAtomicLoadWord32;
1771 break;
1772 default:
1773 UNREACHABLE();
1774 return;
1775 }
1776 Emit(opcode | AddressingModeField::encode(kMode_MRR),
1777 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
1778}
1779
1780void InstructionSelector::VisitAtomicStore(Node* node) {
1781 MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
1782 S390OperandGenerator g(this);
1783 Node* base = node->InputAt(0);
1784 Node* index = node->InputAt(1);
1785 Node* value = node->InputAt(2);
1786 ArchOpcode opcode = kArchNop;
1787 switch (rep) {
1788 case MachineRepresentation::kWord8:
1789 opcode = kAtomicStoreWord8;
1790 break;
1791 case MachineRepresentation::kWord16:
1792 opcode = kAtomicStoreWord16;
1793 break;
1794 case MachineRepresentation::kWord32:
1795 opcode = kAtomicStoreWord32;
1796 break;
1797 default:
1798 UNREACHABLE();
1799 return;
1800 }
1801
1802 InstructionOperand inputs[4];
1803 size_t input_count = 0;
1804 inputs[input_count++] = g.UseUniqueRegister(value);
1805 inputs[input_count++] = g.UseUniqueRegister(base);
1806 inputs[input_count++] = g.UseUniqueRegister(index);
1807 Emit(opcode | AddressingModeField::encode(kMode_MRR), 0, nullptr, input_count,
1808 inputs);
1809}
1810
1811// static
1812MachineOperatorBuilder::Flags
1813InstructionSelector::SupportedMachineOperatorFlags() {
1814 return MachineOperatorBuilder::kFloat32RoundDown |
1815 MachineOperatorBuilder::kFloat64RoundDown |
1816 MachineOperatorBuilder::kFloat32RoundUp |
1817 MachineOperatorBuilder::kFloat64RoundUp |
1818 MachineOperatorBuilder::kFloat32RoundTruncate |
1819 MachineOperatorBuilder::kFloat64RoundTruncate |
1820 MachineOperatorBuilder::kFloat64RoundTiesAway |
1821 MachineOperatorBuilder::kWord32Popcnt |
1822 MachineOperatorBuilder::kWord64Popcnt;
1823}
1824
1825} // namespace compiler
1826} // namespace internal
1827} // namespace v8