1// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/base/adapters.h"
6#include "src/compiler/instruction-selector-impl.h"
7#include "src/compiler/node-matchers.h"
8#include "src/compiler/node-properties.h"
9#include "src/s390/frames-s390.h"
10
11namespace v8 {
12namespace internal {
13namespace compiler {
14
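// Classifies the immediate ranges that S390 instructions can encode directly;
// operands that do not fit the requested mode are placed in registers instead
// (see S390OperandGenerator::UseOperand).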
15enum ImmediateMode {
16 kInt16Imm,
17 kInt16Imm_Unsigned,
18 kInt16Imm_Negate,
19 kInt16Imm_4ByteAligned,
20 kShift32Imm,
21 kShift64Imm,
22 kNoImmediate
23};
24
25// Adds S390-specific methods for generating operands.
26class S390OperandGenerator final : public OperandGenerator {
27 public:
28 explicit S390OperandGenerator(InstructionSelector* selector)
29 : OperandGenerator(selector) {}
30
31 InstructionOperand UseOperand(Node* node, ImmediateMode mode) {
32 if (CanBeImmediate(node, mode)) {
33 return UseImmediate(node);
34 }
35 return UseRegister(node);
36 }
37
38 bool CanBeImmediate(Node* node, ImmediateMode mode) {
39 int64_t value;
40 if (node->opcode() == IrOpcode::kInt32Constant)
41 value = OpParameter<int32_t>(node);
42 else if (node->opcode() == IrOpcode::kInt64Constant)
43 value = OpParameter<int64_t>(node);
44 else
45 return false;
46 return CanBeImmediate(value, mode);
47 }
48
49 bool CanBeImmediate(int64_t value, ImmediateMode mode) {
50 switch (mode) {
51 case kInt16Imm:
52 return is_int16(value);
53 case kInt16Imm_Unsigned:
54 return is_uint16(value);
55 case kInt16Imm_Negate:
56 return is_int16(-value);
57 case kInt16Imm_4ByteAligned:
58 return is_int16(value) && !(value & 3);
59 case kShift32Imm:
60 return 0 <= value && value < 32;
61 case kShift64Imm:
62 return 0 <= value && value < 64;
63 case kNoImmediate:
64 return false;
65 }
66 return false;
67 }
68};
69
70namespace {
71
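// Shared emit helpers: VisitRR emits an instruction with one register input,
// VisitRRR one with two register inputs, and VisitRRO one whose second input
// may be an immediate if it fits |operand_mode|.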
72void VisitRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
73 S390OperandGenerator g(selector);
74 selector->Emit(opcode, g.DefineAsRegister(node),
75 g.UseRegister(node->InputAt(0)));
76}
77
78void VisitRRR(InstructionSelector* selector, ArchOpcode opcode, Node* node) {
79 S390OperandGenerator g(selector);
80 selector->Emit(opcode, g.DefineAsRegister(node),
81 g.UseRegister(node->InputAt(0)),
82 g.UseRegister(node->InputAt(1)));
83}
84
85void VisitRRO(InstructionSelector* selector, ArchOpcode opcode, Node* node,
86 ImmediateMode operand_mode) {
87 S390OperandGenerator g(selector);
88 selector->Emit(opcode, g.DefineAsRegister(node),
89 g.UseRegister(node->InputAt(0)),
90 g.UseOperand(node->InputAt(1), operand_mode));
91}
92
93#if V8_TARGET_ARCH_S390X
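// Emits a truncating float-to-integer conversion. If the success projection
// (projection 1) of the node is used, it is defined as a second output.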
94void VisitTryTruncateDouble(InstructionSelector* selector, ArchOpcode opcode,
95 Node* node) {
96 S390OperandGenerator g(selector);
97 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0))};
98 InstructionOperand outputs[2];
99 size_t output_count = 0;
100 outputs[output_count++] = g.DefineAsRegister(node);
101
102 Node* success_output = NodeProperties::FindProjection(node, 1);
103 if (success_output) {
104 outputs[output_count++] = g.DefineAsRegister(success_output);
105 }
106
107 selector->Emit(opcode, output_count, outputs, 1, inputs);
108}
109#endif
110
111// Shared routine for multiple binary operations.
112template <typename Matcher>
113void VisitBinop(InstructionSelector* selector, Node* node,
114 InstructionCode opcode, ImmediateMode operand_mode,
115 FlagsContinuation* cont) {
116 S390OperandGenerator g(selector);
117 Matcher m(node);
118 InstructionOperand inputs[4];
119 size_t input_count = 0;
120 InstructionOperand outputs[2];
121 size_t output_count = 0;
122
123 inputs[input_count++] = g.UseRegister(m.left().node());
124 inputs[input_count++] = g.UseOperand(m.right().node(), operand_mode);
125
126 if (cont->IsBranch()) {
127 inputs[input_count++] = g.Label(cont->true_block());
128 inputs[input_count++] = g.Label(cont->false_block());
129 }
130
131 outputs[output_count++] = g.DefineAsRegister(node);
132 if (cont->IsSet()) {
133 outputs[output_count++] = g.DefineAsRegister(cont->result());
134 }
135
136 DCHECK_NE(0u, input_count);
137 DCHECK_NE(0u, output_count);
138 DCHECK_GE(arraysize(inputs), input_count);
139 DCHECK_GE(arraysize(outputs), output_count);
140
141 opcode = cont->Encode(opcode);
142 if (cont->IsDeoptimize()) {
143 selector->EmitDeoptimize(opcode, output_count, outputs, input_count, inputs,
144 cont->frame_state());
145 } else {
146 selector->Emit(opcode, output_count, outputs, input_count, inputs);
147 }
148}
149
150// Shared routine for multiple binary operations.
151template <typename Matcher>
152void VisitBinop(InstructionSelector* selector, Node* node, ArchOpcode opcode,
153 ImmediateMode operand_mode) {
154 FlagsContinuation cont;
155 VisitBinop<Matcher>(selector, node, opcode, operand_mode, &cont);
156}
157
158} // namespace
159
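// Selects the load opcode from the loaded representation, then uses an MRI
// (register + immediate) addressing mode when the offset or base fits the
// immediate range, falling back to MRR (register + register) otherwise.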
160void InstructionSelector::VisitLoad(Node* node) {
161 LoadRepresentation load_rep = LoadRepresentationOf(node->op());
162 S390OperandGenerator g(this);
163 Node* base = node->InputAt(0);
164 Node* offset = node->InputAt(1);
165 ArchOpcode opcode = kArchNop;
166 ImmediateMode mode = kInt16Imm;
167 switch (load_rep.representation()) {
168 case MachineRepresentation::kFloat32:
169 opcode = kS390_LoadFloat32;
170 break;
171 case MachineRepresentation::kFloat64:
172 opcode = kS390_LoadDouble;
173 break;
174 case MachineRepresentation::kBit: // Fall through.
175 case MachineRepresentation::kWord8:
176 opcode = load_rep.IsSigned() ? kS390_LoadWordS8 : kS390_LoadWordU8;
177 break;
178 case MachineRepresentation::kWord16:
179 opcode = load_rep.IsSigned() ? kS390_LoadWordS16 : kS390_LoadWordU16;
180 break;
181#if !V8_TARGET_ARCH_S390X
182 case MachineRepresentation::kTagged: // Fall through.
183#endif
184 case MachineRepresentation::kWord32:
185 opcode = kS390_LoadWordU32;
186 break;
187#if V8_TARGET_ARCH_S390X
188 case MachineRepresentation::kTagged: // Fall through.
189 case MachineRepresentation::kWord64:
190 opcode = kS390_LoadWord64;
191 mode = kInt16Imm_4ByteAligned;
192 break;
193#else
194 case MachineRepresentation::kWord64: // Fall through.
195#endif
196 case MachineRepresentation::kSimd128: // Fall through.
197 case MachineRepresentation::kNone:
198 UNREACHABLE();
199 return;
200 }
201 if (g.CanBeImmediate(offset, mode)) {
202 Emit(opcode | AddressingModeField::encode(kMode_MRI),
203 g.DefineAsRegister(node), g.UseRegister(base), g.UseImmediate(offset));
204 } else if (g.CanBeImmediate(base, mode)) {
205 Emit(opcode | AddressingModeField::encode(kMode_MRI),
206 g.DefineAsRegister(node), g.UseRegister(offset), g.UseImmediate(base));
207 } else {
208 Emit(opcode | AddressingModeField::encode(kMode_MRR),
209 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset));
210 }
211}
212
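// Stores that need a write barrier are lowered to kArchStoreWithWriteBarrier
// with unique register inputs; plain stores pick an opcode from the stored
// representation and an MRI or MRR addressing mode, as for loads.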
213void InstructionSelector::VisitStore(Node* node) {
214 S390OperandGenerator g(this);
215 Node* base = node->InputAt(0);
216 Node* offset = node->InputAt(1);
217 Node* value = node->InputAt(2);
218
219 StoreRepresentation store_rep = StoreRepresentationOf(node->op());
220 WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
221 MachineRepresentation rep = store_rep.representation();
222
223 if (write_barrier_kind != kNoWriteBarrier) {
224 DCHECK_EQ(MachineRepresentation::kTagged, rep);
225 AddressingMode addressing_mode;
226 InstructionOperand inputs[3];
227 size_t input_count = 0;
228 inputs[input_count++] = g.UseUniqueRegister(base);
229 // OutOfLineRecordWrite uses the offset in an 'AddP' instruction as well as
230 // for the store itself, so we must check compatibility with both.
231 if (g.CanBeImmediate(offset, kInt16Imm)
232#if V8_TARGET_ARCH_S390X
233 && g.CanBeImmediate(offset, kInt16Imm_4ByteAligned)
234#endif
235 ) {
236 inputs[input_count++] = g.UseImmediate(offset);
237 addressing_mode = kMode_MRI;
238 } else {
239 inputs[input_count++] = g.UseUniqueRegister(offset);
240 addressing_mode = kMode_MRR;
241 }
242 inputs[input_count++] = g.UseUniqueRegister(value);
243 RecordWriteMode record_write_mode = RecordWriteMode::kValueIsAny;
244 switch (write_barrier_kind) {
245 case kNoWriteBarrier:
246 UNREACHABLE();
247 break;
248 case kMapWriteBarrier:
249 record_write_mode = RecordWriteMode::kValueIsMap;
250 break;
251 case kPointerWriteBarrier:
252 record_write_mode = RecordWriteMode::kValueIsPointer;
253 break;
254 case kFullWriteBarrier:
255 record_write_mode = RecordWriteMode::kValueIsAny;
256 break;
257 }
258 InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
259 size_t const temp_count = arraysize(temps);
260 InstructionCode code = kArchStoreWithWriteBarrier;
261 code |= AddressingModeField::encode(addressing_mode);
262 code |= MiscField::encode(static_cast<int>(record_write_mode));
263 Emit(code, 0, nullptr, input_count, inputs, temp_count, temps);
264 } else {
265 ArchOpcode opcode = kArchNop;
266 ImmediateMode mode = kInt16Imm;
267 switch (rep) {
268 case MachineRepresentation::kFloat32:
269 opcode = kS390_StoreFloat32;
270 break;
271 case MachineRepresentation::kFloat64:
272 opcode = kS390_StoreDouble;
273 break;
274 case MachineRepresentation::kBit: // Fall through.
275 case MachineRepresentation::kWord8:
276 opcode = kS390_StoreWord8;
277 break;
278 case MachineRepresentation::kWord16:
279 opcode = kS390_StoreWord16;
280 break;
281#if !V8_TARGET_ARCH_S390X
282 case MachineRepresentation::kTagged: // Fall through.
283#endif
284 case MachineRepresentation::kWord32:
285 opcode = kS390_StoreWord32;
286 break;
287#if V8_TARGET_ARCH_S390X
288 case MachineRepresentation::kTagged: // Fall through.
289 case MachineRepresentation::kWord64:
290 opcode = kS390_StoreWord64;
291 mode = kInt16Imm_4ByteAligned;
292 break;
293#else
294 case MachineRepresentation::kWord64: // Fall through.
295#endif
296 case MachineRepresentation::kSimd128: // Fall through.
297 case MachineRepresentation::kNone:
298 UNREACHABLE();
299 return;
300 }
301 if (g.CanBeImmediate(offset, mode)) {
302 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
303 g.UseRegister(base), g.UseImmediate(offset), g.UseRegister(value));
304 } else if (g.CanBeImmediate(base, mode)) {
305 Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
306 g.UseRegister(offset), g.UseImmediate(base), g.UseRegister(value));
307 } else {
308 Emit(opcode | AddressingModeField::encode(kMode_MRR), g.NoOutput(),
309 g.UseRegister(base), g.UseRegister(offset), g.UseRegister(value));
310 }
311 }
312}
313
314void InstructionSelector::VisitCheckedLoad(Node* node) {
315 CheckedLoadRepresentation load_rep = CheckedLoadRepresentationOf(node->op());
316 S390OperandGenerator g(this);
317 Node* const base = node->InputAt(0);
318 Node* const offset = node->InputAt(1);
319 Node* const length = node->InputAt(2);
320 ArchOpcode opcode = kArchNop;
321 switch (load_rep.representation()) {
322 case MachineRepresentation::kWord8:
323 opcode = load_rep.IsSigned() ? kCheckedLoadInt8 : kCheckedLoadUint8;
324 break;
325 case MachineRepresentation::kWord16:
326 opcode = load_rep.IsSigned() ? kCheckedLoadInt16 : kCheckedLoadUint16;
327 break;
328 case MachineRepresentation::kWord32:
329 opcode = kCheckedLoadWord32;
330 break;
331#if V8_TARGET_ARCH_S390X
332 case MachineRepresentation::kWord64:
333 opcode = kCheckedLoadWord64;
334 break;
335#endif
336 case MachineRepresentation::kFloat32:
337 opcode = kCheckedLoadFloat32;
338 break;
339 case MachineRepresentation::kFloat64:
340 opcode = kCheckedLoadFloat64;
341 break;
342 case MachineRepresentation::kBit: // Fall through.
343 case MachineRepresentation::kTagged: // Fall through.
344#if !V8_TARGET_ARCH_S390X
345 case MachineRepresentation::kWord64: // Fall through.
346#endif
347 case MachineRepresentation::kSimd128: // Fall through.
348 case MachineRepresentation::kNone:
349 UNREACHABLE();
350 return;
351 }
352 AddressingMode addressingMode = kMode_MRR;
353 Emit(opcode | AddressingModeField::encode(addressingMode),
354 g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(offset),
355 g.UseOperand(length, kInt16Imm_Unsigned));
356}
357
358void InstructionSelector::VisitCheckedStore(Node* node) {
359 MachineRepresentation rep = CheckedStoreRepresentationOf(node->op());
360 S390OperandGenerator g(this);
361 Node* const base = node->InputAt(0);
362 Node* const offset = node->InputAt(1);
363 Node* const length = node->InputAt(2);
364 Node* const value = node->InputAt(3);
365 ArchOpcode opcode = kArchNop;
366 switch (rep) {
367 case MachineRepresentation::kWord8:
368 opcode = kCheckedStoreWord8;
369 break;
370 case MachineRepresentation::kWord16:
371 opcode = kCheckedStoreWord16;
372 break;
373 case MachineRepresentation::kWord32:
374 opcode = kCheckedStoreWord32;
375 break;
376#if V8_TARGET_ARCH_S390X
377 case MachineRepresentation::kWord64:
378 opcode = kCheckedStoreWord64;
379 break;
380#endif
381 case MachineRepresentation::kFloat32:
382 opcode = kCheckedStoreFloat32;
383 break;
384 case MachineRepresentation::kFloat64:
385 opcode = kCheckedStoreFloat64;
386 break;
387 case MachineRepresentation::kBit: // Fall through.
388 case MachineRepresentation::kTagged: // Fall through.
389#if !V8_TARGET_ARCH_S390X
390 case MachineRepresentation::kWord64: // Fall through.
391#endif
392 case MachineRepresentation::kSimd128: // Fall through.
393 case MachineRepresentation::kNone:
394 UNREACHABLE();
395 return;
396 }
397 AddressingMode addressingMode = kMode_MRR;
398 Emit(opcode | AddressingModeField::encode(addressingMode), g.NoOutput(),
399 g.UseRegister(base), g.UseRegister(offset),
400 g.UseOperand(length, kInt16Imm_Unsigned), g.UseRegister(value));
401}
402
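// Shared routine for And/Or. When one input is an Xor with -1 that this node
// covers, the complemented form (kS390_AndComplement / kS390_OrComplement) is
// emitted instead of materializing the inversion.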
403template <typename Matcher>
404static void VisitLogical(InstructionSelector* selector, Node* node, Matcher* m,
405 ArchOpcode opcode, bool left_can_cover,
406 bool right_can_cover, ImmediateMode imm_mode) {
407 S390OperandGenerator g(selector);
408
409 // Map instruction to equivalent operation with inverted right input.
410 ArchOpcode inv_opcode = opcode;
411 switch (opcode) {
412 case kS390_And:
413 inv_opcode = kS390_AndComplement;
414 break;
415 case kS390_Or:
416 inv_opcode = kS390_OrComplement;
417 break;
418 default:
419 UNREACHABLE();
420 }
421
422 // Select Logical(y, ~x) for Logical(Xor(x, -1), y).
423 if ((m->left().IsWord32Xor() || m->left().IsWord64Xor()) && left_can_cover) {
424 Matcher mleft(m->left().node());
425 if (mleft.right().Is(-1)) {
426 selector->Emit(inv_opcode, g.DefineAsRegister(node),
427 g.UseRegister(m->right().node()),
428 g.UseRegister(mleft.left().node()));
429 return;
430 }
431 }
432
433 // Select Logical(x, ~y) for Logical(x, Xor(y, -1)).
434 if ((m->right().IsWord32Xor() || m->right().IsWord64Xor()) &&
435 right_can_cover) {
436 Matcher mright(m->right().node());
437 if (mright.right().Is(-1)) {
438 // TODO(all): support shifted operand on right.
439 selector->Emit(inv_opcode, g.DefineAsRegister(node),
440 g.UseRegister(m->left().node()),
441 g.UseRegister(mright.left().node()));
442 return;
443 }
444 }
445
446 VisitBinop<Matcher>(selector, node, opcode, imm_mode);
447}
448
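// Returns true if |value| is a single contiguous run of 1 bits. *mb receives
// the bit index of the run's most significant bit and *me that of its least
// significant bit (bit 0 is the LSB), as used by the rotate-and-mask
// instructions emitted below.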
449static inline bool IsContiguousMask32(uint32_t value, int* mb, int* me) {
450 int mask_width = base::bits::CountPopulation32(value);
451 int mask_msb = base::bits::CountLeadingZeros32(value);
452 int mask_lsb = base::bits::CountTrailingZeros32(value);
453 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 32))
454 return false;
455 *mb = mask_lsb + mask_width - 1;
456 *me = mask_lsb;
457 return true;
458}
459
460#if V8_TARGET_ARCH_S390X
461static inline bool IsContiguousMask64(uint64_t value, int* mb, int* me) {
462 int mask_width = base::bits::CountPopulation64(value);
463 int mask_msb = base::bits::CountLeadingZeros64(value);
464 int mask_lsb = base::bits::CountTrailingZeros64(value);
465 if ((mask_width == 0) || (mask_msb + mask_width + mask_lsb != 64))
466 return false;
467 *mb = mask_lsb + mask_width - 1;
468 *me = mask_lsb;
469 return true;
470}
471#endif
472
473void InstructionSelector::VisitWord32And(Node* node) {
474 S390OperandGenerator g(this);
475 Int32BinopMatcher m(node);
476 int mb = 0;
477 int me = 0;
478 if (m.right().HasValue() && IsContiguousMask32(m.right().Value(), &mb, &me)) {
479 int sh = 0;
480 Node* left = m.left().node();
481 if ((m.left().IsWord32Shr() || m.left().IsWord32Shl()) &&
482 CanCover(node, left)) {
483 Int32BinopMatcher mleft(m.left().node());
484 if (mleft.right().IsInRange(0, 31)) {
485 left = mleft.left().node();
486 sh = mleft.right().Value();
487 if (m.left().IsWord32Shr()) {
488 // Adjust the mask such that it doesn't include any rotated bits.
489 if (mb > 31 - sh) mb = 31 - sh;
490 sh = (32 - sh) & 0x1f;
491 } else {
492 // Adjust the mask such that it doesn't include any rotated bits.
493 if (me < sh) me = sh;
494 }
495 }
496 }
497 if (mb >= me) {
498 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
499 g.UseRegister(left), g.TempImmediate(sh), g.TempImmediate(mb),
500 g.TempImmediate(me));
501 return;
502 }
503 }
504 VisitLogical<Int32BinopMatcher>(
505 this, node, &m, kS390_And, CanCover(node, m.left().node()),
506 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
507}
508
509#if V8_TARGET_ARCH_S390X
510void InstructionSelector::VisitWord64And(Node* node) {
511 S390OperandGenerator g(this);
512 Int64BinopMatcher m(node);
513 int mb = 0;
514 int me = 0;
515 if (m.right().HasValue() && IsContiguousMask64(m.right().Value(), &mb, &me)) {
516 int sh = 0;
517 Node* left = m.left().node();
518 if ((m.left().IsWord64Shr() || m.left().IsWord64Shl()) &&
519 CanCover(node, left)) {
520 Int64BinopMatcher mleft(m.left().node());
521 if (mleft.right().IsInRange(0, 63)) {
522 left = mleft.left().node();
523 sh = mleft.right().Value();
524 if (m.left().IsWord64Shr()) {
525 // Adjust the mask such that it doesn't include any rotated bits.
526 if (mb > 63 - sh) mb = 63 - sh;
527 sh = (64 - sh) & 0x3f;
528 } else {
529 // Adjust the mask such that it doesn't include any rotated bits.
530 if (me < sh) me = sh;
531 }
532 }
533 }
534 if (mb >= me) {
535 bool match = false;
536 ArchOpcode opcode;
537 int mask;
538 if (me == 0) {
539 match = true;
540 opcode = kS390_RotLeftAndClearLeft64;
541 mask = mb;
542 } else if (mb == 63) {
543 match = true;
544 opcode = kS390_RotLeftAndClearRight64;
545 mask = me;
546 } else if (sh && me <= sh && m.left().IsWord64Shl()) {
547 match = true;
548 opcode = kS390_RotLeftAndClear64;
549 mask = mb;
550 }
551 if (match) {
552 Emit(opcode, g.DefineAsRegister(node), g.UseRegister(left),
553 g.TempImmediate(sh), g.TempImmediate(mask));
554 return;
555 }
556 }
557 }
558 VisitLogical<Int64BinopMatcher>(
559 this, node, &m, kS390_And, CanCover(node, m.left().node()),
560 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
561}
562#endif
563
564void InstructionSelector::VisitWord32Or(Node* node) {
565 Int32BinopMatcher m(node);
566 VisitLogical<Int32BinopMatcher>(
567 this, node, &m, kS390_Or, CanCover(node, m.left().node()),
568 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
569}
570
571#if V8_TARGET_ARCH_S390X
572void InstructionSelector::VisitWord64Or(Node* node) {
573 Int64BinopMatcher m(node);
574 VisitLogical<Int64BinopMatcher>(
575 this, node, &m, kS390_Or, CanCover(node, m.left().node()),
576 CanCover(node, m.right().node()), kInt16Imm_Unsigned);
577}
578#endif
579
580void InstructionSelector::VisitWord32Xor(Node* node) {
581 S390OperandGenerator g(this);
582 Int32BinopMatcher m(node);
583 if (m.right().Is(-1)) {
584 Emit(kS390_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
585 } else {
586 VisitBinop<Int32BinopMatcher>(this, node, kS390_Xor, kInt16Imm_Unsigned);
587 }
588}
589
590#if V8_TARGET_ARCH_S390X
591void InstructionSelector::VisitWord64Xor(Node* node) {
592 S390OperandGenerator g(this);
593 Int64BinopMatcher m(node);
594 if (m.right().Is(-1)) {
595 Emit(kS390_Not, g.DefineAsRegister(node), g.UseRegister(m.left().node()));
596 } else {
597 VisitBinop<Int64BinopMatcher>(this, node, kS390_Xor, kInt16Imm_Unsigned);
598 }
599}
600#endif
601
602void InstructionSelector::VisitWord32Shl(Node* node) {
603 S390OperandGenerator g(this);
604 Int32BinopMatcher m(node);
605 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
606 Int32BinopMatcher mleft(m.left().node());
607 int sh = m.right().Value();
608 int mb;
609 int me;
610 if (mleft.right().HasValue() &&
611 IsContiguousMask32(mleft.right().Value() << sh, &mb, &me)) {
612 // Adjust the mask such that it doesn't include any rotated bits.
613 if (me < sh) me = sh;
614 if (mb >= me) {
615 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
616 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
617 g.TempImmediate(mb), g.TempImmediate(me));
618 return;
619 }
620 }
621 }
622 VisitRRO(this, kS390_ShiftLeft32, node, kShift32Imm);
623}
624
625#if V8_TARGET_ARCH_S390X
626void InstructionSelector::VisitWord64Shl(Node* node) {
627 S390OperandGenerator g(this);
628 Int64BinopMatcher m(node);
629 // TODO(mbrandy): eliminate left sign extension if right >= 32
630 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
631 Int64BinopMatcher mleft(m.left().node());
632 int sh = m.right().Value();
633 int mb;
634 int me;
635 if (mleft.right().HasValue() &&
636 IsContiguousMask64(mleft.right().Value() << sh, &mb, &me)) {
637 // Adjust the mask such that it doesn't include any rotated bits.
638 if (me < sh) me = sh;
639 if (mb >= me) {
640 bool match = false;
641 ArchOpcode opcode;
642 int mask;
643 if (me == 0) {
644 match = true;
645 opcode = kS390_RotLeftAndClearLeft64;
646 mask = mb;
647 } else if (mb == 63) {
648 match = true;
649 opcode = kS390_RotLeftAndClearRight64;
650 mask = me;
651 } else if (sh && me <= sh) {
652 match = true;
653 opcode = kS390_RotLeftAndClear64;
654 mask = mb;
655 }
656 if (match) {
657 Emit(opcode, g.DefineAsRegister(node),
658 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
659 g.TempImmediate(mask));
660 return;
661 }
662 }
663 }
664 }
665 VisitRRO(this, kS390_ShiftLeft64, node, kShift64Imm);
666}
667#endif
668
669void InstructionSelector::VisitWord32Shr(Node* node) {
670 S390OperandGenerator g(this);
671 Int32BinopMatcher m(node);
672 if (m.left().IsWord32And() && m.right().IsInRange(0, 31)) {
673 Int32BinopMatcher mleft(m.left().node());
674 int sh = m.right().Value();
675 int mb;
676 int me;
677 if (mleft.right().HasValue() &&
678 IsContiguousMask32((uint32_t)(mleft.right().Value()) >> sh, &mb, &me)) {
679 // Adjust the mask such that it doesn't include any rotated bits.
680 if (mb > 31 - sh) mb = 31 - sh;
681 sh = (32 - sh) & 0x1f;
682 if (mb >= me) {
683 Emit(kS390_RotLeftAndMask32, g.DefineAsRegister(node),
684 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
685 g.TempImmediate(mb), g.TempImmediate(me));
686 return;
687 }
688 }
689 }
690 VisitRRO(this, kS390_ShiftRight32, node, kShift32Imm);
691}
692
693#if V8_TARGET_ARCH_S390X
694void InstructionSelector::VisitWord64Shr(Node* node) {
695 S390OperandGenerator g(this);
696 Int64BinopMatcher m(node);
697 if (m.left().IsWord64And() && m.right().IsInRange(0, 63)) {
698 Int64BinopMatcher mleft(m.left().node());
699 int sh = m.right().Value();
700 int mb;
701 int me;
702 if (mleft.right().HasValue() &&
703 IsContiguousMask64((uint64_t)(mleft.right().Value()) >> sh, &mb, &me)) {
704 // Adjust the mask such that it doesn't include any rotated bits.
705 if (mb > 63 - sh) mb = 63 - sh;
706 sh = (64 - sh) & 0x3f;
707 if (mb >= me) {
708 bool match = false;
709 ArchOpcode opcode;
710 int mask;
711 if (me == 0) {
712 match = true;
713 opcode = kS390_RotLeftAndClearLeft64;
714 mask = mb;
715 } else if (mb == 63) {
716 match = true;
717 opcode = kS390_RotLeftAndClearRight64;
718 mask = me;
719 }
720 if (match) {
721 Emit(opcode, g.DefineAsRegister(node),
722 g.UseRegister(mleft.left().node()), g.TempImmediate(sh),
723 g.TempImmediate(mask));
724 return;
725 }
726 }
727 }
728 }
729 VisitRRO(this, kS390_ShiftRight64, node, kShift64Imm);
730}
731#endif
732
733void InstructionSelector::VisitWord32Sar(Node* node) {
734 S390OperandGenerator g(this);
735 Int32BinopMatcher m(node);
736 // Replace with sign extension for (x << K) >> K where K is 16 or 24.
737 if (CanCover(node, m.left().node()) && m.left().IsWord32Shl()) {
738 Int32BinopMatcher mleft(m.left().node());
739 if (mleft.right().Is(16) && m.right().Is(16)) {
740 Emit(kS390_ExtendSignWord16, g.DefineAsRegister(node),
741 g.UseRegister(mleft.left().node()));
742 return;
743 } else if (mleft.right().Is(24) && m.right().Is(24)) {
744 Emit(kS390_ExtendSignWord8, g.DefineAsRegister(node),
745 g.UseRegister(mleft.left().node()));
746 return;
747 }
748 }
749 VisitRRO(this, kS390_ShiftRightArith32, node, kShift32Imm);
750}
751
752#if !V8_TARGET_ARCH_S390X
753void VisitPairBinop(InstructionSelector* selector, ArchOpcode opcode,
754 Node* node) {
755 S390OperandGenerator g(selector);
756
757 // We use UseUniqueRegister here to avoid register sharing with the output
758 // registers.
759 InstructionOperand inputs[] = {
760 g.UseRegister(node->InputAt(0)), g.UseUniqueRegister(node->InputAt(1)),
761 g.UseRegister(node->InputAt(2)), g.UseUniqueRegister(node->InputAt(3))};
762
763 InstructionOperand outputs[] = {
764 g.DefineAsRegister(node),
765 g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
766
767 selector->Emit(opcode, 2, outputs, 4, inputs);
768}
769
770void InstructionSelector::VisitInt32PairAdd(Node* node) {
771 VisitPairBinop(this, kS390_AddPair, node);
772}
773
774void InstructionSelector::VisitInt32PairSub(Node* node) {
775 VisitPairBinop(this, kS390_SubPair, node);
776}
777
778void InstructionSelector::VisitInt32PairMul(Node* node) {
779 S390OperandGenerator g(this);
780 InstructionOperand inputs[] = {g.UseUniqueRegister(node->InputAt(0)),
781 g.UseUniqueRegister(node->InputAt(1)),
782 g.UseUniqueRegister(node->InputAt(2)),
783 g.UseUniqueRegister(node->InputAt(3))};
784
785 InstructionOperand outputs[] = {
786 g.DefineAsRegister(node),
787 g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
788
789 Emit(kS390_MulPair, 2, outputs, 4, inputs);
790}
791
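// Shared routine for the 32-bit pair shifts: a constant shift amount is
// encoded as an immediate, otherwise it is kept in a unique register.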
792void VisitPairShift(InstructionSelector* selector, ArchOpcode opcode,
793 Node* node) {
794 S390OperandGenerator g(selector);
795 Int32Matcher m(node->InputAt(2));
796 InstructionOperand shift_operand;
797 if (m.HasValue()) {
798 shift_operand = g.UseImmediate(m.node());
799 } else {
800 shift_operand = g.UseUniqueRegister(m.node());
801 }
802
803 InstructionOperand inputs[] = {g.UseRegister(node->InputAt(0)),
804 g.UseRegister(node->InputAt(1)),
805 shift_operand};
806
807 InstructionOperand outputs[] = {
808 g.DefineSameAsFirst(node),
809 g.DefineAsRegister(NodeProperties::FindProjection(node, 1))};
810
811 selector->Emit(opcode, 2, outputs, 3, inputs);
812}
813
814void InstructionSelector::VisitWord32PairShl(Node* node) {
815 VisitPairShift(this, kS390_ShiftLeftPair, node);
816}
817
818void InstructionSelector::VisitWord32PairShr(Node* node) {
819 VisitPairShift(this, kS390_ShiftRightPair, node);
820}
821
822void InstructionSelector::VisitWord32PairSar(Node* node) {
823 VisitPairShift(this, kS390_ShiftRightArithPair, node);
824}
825#endif
826
827#if V8_TARGET_ARCH_S390X
828void InstructionSelector::VisitWord64Sar(Node* node) {
829 VisitRRO(this, kS390_ShiftRightArith64, node, kShift64Imm);
830}
831#endif
832
833void InstructionSelector::VisitWord32Ror(Node* node) {
834 VisitRRO(this, kS390_RotRight32, node, kShift32Imm);
835}
836
837#if V8_TARGET_ARCH_S390X
838void InstructionSelector::VisitWord64Ror(Node* node) {
839 VisitRRO(this, kS390_RotRight64, node, kShift64Imm);
840}
841#endif
842
843void InstructionSelector::VisitWord32Clz(Node* node) {
844 S390OperandGenerator g(this);
845 Emit(kS390_Cntlz32, g.DefineAsRegister(node),
846 g.UseRegister(node->InputAt(0)));
847}
848
849#if V8_TARGET_ARCH_S390X
850void InstructionSelector::VisitWord64Clz(Node* node) {
851 S390OperandGenerator g(this);
852 Emit(kS390_Cntlz64, g.DefineAsRegister(node),
853 g.UseRegister(node->InputAt(0)));
854}
855#endif
856
857void InstructionSelector::VisitWord32Popcnt(Node* node) {
858 S390OperandGenerator g(this);
859 Emit(kS390_Popcnt32, g.DefineAsRegister(node),
860 g.UseRegister(node->InputAt(0)));
861}
862
863#if V8_TARGET_ARCH_S390X
864void InstructionSelector::VisitWord64Popcnt(Node* node) {
865 S390OperandGenerator g(this);
866 Emit(kS390_Popcnt64, g.DefineAsRegister(node),
867 g.UseRegister(node->InputAt(0)));
868}
869#endif
870
871void InstructionSelector::VisitWord32Ctz(Node* node) { UNREACHABLE(); }
872
873#if V8_TARGET_ARCH_S390X
874void InstructionSelector::VisitWord64Ctz(Node* node) { UNREACHABLE(); }
875#endif
876
877void InstructionSelector::VisitWord32ReverseBits(Node* node) { UNREACHABLE(); }
878
879#if V8_TARGET_ARCH_S390X
880void InstructionSelector::VisitWord64ReverseBits(Node* node) { UNREACHABLE(); }
881#endif
882
883void InstructionSelector::VisitInt32Add(Node* node) {
884 VisitBinop<Int32BinopMatcher>(this, node, kS390_Add, kInt16Imm);
885}
886
887#if V8_TARGET_ARCH_S390X
888void InstructionSelector::VisitInt64Add(Node* node) {
889 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm);
890}
891#endif
892
893void InstructionSelector::VisitInt32Sub(Node* node) {
894 S390OperandGenerator g(this);
895 Int32BinopMatcher m(node);
896 if (m.left().Is(0)) {
897 Emit(kS390_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
898 } else {
899 VisitBinop<Int32BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate);
900 }
901}
902
903#if V8_TARGET_ARCH_S390X
904void InstructionSelector::VisitInt64Sub(Node* node) {
905 S390OperandGenerator g(this);
906 Int64BinopMatcher m(node);
907 if (m.left().Is(0)) {
908 Emit(kS390_Neg, g.DefineAsRegister(node), g.UseRegister(m.right().node()));
909 } else {
910 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate);
911 }
912}
913#endif
914
915void InstructionSelector::VisitInt32Mul(Node* node) {
916 VisitRRR(this, kS390_Mul32, node);
917}
918
919#if V8_TARGET_ARCH_S390X
920void InstructionSelector::VisitInt64Mul(Node* node) {
921 VisitRRR(this, kS390_Mul64, node);
922}
923#endif
924
925void InstructionSelector::VisitInt32MulHigh(Node* node) {
926 S390OperandGenerator g(this);
927 Emit(kS390_MulHigh32, g.DefineAsRegister(node),
928 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
929}
930
931void InstructionSelector::VisitUint32MulHigh(Node* node) {
932 S390OperandGenerator g(this);
933 Emit(kS390_MulHighU32, g.DefineAsRegister(node),
934 g.UseRegister(node->InputAt(0)), g.UseRegister(node->InputAt(1)));
935}
936
937void InstructionSelector::VisitInt32Div(Node* node) {
938 VisitRRR(this, kS390_Div32, node);
939}
940
941#if V8_TARGET_ARCH_S390X
942void InstructionSelector::VisitInt64Div(Node* node) {
943 VisitRRR(this, kS390_Div64, node);
944}
945#endif
946
947void InstructionSelector::VisitUint32Div(Node* node) {
948 VisitRRR(this, kS390_DivU32, node);
949}
950
951#if V8_TARGET_ARCH_S390X
952void InstructionSelector::VisitUint64Div(Node* node) {
953 VisitRRR(this, kS390_DivU64, node);
954}
955#endif
956
957void InstructionSelector::VisitInt32Mod(Node* node) {
958 VisitRRR(this, kS390_Mod32, node);
959}
960
961#if V8_TARGET_ARCH_S390X
962void InstructionSelector::VisitInt64Mod(Node* node) {
963 VisitRRR(this, kS390_Mod64, node);
964}
965#endif
966
967void InstructionSelector::VisitUint32Mod(Node* node) {
968 VisitRRR(this, kS390_ModU32, node);
969}
970
971#if V8_TARGET_ARCH_S390X
972void InstructionSelector::VisitUint64Mod(Node* node) {
973 VisitRRR(this, kS390_ModU64, node);
974}
975#endif
976
977void InstructionSelector::VisitChangeFloat32ToFloat64(Node* node) {
978 VisitRR(this, kS390_Float32ToDouble, node);
979}
980
981void InstructionSelector::VisitRoundInt32ToFloat32(Node* node) {
982 VisitRR(this, kS390_Int32ToFloat32, node);
983}
984
985void InstructionSelector::VisitRoundUint32ToFloat32(Node* node) {
986 VisitRR(this, kS390_Uint32ToFloat32, node);
987}
988
989void InstructionSelector::VisitChangeInt32ToFloat64(Node* node) {
990 VisitRR(this, kS390_Int32ToDouble, node);
991}
992
993void InstructionSelector::VisitChangeUint32ToFloat64(Node* node) {
994 VisitRR(this, kS390_Uint32ToDouble, node);
995}
996
997void InstructionSelector::VisitChangeFloat64ToInt32(Node* node) {
998 VisitRR(this, kS390_DoubleToInt32, node);
999}
1000
1001void InstructionSelector::VisitChangeFloat64ToUint32(Node* node) {
1002 VisitRR(this, kS390_DoubleToUint32, node);
1003}
1004
1005void InstructionSelector::VisitTruncateFloat64ToUint32(Node* node) {
1006 VisitRR(this, kS390_DoubleToUint32, node);
1007}
1008
1009#if V8_TARGET_ARCH_S390X
1010void InstructionSelector::VisitTryTruncateFloat32ToInt64(Node* node) {
1011 VisitTryTruncateDouble(this, kS390_Float32ToInt64, node);
1012}
1013
1014void InstructionSelector::VisitTryTruncateFloat64ToInt64(Node* node) {
1015 VisitTryTruncateDouble(this, kS390_DoubleToInt64, node);
1016}
1017
1018void InstructionSelector::VisitTryTruncateFloat32ToUint64(Node* node) {
1019 VisitTryTruncateDouble(this, kS390_Float32ToUint64, node);
1020}
1021
1022void InstructionSelector::VisitTryTruncateFloat64ToUint64(Node* node) {
1023 VisitTryTruncateDouble(this, kS390_DoubleToUint64, node);
1024}
1025
1026void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
1027 // TODO(mbrandy): inspect input to see if nop is appropriate.
1028 VisitRR(this, kS390_ExtendSignWord32, node);
1029}
1030
1031void InstructionSelector::VisitChangeUint32ToUint64(Node* node) {
1032 // TODO(mbrandy): inspect input to see if nop is appropriate.
1033 VisitRR(this, kS390_Uint32ToUint64, node);
1034}
1035#endif
1036
1037void InstructionSelector::VisitTruncateFloat64ToFloat32(Node* node) {
1038 VisitRR(this, kS390_DoubleToFloat32, node);
1039}
1040
1041void InstructionSelector::VisitTruncateFloat64ToWord32(Node* node) {
1042 VisitRR(this, kArchTruncateDoubleToI, node);
1043}
1044
1045void InstructionSelector::VisitRoundFloat64ToInt32(Node* node) {
1046 VisitRR(this, kS390_DoubleToInt32, node);
1047}
1048
1049void InstructionSelector::VisitTruncateFloat32ToInt32(Node* node) {
1050 VisitRR(this, kS390_Float32ToInt32, node);
1051}
1052
1053void InstructionSelector::VisitTruncateFloat32ToUint32(Node* node) {
1054 VisitRR(this, kS390_Float32ToUint32, node);
1055}
1056
1057#if V8_TARGET_ARCH_S390X
1058void InstructionSelector::VisitTruncateInt64ToInt32(Node* node) {
1059 // TODO(mbrandy): inspect input to see if nop is appropriate.
1060 VisitRR(this, kS390_Int64ToInt32, node);
1061}
1062
1063void InstructionSelector::VisitRoundInt64ToFloat32(Node* node) {
1064 VisitRR(this, kS390_Int64ToFloat32, node);
1065}
1066
1067void InstructionSelector::VisitRoundInt64ToFloat64(Node* node) {
1068 VisitRR(this, kS390_Int64ToDouble, node);
1069}
1070
1071void InstructionSelector::VisitRoundUint64ToFloat32(Node* node) {
1072 VisitRR(this, kS390_Uint64ToFloat32, node);
1073}
1074
1075void InstructionSelector::VisitRoundUint64ToFloat64(Node* node) {
1076 VisitRR(this, kS390_Uint64ToDouble, node);
1077}
1078#endif
1079
1080void InstructionSelector::VisitBitcastFloat32ToInt32(Node* node) {
1081 VisitRR(this, kS390_BitcastFloat32ToInt32, node);
1082}
1083
1084#if V8_TARGET_ARCH_S390X
1085void InstructionSelector::VisitBitcastFloat64ToInt64(Node* node) {
1086 VisitRR(this, kS390_BitcastDoubleToInt64, node);
1087}
1088#endif
1089
1090void InstructionSelector::VisitBitcastInt32ToFloat32(Node* node) {
1091 VisitRR(this, kS390_BitcastInt32ToFloat32, node);
1092}
1093
1094#if V8_TARGET_ARCH_S390X
1095void InstructionSelector::VisitBitcastInt64ToFloat64(Node* node) {
1096 VisitRR(this, kS390_BitcastInt64ToDouble, node);
1097}
1098#endif
1099
1100void InstructionSelector::VisitFloat32Add(Node* node) {
1101 VisitRRR(this, kS390_AddFloat, node);
1102}
1103
1104void InstructionSelector::VisitFloat64Add(Node* node) {
1105 // TODO(mbrandy): detect multiply-add
1106 VisitRRR(this, kS390_AddDouble, node);
1107}
1108
1109void InstructionSelector::VisitFloat32Sub(Node* node) {
1110 S390OperandGenerator g(this);
1111 Float32BinopMatcher m(node);
1112 if (m.left().IsMinusZero()) {
1113 Emit(kS390_NegDouble, g.DefineAsRegister(node),
1114 g.UseRegister(m.right().node()));
1115 return;
1116 }
1117 VisitRRR(this, kS390_SubFloat, node);
1118}
1119
1120void InstructionSelector::VisitFloat32SubPreserveNan(Node* node) {
1121 S390OperandGenerator g(this);
1122 VisitRRR(this, kS390_SubFloat, node);
1123}
1124
1125void InstructionSelector::VisitFloat64Sub(Node* node) {
1126 // TODO(mbrandy): detect multiply-subtract
1127 S390OperandGenerator g(this);
1128 Float64BinopMatcher m(node);
1129 if (m.left().IsMinusZero()) {
1130 if (m.right().IsFloat64RoundDown() &&
1131 CanCover(m.node(), m.right().node())) {
1132 if (m.right().InputAt(0)->opcode() == IrOpcode::kFloat64Sub &&
1133 CanCover(m.right().node(), m.right().InputAt(0))) {
1134 Float64BinopMatcher mright0(m.right().InputAt(0));
1135 if (mright0.left().IsMinusZero()) {
1136 // -floor(-x) = ceil(x)
1137 Emit(kS390_CeilDouble, g.DefineAsRegister(node),
1138 g.UseRegister(mright0.right().node()));
1139 return;
1140 }
1141 }
1142 }
1143 Emit(kS390_NegDouble, g.DefineAsRegister(node),
1144 g.UseRegister(m.right().node()));
1145 return;
1146 }
1147 VisitRRR(this, kS390_SubDouble, node);
1148}
1149
1150void InstructionSelector::VisitFloat64SubPreserveNan(Node* node) {
1151 VisitRRR(this, kS390_SubDouble, node);
1152}
1153
1154void InstructionSelector::VisitFloat32Mul(Node* node) {
1155 VisitRRR(this, kS390_MulFloat, node);
1156}
1157
1158void InstructionSelector::VisitFloat64Mul(Node* node) {
1159 // TODO(mbrandy): detect negate
1160 VisitRRR(this, kS390_MulDouble, node);
1161}
1162
1163void InstructionSelector::VisitFloat32Div(Node* node) {
1164 VisitRRR(this, kS390_DivFloat, node);
1165}
1166
1167void InstructionSelector::VisitFloat64Div(Node* node) {
1168 VisitRRR(this, kS390_DivDouble, node);
1169}
1170
1171void InstructionSelector::VisitFloat64Mod(Node* node) {
1172 S390OperandGenerator g(this);
1173 Emit(kS390_ModDouble, g.DefineAsFixed(node, d1),
1174 g.UseFixed(node->InputAt(0), d1), g.UseFixed(node->InputAt(1), d2))
1175 ->MarkAsCall();
1176}
1177
1178void InstructionSelector::VisitFloat32Max(Node* node) { UNREACHABLE(); }
1179
1180void InstructionSelector::VisitFloat64Max(Node* node) { UNREACHABLE(); }
1181
1182void InstructionSelector::VisitFloat64SilenceNaN(Node* node) {
1183 VisitRR(this, kS390_Float64SilenceNaN, node);
1184}
1185
1186void InstructionSelector::VisitFloat32Min(Node* node) { UNREACHABLE(); }
1187
1188void InstructionSelector::VisitFloat64Min(Node* node) { UNREACHABLE(); }
1189
1190void InstructionSelector::VisitFloat32Abs(Node* node) {
1191 VisitRR(this, kS390_AbsFloat, node);
1192}
1193
1194void InstructionSelector::VisitFloat64Abs(Node* node) {
1195 VisitRR(this, kS390_AbsDouble, node);
1196}
1197
1198void InstructionSelector::VisitFloat32Sqrt(Node* node) {
1199 VisitRR(this, kS390_SqrtFloat, node);
1200}
1201
1202void InstructionSelector::VisitFloat64Ieee754Unop(Node* node,
1203 InstructionCode opcode) {
1204 S390OperandGenerator g(this);
1205 Emit(opcode, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0))
1206 ->MarkAsCall();
1207}
1208
1209void InstructionSelector::VisitFloat64Ieee754Binop(Node* node,
1210 InstructionCode opcode) {
1211 S390OperandGenerator g(this);
1212 Emit(opcode, g.DefineAsFixed(node, d0), g.UseFixed(node->InputAt(0), d0),
1213 g.UseFixed(node->InputAt(1), d2))
1214 ->MarkAsCall();
1215}
1216
1217void InstructionSelector::VisitFloat64Sqrt(Node* node) {
1218 VisitRR(this, kS390_SqrtDouble, node);
1219}
1220
1221void InstructionSelector::VisitFloat32RoundDown(Node* node) {
1222 VisitRR(this, kS390_FloorFloat, node);
1223}
1224
1225void InstructionSelector::VisitFloat64RoundDown(Node* node) {
1226 VisitRR(this, kS390_FloorDouble, node);
1227}
1228
1229void InstructionSelector::VisitFloat32RoundUp(Node* node) {
1230 VisitRR(this, kS390_CeilFloat, node);
1231}
1232
1233void InstructionSelector::VisitFloat64RoundUp(Node* node) {
1234 VisitRR(this, kS390_CeilDouble, node);
1235}
1236
1237void InstructionSelector::VisitFloat32RoundTruncate(Node* node) {
1238 VisitRR(this, kS390_TruncateFloat, node);
1239}
1240
1241void InstructionSelector::VisitFloat64RoundTruncate(Node* node) {
1242 VisitRR(this, kS390_TruncateDouble, node);
1243}
1244
1245void InstructionSelector::VisitFloat64RoundTiesAway(Node* node) {
1246 VisitRR(this, kS390_RoundDouble, node);
1247}
1248
1249void InstructionSelector::VisitFloat32RoundTiesEven(Node* node) {
1250 UNREACHABLE();
1251}
1252
1253void InstructionSelector::VisitFloat64RoundTiesEven(Node* node) {
1254 UNREACHABLE();
1255}
1256
1257void InstructionSelector::VisitFloat32Neg(Node* node) { UNREACHABLE(); }
1258
1259void InstructionSelector::VisitFloat64Neg(Node* node) { UNREACHABLE(); }
1260
1261void InstructionSelector::VisitInt32AddWithOverflow(Node* node) {
1262 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1263 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1264 return VisitBinop<Int32BinopMatcher>(this, node, kS390_AddWithOverflow32,
1265 kInt16Imm, &cont);
1266 }
1267 FlagsContinuation cont;
1268 VisitBinop<Int32BinopMatcher>(this, node, kS390_AddWithOverflow32, kInt16Imm,
1269 &cont);
1270}
1271
1272void InstructionSelector::VisitInt32SubWithOverflow(Node* node) {
1273 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1274 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1275 return VisitBinop<Int32BinopMatcher>(this, node, kS390_SubWithOverflow32,
1276 kInt16Imm_Negate, &cont);
1277 }
1278 FlagsContinuation cont;
1279 VisitBinop<Int32BinopMatcher>(this, node, kS390_SubWithOverflow32,
1280 kInt16Imm_Negate, &cont);
1281}
1282
1283#if V8_TARGET_ARCH_S390X
1284void InstructionSelector::VisitInt64AddWithOverflow(Node* node) {
1285 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1286 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1287 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm,
1288 &cont);
1289 }
1290 FlagsContinuation cont;
1291 VisitBinop<Int64BinopMatcher>(this, node, kS390_Add, kInt16Imm, &cont);
1292}
1293
1294void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
1295 if (Node* ovf = NodeProperties::FindProjection(node, 1)) {
1296 FlagsContinuation cont = FlagsContinuation::ForSet(kOverflow, ovf);
1297 return VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub,
1298 kInt16Imm_Negate, &cont);
1299 }
1300 FlagsContinuation cont;
1301 VisitBinop<Int64BinopMatcher>(this, node, kS390_Sub, kInt16Imm_Negate, &cont);
1302}
1303#endif
1304
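// Returns true if the continuation's condition requires an unsigned (logical)
// comparison rather than a signed one.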
1305static bool CompareLogical(FlagsContinuation* cont) {
1306 switch (cont->condition()) {
1307 case kUnsignedLessThan:
1308 case kUnsignedGreaterThanOrEqual:
1309 case kUnsignedLessThanOrEqual:
1310 case kUnsignedGreaterThan:
1311 return true;
1312 default:
1313 return false;
1314 }
1315 UNREACHABLE();
1316 return false;
1317}
1318
1319namespace {
1320
1321// Shared routine for multiple compare operations.
1322void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
1323 InstructionOperand left, InstructionOperand right,
1324 FlagsContinuation* cont) {
1325 S390OperandGenerator g(selector);
1326 opcode = cont->Encode(opcode);
1327 if (cont->IsBranch()) {
1328 selector->Emit(opcode, g.NoOutput(), left, right,
1329 g.Label(cont->true_block()), g.Label(cont->false_block()));
1330 } else if (cont->IsDeoptimize()) {
1331 selector->EmitDeoptimize(opcode, g.NoOutput(), left, right,
1332 cont->frame_state());
1333 } else {
1334 DCHECK(cont->IsSet());
1335 selector->Emit(opcode, g.DefineAsRegister(cont->result()), left, right);
1336 }
1337}
1338
1339// Shared routine for multiple word compare operations.
1340void VisitWordCompare(InstructionSelector* selector, Node* node,
1341 InstructionCode opcode, FlagsContinuation* cont,
1342 bool commutative, ImmediateMode immediate_mode) {
1343 S390OperandGenerator g(selector);
1344 Node* left = node->InputAt(0);
1345 Node* right = node->InputAt(1);
1346
1347 // Match immediates on left or right side of comparison.
1348 if (g.CanBeImmediate(right, immediate_mode)) {
1349 VisitCompare(selector, opcode, g.UseRegister(left), g.UseImmediate(right),
1350 cont);
1351 } else if (g.CanBeImmediate(left, immediate_mode)) {
1352 if (!commutative) cont->Commute();
1353 VisitCompare(selector, opcode, g.UseRegister(right), g.UseImmediate(left),
1354 cont);
1355 } else {
1356 VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
1357 cont);
1358 }
1359}
1360
1361void VisitWord32Compare(InstructionSelector* selector, Node* node,
1362 FlagsContinuation* cont) {
1363 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1364 VisitWordCompare(selector, node, kS390_Cmp32, cont, false, mode);
1365}
1366
1367#if V8_TARGET_ARCH_S390X
1368void VisitWord64Compare(InstructionSelector* selector, Node* node,
1369 FlagsContinuation* cont) {
1370 ImmediateMode mode = (CompareLogical(cont) ? kInt16Imm_Unsigned : kInt16Imm);
1371 VisitWordCompare(selector, node, kS390_Cmp64, cont, false, mode);
1372}
1373#endif
1374
1375// Shared routine for multiple float32 compare operations.
1376void VisitFloat32Compare(InstructionSelector* selector, Node* node,
1377 FlagsContinuation* cont) {
1378 S390OperandGenerator g(selector);
1379 Node* left = node->InputAt(0);
1380 Node* right = node->InputAt(1);
1381 VisitCompare(selector, kS390_CmpFloat, g.UseRegister(left),
1382 g.UseRegister(right), cont);
1383}
1384
1385// Shared routine for multiple float64 compare operations.
1386void VisitFloat64Compare(InstructionSelector* selector, Node* node,
1387 FlagsContinuation* cont) {
1388 S390OperandGenerator g(selector);
1389 Node* left = node->InputAt(0);
1390 Node* right = node->InputAt(1);
1391 VisitCompare(selector, kS390_CmpDouble, g.UseRegister(left),
1392 g.UseRegister(right), cont);
1393}
1394
1395// Shared routine for word comparisons against zero.
1396void VisitWordCompareZero(InstructionSelector* selector, Node* user,
1397 Node* value, InstructionCode opcode,
1398 FlagsContinuation* cont) {
1399 while (selector->CanCover(user, value)) {
1400 switch (value->opcode()) {
1401 case IrOpcode::kWord32Equal: {
1402 // Combine with comparisons against 0 by simply inverting the
1403 // continuation.
1404 Int32BinopMatcher m(value);
1405 if (m.right().Is(0)) {
1406 user = value;
1407 value = m.left().node();
1408 cont->Negate();
1409 continue;
1410 }
1411 cont->OverwriteAndNegateIfEqual(kEqual);
1412 return VisitWord32Compare(selector, value, cont);
1413 }
1414 case IrOpcode::kInt32LessThan:
1415 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1416 return VisitWord32Compare(selector, value, cont);
1417 case IrOpcode::kInt32LessThanOrEqual:
1418 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1419 return VisitWord32Compare(selector, value, cont);
1420 case IrOpcode::kUint32LessThan:
1421 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1422 return VisitWord32Compare(selector, value, cont);
1423 case IrOpcode::kUint32LessThanOrEqual:
1424 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1425 return VisitWord32Compare(selector, value, cont);
1426#if V8_TARGET_ARCH_S390X
1427 case IrOpcode::kWord64Equal:
1428 cont->OverwriteAndNegateIfEqual(kEqual);
1429 return VisitWord64Compare(selector, value, cont);
1430 case IrOpcode::kInt64LessThan:
1431 cont->OverwriteAndNegateIfEqual(kSignedLessThan);
1432 return VisitWord64Compare(selector, value, cont);
1433 case IrOpcode::kInt64LessThanOrEqual:
1434 cont->OverwriteAndNegateIfEqual(kSignedLessThanOrEqual);
1435 return VisitWord64Compare(selector, value, cont);
1436 case IrOpcode::kUint64LessThan:
1437 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1438 return VisitWord64Compare(selector, value, cont);
1439 case IrOpcode::kUint64LessThanOrEqual:
1440 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1441 return VisitWord64Compare(selector, value, cont);
1442#endif
1443 case IrOpcode::kFloat32Equal:
1444 cont->OverwriteAndNegateIfEqual(kEqual);
1445 return VisitFloat32Compare(selector, value, cont);
1446 case IrOpcode::kFloat32LessThan:
1447 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1448 return VisitFloat32Compare(selector, value, cont);
1449 case IrOpcode::kFloat32LessThanOrEqual:
1450 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1451 return VisitFloat32Compare(selector, value, cont);
1452 case IrOpcode::kFloat64Equal:
1453 cont->OverwriteAndNegateIfEqual(kEqual);
1454 return VisitFloat64Compare(selector, value, cont);
1455 case IrOpcode::kFloat64LessThan:
1456 cont->OverwriteAndNegateIfEqual(kUnsignedLessThan);
1457 return VisitFloat64Compare(selector, value, cont);
1458 case IrOpcode::kFloat64LessThanOrEqual:
1459 cont->OverwriteAndNegateIfEqual(kUnsignedLessThanOrEqual);
1460 return VisitFloat64Compare(selector, value, cont);
1461 case IrOpcode::kProjection:
1462 // Check if this is the overflow output projection of an
1463 // <Operation>WithOverflow node.
1464 if (ProjectionIndexOf(value->op()) == 1u) {
1465 // We cannot combine the <Operation>WithOverflow with this branch
1466 // unless the 0th projection (the use of the actual value of the
1467 // <Operation> is either nullptr, which means there's no use of the
1468 // actual value, or was already defined, which means it is scheduled
1469 // *AFTER* this branch).
1470 Node* const node = value->InputAt(0);
1471 Node* const result = NodeProperties::FindProjection(node, 0);
1472 if (result == nullptr || selector->IsDefined(result)) {
1473 switch (node->opcode()) {
1474 case IrOpcode::kInt32AddWithOverflow:
1475 cont->OverwriteAndNegateIfEqual(kOverflow);
1476 return VisitBinop<Int32BinopMatcher>(
1477 selector, node, kS390_AddWithOverflow32, kInt16Imm, cont);
1478 case IrOpcode::kInt32SubWithOverflow:
1479 cont->OverwriteAndNegateIfEqual(kOverflow);
1480 return VisitBinop<Int32BinopMatcher>(selector, node,
1481 kS390_SubWithOverflow32,
1482 kInt16Imm_Negate, cont);
1483#if V8_TARGET_ARCH_S390X
1484 case IrOpcode::kInt64AddWithOverflow:
1485 cont->OverwriteAndNegateIfEqual(kOverflow);
1486 return VisitBinop<Int64BinopMatcher>(selector, node, kS390_Add,
1487 kInt16Imm, cont);
1488 case IrOpcode::kInt64SubWithOverflow:
1489 cont->OverwriteAndNegateIfEqual(kOverflow);
1490 return VisitBinop<Int64BinopMatcher>(selector, node, kS390_Sub,
1491 kInt16Imm_Negate, cont);
1492#endif
1493 default:
1494 break;
1495 }
1496 }
1497 }
1498 break;
1499 case IrOpcode::kInt32Sub:
1500 return VisitWord32Compare(selector, value, cont);
1501 case IrOpcode::kWord32And:
1502 return VisitWordCompare(selector, value, kS390_Tst32, cont, true,
1503 kInt16Imm_Unsigned);
1504// TODO(mbrandy): Handle?
1505// case IrOpcode::kInt32Add:
1506// case IrOpcode::kWord32Or:
1507// case IrOpcode::kWord32Xor:
1508// case IrOpcode::kWord32Sar:
1509// case IrOpcode::kWord32Shl:
1510// case IrOpcode::kWord32Shr:
1511// case IrOpcode::kWord32Ror:
1512#if V8_TARGET_ARCH_S390X
1513 case IrOpcode::kInt64Sub:
1514 return VisitWord64Compare(selector, value, cont);
1515 case IrOpcode::kWord64And:
1516 return VisitWordCompare(selector, value, kS390_Tst64, cont, true,
1517 kInt16Imm_Unsigned);
1518// TODO(mbrandy): Handle?
1519// case IrOpcode::kInt64Add:
1520// case IrOpcode::kWord64Or:
1521// case IrOpcode::kWord64Xor:
1522// case IrOpcode::kWord64Sar:
1523// case IrOpcode::kWord64Shl:
1524// case IrOpcode::kWord64Shr:
1525// case IrOpcode::kWord64Ror:
1526#endif
1527 default:
1528 break;
1529 }
1530 break;
1531 }
1532
1533 // Branch could not be combined with a compare, emit compare against 0.
1534 S390OperandGenerator g(selector);
1535 VisitCompare(selector, opcode, g.UseRegister(value), g.TempImmediate(0),
1536 cont);
1537}
1538
1539void VisitWord32CompareZero(InstructionSelector* selector, Node* user,
1540 Node* value, FlagsContinuation* cont) {
1541 VisitWordCompareZero(selector, user, value, kS390_Cmp32, cont);
1542}
1543
1544#if V8_TARGET_ARCH_S390X
1545void VisitWord64CompareZero(InstructionSelector* selector, Node* user,
1546 Node* value, FlagsContinuation* cont) {
1547 VisitWordCompareZero(selector, user, value, kS390_Cmp64, cont);
1548}
1549#endif
1550
1551} // namespace
1552
1553void InstructionSelector::VisitBranch(Node* branch, BasicBlock* tbranch,
1554 BasicBlock* fbranch) {
1555 FlagsContinuation cont(kNotEqual, tbranch, fbranch);
1556 VisitWord32CompareZero(this, branch, branch->InputAt(0), &cont);
1557}
1558
1559void InstructionSelector::VisitDeoptimizeIf(Node* node) {
1560 FlagsContinuation cont =
1561 FlagsContinuation::ForDeoptimize(kNotEqual, node->InputAt(1));
1562 VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
1563}
1564
1565void InstructionSelector::VisitDeoptimizeUnless(Node* node) {
1566 FlagsContinuation cont =
1567 FlagsContinuation::ForDeoptimize(kEqual, node->InputAt(1));
1568 VisitWord32CompareZero(this, node, node->InputAt(0), &cont);
1569}
1570
1571void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
1572 S390OperandGenerator g(this);
1573 InstructionOperand value_operand = g.UseRegister(node->InputAt(0));
1574
1575 // Emit either ArchTableSwitch or ArchLookupSwitch.
1576 size_t table_space_cost = 4 + sw.value_range;
1577 size_t table_time_cost = 3;
1578 size_t lookup_space_cost = 3 + 2 * sw.case_count;
1579 size_t lookup_time_cost = sw.case_count;
  if (sw.case_count > 0 &&
      table_space_cost + 3 * table_time_cost <=
          lookup_space_cost + 3 * lookup_time_cost &&
      sw.min_value > std::numeric_limits<int32_t>::min()) {
    InstructionOperand index_operand = value_operand;
    if (sw.min_value) {
      index_operand = g.TempRegister();
      Emit(kS390_Sub, index_operand, value_operand,
           g.TempImmediate(sw.min_value));
    }
    // Generate a table lookup.
    return EmitTableSwitch(sw, index_operand);
  }

  // Generate a sequence of conditional jumps.
  return EmitLookupSwitch(sw, value_operand);
}

void InstructionSelector::VisitWord32Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int32BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWord32CompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitInt32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord32Compare(this, node, &cont);
}

void InstructionSelector::VisitUint32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord32Compare(this, node, &cont);
}

#if V8_TARGET_ARCH_S390X
void InstructionSelector::VisitWord64Equal(Node* const node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  Int64BinopMatcher m(node);
  if (m.right().Is(0)) {
    return VisitWord64CompareZero(this, m.node(), m.left().node(), &cont);
  }
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kSignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitInt64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kSignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitWord64Compare(this, node, &cont);
}

void InstructionSelector::VisitUint64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitWord64Compare(this, node, &cont);
}
#endif

void InstructionSelector::VisitFloat32Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat32LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat32Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64Equal(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThan(Node* node) {
  FlagsContinuation cont = FlagsContinuation::ForSet(kUnsignedLessThan, node);
  VisitFloat64Compare(this, node, &cont);
}

void InstructionSelector::VisitFloat64LessThanOrEqual(Node* node) {
  FlagsContinuation cont =
      FlagsContinuation::ForSet(kUnsignedLessThanOrEqual, node);
  VisitFloat64Compare(this, node, &cont);
}

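// Arguments to a C function call are poked into fixed stack slots starting at
// kStackFrameExtraParamSlot; for all other calls the first argument allocates
// the outgoing frame with kS390_PushFrame and the remaining arguments are
// stored into their slots, skipping alignment holes.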
void InstructionSelector::EmitPrepareArguments(
    ZoneVector<PushParameter>* arguments, const CallDescriptor* descriptor,
    Node* node) {
  S390OperandGenerator g(this);

  // Prepare for C function call.
  if (descriptor->IsCFunctionCall()) {
    Emit(kArchPrepareCallCFunction |
             MiscField::encode(static_cast<int>(descriptor->CParameterCount())),
         0, nullptr, 0, nullptr);

    // Poke any stack arguments.
    int slot = kStackFrameExtraParamSlot;
    for (PushParameter input : (*arguments)) {
      Emit(kS390_StoreToStackSlot, g.NoOutput(), g.UseRegister(input.node()),
           g.TempImmediate(slot));
      ++slot;
    }
  } else {
    // Push any stack arguments.
    int num_slots = static_cast<int>(descriptor->StackParameterCount());
    int slot = 0;
    for (PushParameter input : (*arguments)) {
      if (slot == 0) {
        DCHECK(input.node());
        Emit(kS390_PushFrame, g.NoOutput(), g.UseRegister(input.node()),
             g.TempImmediate(num_slots));
      } else {
        // Skip any alignment holes in pushed nodes.
        if (input.node()) {
          Emit(kS390_StoreToStackSlot, g.NoOutput(),
               g.UseRegister(input.node()), g.TempImmediate(slot));
        }
      }
      ++slot;
    }
  }
}

bool InstructionSelector::IsTailCallAddressImmediate() { return false; }

int InstructionSelector::GetTempsCountForTailCallFromJSFunction() { return 3; }

void InstructionSelector::VisitFloat64ExtractLowWord32(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_DoubleExtractLowWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

void InstructionSelector::VisitFloat64ExtractHighWord32(Node* node) {
  S390OperandGenerator g(this);
  Emit(kS390_DoubleExtractHighWord32, g.DefineAsRegister(node),
       g.UseRegister(node->InputAt(0)));
}

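// If a Float64InsertLowWord32 and a Float64InsertHighWord32 together assemble
// a complete double from two word32 halves, the pair is combined into a single
// kS390_DoubleConstruct instruction instead of two partial inserts.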
void InstructionSelector::VisitFloat64InsertLowWord32(Node* node) {
  S390OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertHighWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kS390_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(left),
         g.UseRegister(right));
    return;
  }
  Emit(kS390_DoubleInsertLowWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

void InstructionSelector::VisitFloat64InsertHighWord32(Node* node) {
  S390OperandGenerator g(this);
  Node* left = node->InputAt(0);
  Node* right = node->InputAt(1);
  if (left->opcode() == IrOpcode::kFloat64InsertLowWord32 &&
      CanCover(node, left)) {
    left = left->InputAt(1);
    Emit(kS390_DoubleConstruct, g.DefineAsRegister(node), g.UseRegister(right),
         g.UseRegister(left));
    return;
  }
  Emit(kS390_DoubleInsertHighWord32, g.DefineSameAsFirst(node),
       g.UseRegister(left), g.UseRegister(right));
}

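// Atomic loads select their opcode from the load representation and are
// emitted with a register+register (MRR) addressing mode.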
void InstructionSelector::VisitAtomicLoad(Node* node) {
  LoadRepresentation load_rep = LoadRepresentationOf(node->op());
  S390OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  ArchOpcode opcode = kArchNop;
  switch (load_rep.representation()) {
    case MachineRepresentation::kWord8:
      opcode = load_rep.IsSigned() ? kAtomicLoadInt8 : kAtomicLoadUint8;
      break;
    case MachineRepresentation::kWord16:
      opcode = load_rep.IsSigned() ? kAtomicLoadInt16 : kAtomicLoadUint16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kAtomicLoadWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }
  Emit(opcode | AddressingModeField::encode(kMode_MRR),
       g.DefineAsRegister(node), g.UseRegister(base), g.UseRegister(index));
}

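// Atomic stores select their opcode from the store representation; value, base
// and index are all placed in unique registers for the MRR addressing mode.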
void InstructionSelector::VisitAtomicStore(Node* node) {
  MachineRepresentation rep = AtomicStoreRepresentationOf(node->op());
  S390OperandGenerator g(this);
  Node* base = node->InputAt(0);
  Node* index = node->InputAt(1);
  Node* value = node->InputAt(2);
  ArchOpcode opcode = kArchNop;
  switch (rep) {
    case MachineRepresentation::kWord8:
      opcode = kAtomicStoreWord8;
      break;
    case MachineRepresentation::kWord16:
      opcode = kAtomicStoreWord16;
      break;
    case MachineRepresentation::kWord32:
      opcode = kAtomicStoreWord32;
      break;
    default:
      UNREACHABLE();
      return;
  }

  InstructionOperand inputs[4];
  size_t input_count = 0;
  inputs[input_count++] = g.UseUniqueRegister(value);
  inputs[input_count++] = g.UseUniqueRegister(base);
  inputs[input_count++] = g.UseUniqueRegister(index);
  Emit(opcode | AddressingModeField::encode(kMode_MRR), 0, nullptr, input_count,
       inputs);
}

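// Advertise the machine operators the S390 backend implements natively: the
// float rounding modes plus 32-bit and 64-bit population count.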
// static
MachineOperatorBuilder::Flags
InstructionSelector::SupportedMachineOperatorFlags() {
  return MachineOperatorBuilder::kFloat32RoundDown |
         MachineOperatorBuilder::kFloat64RoundDown |
         MachineOperatorBuilder::kFloat32RoundUp |
         MachineOperatorBuilder::kFloat64RoundUp |
         MachineOperatorBuilder::kFloat32RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTruncate |
         MachineOperatorBuilder::kFloat64RoundTiesAway |
         MachineOperatorBuilder::kWord32Popcnt |
         MachineOperatorBuilder::kWord64Popcnt;
}

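// No alignment requirements are imposed: unaligned loads and stores are
// reported as fully supported.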
// static
MachineOperatorBuilder::AlignmentRequirements
InstructionSelector::AlignmentRequirements() {
  return MachineOperatorBuilder::AlignmentRequirements::
      FullUnalignedAccessSupport();
}

}  // namespace compiler
}  // namespace internal
}  // namespace v8