blob: fece5963c625aaf4ad209c69bd4e0bc4d65bfa24 [file] [log] [blame]
Ben Murdochda12d292016-06-02 14:46:10 +01001// Copyright 2015 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
7#include "src/ast/scopes.h"
8#include "src/compiler/code-generator-impl.h"
9#include "src/compiler/gap-resolver.h"
10#include "src/compiler/node-matchers.h"
11#include "src/compiler/osr.h"
12#include "src/s390/macro-assembler-s390.h"
13
14namespace v8 {
15namespace internal {
16namespace compiler {
17
18#define __ masm()->
19
20#define kScratchReg ip
21
// Adds S390-specific methods to convert InstructionOperands.
class S390OperandConverter final : public InstructionOperandConverter {
 public:
  S390OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Number of output operands of the instruction being converted.
  size_t OutputCount() { return instr_->OutputCount(); }

  // Returns true when the instruction's flags condition is one of the
  // unsigned comparisons, i.e. a logical (unsigned) compare instruction
  // must be emitted rather than a signed one.
  bool CompareLogical() const {
    switch (instr_->flags_condition()) {
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
      case kUnsignedLessThanOrEqual:
      case kUnsignedGreaterThan:
        return true;
      default:
        return false;
    }
    // Not reached: every switch path above returns.
    UNREACHABLE();
    return false;
  }

  // Materializes the constant input at |index| as an assembler Operand.
  // Float constants are boxed as freshly allocated (TENURED) heap numbers;
  // kInt64 is only representable on 64-bit targets and falls through to
  // the unsupported cases otherwise.
  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
#if V8_TARGET_ARCH_S390X
        return Operand(constant.ToInt64());
#endif
      case Constant::kExternalReference:
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
    return Operand::Zero();
  }

  // Builds the MemOperand encoded by the instruction's addressing mode,
  // consuming inputs starting at |*first_index| and advancing it past the
  // consumed inputs. The decoded mode is stored through |mode| when
  // non-null.
  MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
    const size_t index = *first_index;
    if (mode) *mode = AddressingModeField::decode(instr_->opcode());
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        break;
      case kMode_MRI:  // base register + immediate displacement
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:  // base register + index register
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(r0);
  }

  // Convenience overload: takes the first input index by value so callers
  // that do not care about the updated index can omit it.
  MemOperand MemoryOperand(AddressingMode* mode = NULL,
                           size_t first_index = 0) {
    return MemoryOperand(mode, &first_index);
  }

  // Converts an allocated stack-slot operand into an fp/sp-relative
  // MemOperand.
  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index());
  }

  // Translates a frame slot number into an fp- or sp-based MemOperand,
  // depending on the current frame access state.
  MemOperand SlotToMemOperand(int slot) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};
101
102static inline bool HasRegisterInput(Instruction* instr, int index) {
103 return instr->InputAt(index)->IsRegister();
104}
105
106namespace {
107
// Out-of-line slow path for a checked float32 load whose index was out of
// bounds: materializes a quiet NaN into the result register.
class OutOfLineLoadNAN32 final : public OutOfLineCode {
 public:
  OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    // Loads the float (single-precision) quiet NaN pattern; the literal is
    // still placed in a double register.
    __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(),
                         kScratchReg);
  }

 private:
  DoubleRegister const result_;
};
121
// Out-of-line slow path for a checked float64 load whose index was out of
// bounds: materializes a quiet NaN into the result register.
class OutOfLineLoadNAN64 final : public OutOfLineCode {
 public:
  OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(),
                         kScratchReg);
  }

 private:
  DoubleRegister const result_;
};
135
// Out-of-line slow path for a checked integer load whose index was out of
// bounds: loads zero into the result register.
class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ LoadImmP(result_, Operand::Zero()); }

 private:
  Register const result_;
};
146
// Out-of-line slow path implementing the write barrier for
// kArchStoreWithWriteBarrier. Two constructors cover the two addressing
// forms: register offset (offset_immediate_ == 0) and immediate offset
// (offset_ == no_reg). When the surrounding frame was elided, the return
// address register r14 is saved/restored around the stub call.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(offset),
        offset_immediate_(0),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(no_reg),
        offset_immediate_(offset),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  void Generate() final {
    // Smis never need a write barrier.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Skip the barrier when the stored value's page is not marked as
    // "pointers to here are interesting".
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore r14 if the frame was elided.
      __ Push(r14);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // scratch1_ receives the absolute address of the written slot.
    if (offset_.is(no_reg)) {
      __ AddP(scratch1_, object_, Operand(offset_immediate_));
    } else {
      DCHECK_EQ(0, offset_immediate_);
      __ AddP(scratch1_, object_, offset_);
    }
    __ CallStub(&stub);
    if (must_save_lr_) {
      // We need to save and restore r14 if the frame was elided.
      __ Pop(r14);
    }
  }

 private:
  Register const object_;
  Register const offset_;
  int32_t const offset_immediate_;  // Valid if offset_.is(no_reg).
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  bool must_save_lr_;
};
216
// Maps a compiler FlagsCondition to the S390 assembler Condition used by
// the subsequent branch/set instruction. |op| is needed because the
// overflow conditions are only defined for the add/sub opcodes listed
// below (anything else hits UNREACHABLE).
Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
    case kUnsignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
    case kUnsignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
    case kUnsignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
    case kUnsignedGreaterThan:
      return gt;
    case kOverflow:
      // Overflow checked for AddP/SubP only.
      // lt/ge presumably reflect the condition left behind by the
      // Add/SubAndCheckForOverflow sequences (see the
      // ASSEMBLE_*_WITH_OVERFLOW macros) — TODO confirm against the
      // macro-assembler.
      switch (op) {
#if V8_TARGET_ARCH_S390X
        case kS390_Add:
        case kS390_Sub:
#endif
        case kS390_AddWithOverflow32:
        case kS390_SubWithOverflow32:
          return lt;
        default:
          break;
      }
      break;
    case kNotOverflow:
      switch (op) {
#if V8_TARGET_ARCH_S390X
        case kS390_Add:
        case kS390_Sub:
#endif
        case kS390_AddWithOverflow32:
        case kS390_SubWithOverflow32:
          return ge;
        default:
          break;
      }
      break;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}
268
269} // namespace
270
// Emits a unary FP instruction: output <- asm_instr(input0).
#define ASSEMBLE_FLOAT_UNOP(asm_instr)                                \
  do {                                                                \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0)); \
  } while (0)

// Emits a binary FP instruction: output <- asm_instr(input0, input1).
#define ASSEMBLE_FLOAT_BINOP(asm_instr)                              \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.InputDoubleRegister(1));                          \
  } while (0)

// Emits a binary GP instruction, selecting the register form when input 1
// is a register and the immediate (Operand) form otherwise.
#define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm)           \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1));                    \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1));                   \
    }                                                          \
  } while (0)

// Same as ASSEMBLE_BINOP but the immediate form takes a raw int32
// (InputInt32) instead of an assembler Operand.
#define ASSEMBLE_BINOP_INT(asm_instr_reg, asm_instr_imm)       \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1));                    \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputInt32(1));                       \
    }                                                          \
  } while (0)
303
// Emits an add that also computes an overflow indication into kScratchReg
// (register or immediate RHS).
#define ASSEMBLE_ADD_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputInt32(1), kScratchReg, r0);      \
    }                                                                   \
  } while (0)

// Subtract with overflow check. The immediate form negates the int32 and
// reuses AddAndCheckForOverflow.
// NOTE(review): negating i.InputInt32(1) is incorrect for INT_MIN (the
// negation itself overflows) — confirm the instruction selector never
// emits INT_MIN as an immediate here.
#define ASSEMBLE_SUB_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                -i.InputInt32(1), kScratchReg, r0);     \
    }                                                                   \
  } while (0)

#if V8_TARGET_ARCH_S390X
// On 64-bit targets the 32-bit variants additionally sign-extend and test
// the overflow indication so the condition reflects a 32-bit result.
#define ASSEMBLE_ADD_WITH_OVERFLOW32()                   \
  do {                                                   \
    ASSEMBLE_ADD_WITH_OVERFLOW();                        \
    __ LoadAndTestP_ExtendSrc(kScratchReg, kScratchReg); \
  } while (0)

#define ASSEMBLE_SUB_WITH_OVERFLOW32()                   \
  do {                                                   \
    ASSEMBLE_SUB_WITH_OVERFLOW();                        \
    __ LoadAndTestP_ExtendSrc(kScratchReg, kScratchReg); \
  } while (0)
#else
// On 32-bit targets the plain variants already operate on 32-bit values.
#define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW
#define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW
#endif
342
// Emits an integer compare, choosing the logical (unsigned) instruction
// when the instruction's flags condition is unsigned (see
// S390OperandConverter::CompareLogical), and register vs. immediate form
// based on input 1.
#define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr)                 \
  do {                                                          \
    if (HasRegisterInput(instr, 1)) {                           \
      if (i.CompareLogical()) {                                 \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1));  \
      } else {                                                  \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1));   \
      }                                                         \
    } else {                                                    \
      if (i.CompareLogical()) {                                 \
        __ cmpl_instr(i.InputRegister(0), i.InputImmediate(1)); \
      } else {                                                  \
        __ cmp_instr(i.InputRegister(0), i.InputImmediate(1));  \
      }                                                         \
    }                                                           \
  } while (0)
359
// Emits a floating-point compare of the two double-register inputs,
// setting the condition code for the subsequent branch/set instruction.
// Fix: the original expansion was missing the closing parenthesis of the
// cmp_instr call, so any instantiation of this macro failed to compile.
#define ASSEMBLE_FLOAT_COMPARE(cmp_instr)                             \
  do {                                                                \
    __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
  } while (0)
364
// Divide instruction dr will implicitly use register pair
// r0 & r1 below.
// R0:R1 = R1 / divisor - R0 remainder
// Copy remainder to output reg
#define ASSEMBLE_MODULO(div_instr, shift_instr) \
  do {                                          \
    __ LoadRR(r0, i.InputRegister(0));          \
    __ shift_instr(r0, Operand(32));            \
    __ div_instr(r0, i.InputRegister(1));       \
    __ ltr(i.OutputRegister(), r0);             \
  } while (0)

// Computes fmod of the two double inputs via a C runtime call
// (mod_two_doubles_operation); result lands in the output double register.
#define ASSEMBLE_FLOAT_MODULO()                                               \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ PrepareCallCFunction(0, 2, kScratchReg);                               \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                         \
                            i.InputDoubleRegister(1));                        \
    __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \
                     0, 2);                                                   \
    __ MovFromFloatResult(i.OutputDoubleRegister());                          \
  } while (0)
387
// Selects the larger of the two double inputs via compare-and-branch.
// NOTE(review): both scratch parameters are unused. With an unordered
// (NaN) operand the bge presumably falls through, so a NaN input selects
// InputDoubleRegister(1) — this is not IEEE-754 maxNum semantics; confirm
// callers do not rely on NaN propagation.
#define ASSEMBLE_FLOAT_MAX(double_scratch_reg, general_scratch_reg) \
  do {                                                              \
    Label ge, done;                                                 \
    __ cdbr(i.InputDoubleRegister(0), i.InputDoubleRegister(1));    \
    __ bge(&ge, Label::kNear);                                      \
    __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(1));    \
    __ b(&done, Label::kNear);                                      \
    __ bind(&ge);                                                   \
    __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));    \
    __ bind(&done);                                                 \
  } while (0)

// Selects the smaller of the two double inputs; mirror image of
// ASSEMBLE_FLOAT_MAX, with the same NaN caveat (a NaN operand selects
// InputDoubleRegister(0) here).
#define ASSEMBLE_FLOAT_MIN(double_scratch_reg, general_scratch_reg) \
  do {                                                              \
    Label ge, done;                                                 \
    __ cdbr(i.InputDoubleRegister(0), i.InputDoubleRegister(1));    \
    __ bge(&ge, Label::kNear);                                      \
    __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));    \
    __ b(&done, Label::kNear);                                      \
    __ bind(&ge);                                                   \
    __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(1));    \
    __ bind(&done);                                                 \
  } while (0)
411
// Only MRI mode for these instructions available
#define ASSEMBLE_LOAD_FLOAT(asm_instr)                \
  do {                                                \
    DoubleRegister result = i.OutputDoubleRegister(); \
    AddressingMode mode = kMode_None;                 \
    MemOperand operand = i.MemoryOperand(&mode);      \
    __ asm_instr(result, operand);                    \
  } while (0)

// Integer load counterpart of ASSEMBLE_LOAD_FLOAT.
#define ASSEMBLE_LOAD_INTEGER(asm_instr)         \
  do {                                           \
    Register result = i.OutputRegister();        \
    AddressingMode mode = kMode_None;            \
    MemOperand operand = i.MemoryOperand(&mode); \
    __ asm_instr(result, operand);               \
  } while (0)

// Stores a float32: the memory operand consumes the leading inputs and
// advances |index| to the value input.
#define ASSEMBLE_STORE_FLOAT32()                         \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ StoreFloat32(value, operand);                     \
  } while (0)

// Same as ASSEMBLE_STORE_FLOAT32 but for a float64 value.
#define ASSEMBLE_STORE_DOUBLE()                          \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ StoreDouble(value, operand);                      \
  } while (0)

// Integer store; the value register follows the memory-operand inputs.
#define ASSEMBLE_STORE_INTEGER(asm_instr)                \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    Register value = i.InputRegister(index);             \
    __ asm_instr(value, operand);                        \
  } while (0)
455
// Bounds-checked float load: compares the offset register against the
// length (input 2, register or immediate); out-of-range loads branch to an
// out-of-line path that yields NaN (width selects the NAN32/NAN64 class).
// CleanUInt32 presumably zero-extends the 32-bit offset for 64-bit
// addressing — TODO confirm against the macro-assembler.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, width)              \
  do {                                                             \
    DoubleRegister result = i.OutputDoubleRegister();              \
    size_t index = 0;                                              \
    AddressingMode mode = kMode_None;                              \
    MemOperand operand = i.MemoryOperand(&mode, index);            \
    Register offset = operand.rb();                                \
    if (HasRegisterInput(instr, 2)) {                              \
      __ CmpLogical32(offset, i.InputRegister(2));                 \
    } else {                                                       \
      __ CmpLogical32(offset, i.InputImmediate(2));                \
    }                                                              \
    auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \
    __ bge(ool->entry());                                          \
    __ CleanUInt32(offset);                                        \
    __ asm_instr(result, operand);                                 \
    __ bind(ool->exit());                                          \
  } while (0)

// Bounds-checked integer load; out-of-range loads yield zero via
// OutOfLineLoadZero.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)             \
  do {                                                       \
    Register result = i.OutputRegister();                    \
    size_t index = 0;                                        \
    AddressingMode mode = kMode_None;                        \
    MemOperand operand = i.MemoryOperand(&mode, index);      \
    Register offset = operand.rb();                          \
    if (HasRegisterInput(instr, 2)) {                        \
      __ CmpLogical32(offset, i.InputRegister(2));           \
    } else {                                                 \
      __ CmpLogical32(offset, i.InputImmediate(2));          \
    }                                                        \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ bge(ool->entry());                                    \
    __ CleanUInt32(offset);                                  \
    __ asm_instr(result, operand);                           \
    __ bind(ool->exit());                                    \
  } while (0)

// Bounds-checked float32 store: out-of-range stores are skipped entirely.
#define ASSEMBLE_CHECKED_STORE_FLOAT32()                \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    Register offset = operand.rb();                     \
    if (HasRegisterInput(instr, 2)) {                   \
      __ CmpLogical32(offset, i.InputRegister(2));      \
    } else {                                            \
      __ CmpLogical32(offset, i.InputImmediate(2));     \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    __ CleanUInt32(offset);                             \
    __ StoreFloat32(value, operand);                    \
    __ bind(&done);                                     \
  } while (0)

// Bounds-checked float64 store.
// NOTE(review): only this variant asserts kMode_MRR; the sibling macros
// omit the DCHECK — confirm whether they should assert it too.
#define ASSEMBLE_CHECKED_STORE_DOUBLE()                 \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    if (HasRegisterInput(instr, 2)) {                   \
      __ CmpLogical32(offset, i.InputRegister(2));      \
    } else {                                            \
      __ CmpLogical32(offset, i.InputImmediate(2));     \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    __ CleanUInt32(offset);                             \
    __ StoreDouble(value, operand);                     \
    __ bind(&done);                                     \
  } while (0)

// Bounds-checked integer store; out-of-range stores are skipped.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)       \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    Register offset = operand.rb();                     \
    if (HasRegisterInput(instr, 2)) {                   \
      __ CmpLogical32(offset, i.InputRegister(2));      \
    } else {                                            \
      __ CmpLogical32(offset, i.InputImmediate(2));     \
    }                                                   \
    __ bge(&done);                                      \
    Register value = i.InputRegister(3);                \
    __ CleanUInt32(offset);                             \
    __ asm_instr(value, operand);                       \
    __ bind(&done);                                     \
  } while (0)
551
// Emits the epilogue that tears down the current (MANUAL) stack frame.
void CodeGenerator::AssembleDeconstructFrame() {
  __ LeaveFrame(StackFrame::MANUAL);
}
555
// Pops stack slots belonging to the current activation record before a
// tail call (positive slot delta shrinks the stack) and resets frame
// accesses to the default base register.
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize));
  }
  frame_access_state()->SetFrameAccessToDefault();
}
563
// Prepares the stack for a tail call: a negative slot delta grows the
// stack here (tracked in the frame access state), the caller's frame
// state is restored when a frame exists, and subsequent frame accesses
// are switched to sp-relative.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ AddP(sp, sp, Operand(sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ RestoreFrameStateForTailCall();
  }
  frame_access_state()->SetFrameAccessToSP();
}
575
// If the current frame is an arguments adaptor frame, drops it (together
// with the adapted arguments) so a tail call targets the real caller
// frame. |args_reg| holds the callee's formal argument count; the three
// scratch registers must not alias it.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(scratch1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
600
601// Assembles an instruction after register allocation, producing machine code.
Ben Murdochc5610432016-08-08 18:44:38 +0100602CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
603 Instruction* instr) {
Ben Murdochda12d292016-06-02 14:46:10 +0100604 S390OperandConverter i(this, instr);
605 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());
606
607 switch (opcode) {
608 case kArchCallCodeObject: {
609 EnsureSpaceForLazyDeopt();
610 if (HasRegisterInput(instr, 0)) {
611 __ AddP(ip, i.InputRegister(0),
612 Operand(Code::kHeaderSize - kHeapObjectTag));
613 __ Call(ip);
614 } else {
615 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
616 RelocInfo::CODE_TARGET);
617 }
618 RecordCallPosition(instr);
619 frame_access_state()->ClearSPDelta();
620 break;
621 }
622 case kArchTailCallCodeObjectFromJSFunction:
623 case kArchTailCallCodeObject: {
624 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
625 AssembleDeconstructActivationRecord(stack_param_delta);
626 if (opcode == kArchTailCallCodeObjectFromJSFunction) {
627 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
628 i.TempRegister(0), i.TempRegister(1),
629 i.TempRegister(2));
630 }
631 if (HasRegisterInput(instr, 0)) {
632 __ AddP(ip, i.InputRegister(0),
633 Operand(Code::kHeaderSize - kHeapObjectTag));
634 __ Jump(ip);
635 } else {
636 // We cannot use the constant pool to load the target since
637 // we've already restored the caller's frame.
638 ConstantPoolUnavailableScope constant_pool_unavailable(masm());
639 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
640 RelocInfo::CODE_TARGET);
641 }
642 frame_access_state()->ClearSPDelta();
643 break;
644 }
Ben Murdochc5610432016-08-08 18:44:38 +0100645 case kArchTailCallAddress: {
646 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
647 AssembleDeconstructActivationRecord(stack_param_delta);
648 CHECK(!instr->InputAt(0)->IsImmediate());
649 __ Jump(i.InputRegister(0));
650 frame_access_state()->ClearSPDelta();
651 break;
652 }
Ben Murdochda12d292016-06-02 14:46:10 +0100653 case kArchCallJSFunction: {
654 EnsureSpaceForLazyDeopt();
655 Register func = i.InputRegister(0);
656 if (FLAG_debug_code) {
657 // Check the function's context matches the context argument.
658 __ LoadP(kScratchReg,
659 FieldMemOperand(func, JSFunction::kContextOffset));
660 __ CmpP(cp, kScratchReg);
661 __ Assert(eq, kWrongFunctionContext);
662 }
663 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
664 __ Call(ip);
665 RecordCallPosition(instr);
666 frame_access_state()->ClearSPDelta();
667 break;
668 }
669 case kArchTailCallJSFunctionFromJSFunction:
670 case kArchTailCallJSFunction: {
671 Register func = i.InputRegister(0);
672 if (FLAG_debug_code) {
673 // Check the function's context matches the context argument.
674 __ LoadP(kScratchReg,
675 FieldMemOperand(func, JSFunction::kContextOffset));
676 __ CmpP(cp, kScratchReg);
677 __ Assert(eq, kWrongFunctionContext);
678 }
679 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
680 AssembleDeconstructActivationRecord(stack_param_delta);
681 if (opcode == kArchTailCallJSFunctionFromJSFunction) {
682 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
683 i.TempRegister(0), i.TempRegister(1),
684 i.TempRegister(2));
685 }
686 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
687 __ Jump(ip);
688 frame_access_state()->ClearSPDelta();
689 break;
690 }
691 case kArchPrepareCallCFunction: {
692 int const num_parameters = MiscField::decode(instr->opcode());
693 __ PrepareCallCFunction(num_parameters, kScratchReg);
694 // Frame alignment requires using FP-relative frame addressing.
695 frame_access_state()->SetFrameAccessToFP();
696 break;
697 }
698 case kArchPrepareTailCall:
699 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
700 break;
701 case kArchCallCFunction: {
702 int const num_parameters = MiscField::decode(instr->opcode());
703 if (instr->InputAt(0)->IsImmediate()) {
704 ExternalReference ref = i.InputExternalReference(0);
705 __ CallCFunction(ref, num_parameters);
706 } else {
707 Register func = i.InputRegister(0);
708 __ CallCFunction(func, num_parameters);
709 }
710 frame_access_state()->SetFrameAccessToDefault();
711 frame_access_state()->ClearSPDelta();
712 break;
713 }
714 case kArchJmp:
715 AssembleArchJump(i.InputRpo(0));
716 break;
717 case kArchLookupSwitch:
718 AssembleArchLookupSwitch(instr);
719 break;
720 case kArchTableSwitch:
721 AssembleArchTableSwitch(instr);
722 break;
723 case kArchNop:
724 case kArchThrowTerminator:
725 // don't emit code for nops.
726 break;
727 case kArchDeoptimize: {
728 int deopt_state_id =
729 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
730 Deoptimizer::BailoutType bailout_type =
731 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
Ben Murdochc5610432016-08-08 18:44:38 +0100732 CodeGenResult result =
733 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
734 if (result != kSuccess) return result;
Ben Murdochda12d292016-06-02 14:46:10 +0100735 break;
736 }
737 case kArchRet:
738 AssembleReturn();
739 break;
740 case kArchStackPointer:
741 __ LoadRR(i.OutputRegister(), sp);
742 break;
743 case kArchFramePointer:
744 __ LoadRR(i.OutputRegister(), fp);
745 break;
746 case kArchParentFramePointer:
747 if (frame_access_state()->has_frame()) {
748 __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
749 } else {
750 __ LoadRR(i.OutputRegister(), fp);
751 }
752 break;
753 case kArchTruncateDoubleToI:
754 // TODO(mbrandy): move slow call to stub out of line.
755 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
756 break;
757 case kArchStoreWithWriteBarrier: {
758 RecordWriteMode mode =
759 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
760 Register object = i.InputRegister(0);
761 Register value = i.InputRegister(2);
762 Register scratch0 = i.TempRegister(0);
763 Register scratch1 = i.TempRegister(1);
764 OutOfLineRecordWrite* ool;
765
766 AddressingMode addressing_mode =
767 AddressingModeField::decode(instr->opcode());
768 if (addressing_mode == kMode_MRI) {
769 int32_t offset = i.InputInt32(1);
770 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
771 scratch0, scratch1, mode);
772 __ StoreP(value, MemOperand(object, offset));
773 } else {
774 DCHECK_EQ(kMode_MRR, addressing_mode);
775 Register offset(i.InputRegister(1));
776 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
777 scratch0, scratch1, mode);
778 __ StoreP(value, MemOperand(object, offset));
779 }
780 __ CheckPageFlag(object, scratch0,
781 MemoryChunk::kPointersFromHereAreInterestingMask, ne,
782 ool->entry());
783 __ bind(ool->exit());
784 break;
785 }
786 case kArchStackSlot: {
787 FrameOffset offset =
788 frame_access_state()->GetFrameOffset(i.InputInt32(0));
789 __ AddP(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
790 Operand(offset.offset()));
791 break;
792 }
793 case kS390_And:
794 ASSEMBLE_BINOP(AndP, AndP);
795 break;
796 case kS390_AndComplement:
797 __ NotP(i.InputRegister(1));
798 __ AndP(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
799 break;
800 case kS390_Or:
801 ASSEMBLE_BINOP(OrP, OrP);
802 break;
803 case kS390_OrComplement:
804 __ NotP(i.InputRegister(1));
805 __ OrP(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
806 break;
807 case kS390_Xor:
808 ASSEMBLE_BINOP(XorP, XorP);
809 break;
810 case kS390_ShiftLeft32:
811 if (HasRegisterInput(instr, 1)) {
812 if (i.OutputRegister().is(i.InputRegister(1)) &&
813 !CpuFeatures::IsSupported(DISTINCT_OPS)) {
814 __ LoadRR(kScratchReg, i.InputRegister(1));
815 __ ShiftLeft(i.OutputRegister(), i.InputRegister(0), kScratchReg);
816 } else {
817 ASSEMBLE_BINOP(ShiftLeft, ShiftLeft);
818 }
819 } else {
820 ASSEMBLE_BINOP(ShiftLeft, ShiftLeft);
821 }
822 __ LoadlW(i.OutputRegister(0), i.OutputRegister(0));
823 break;
824#if V8_TARGET_ARCH_S390X
825 case kS390_ShiftLeft64:
826 ASSEMBLE_BINOP(sllg, sllg);
827 break;
828#endif
829 case kS390_ShiftRight32:
830 if (HasRegisterInput(instr, 1)) {
831 if (i.OutputRegister().is(i.InputRegister(1)) &&
832 !CpuFeatures::IsSupported(DISTINCT_OPS)) {
833 __ LoadRR(kScratchReg, i.InputRegister(1));
834 __ ShiftRight(i.OutputRegister(), i.InputRegister(0), kScratchReg);
835 } else {
836 ASSEMBLE_BINOP(ShiftRight, ShiftRight);
837 }
838 } else {
839 ASSEMBLE_BINOP(ShiftRight, ShiftRight);
840 }
841 __ LoadlW(i.OutputRegister(0), i.OutputRegister(0));
842 break;
843#if V8_TARGET_ARCH_S390X
844 case kS390_ShiftRight64:
845 ASSEMBLE_BINOP(srlg, srlg);
846 break;
847#endif
848 case kS390_ShiftRightArith32:
849 if (HasRegisterInput(instr, 1)) {
850 if (i.OutputRegister().is(i.InputRegister(1)) &&
851 !CpuFeatures::IsSupported(DISTINCT_OPS)) {
852 __ LoadRR(kScratchReg, i.InputRegister(1));
853 __ ShiftRightArith(i.OutputRegister(), i.InputRegister(0),
854 kScratchReg);
855 } else {
856 ASSEMBLE_BINOP(ShiftRightArith, ShiftRightArith);
857 }
858 } else {
859 ASSEMBLE_BINOP(ShiftRightArith, ShiftRightArith);
860 }
861 __ LoadlW(i.OutputRegister(), i.OutputRegister());
862 break;
863#if V8_TARGET_ARCH_S390X
864 case kS390_ShiftRightArith64:
865 ASSEMBLE_BINOP(srag, srag);
866 break;
867#endif
868#if !V8_TARGET_ARCH_S390X
869 case kS390_AddPair:
870 // i.InputRegister(0) ... left low word.
871 // i.InputRegister(1) ... left high word.
872 // i.InputRegister(2) ... right low word.
873 // i.InputRegister(3) ... right high word.
874 __ AddLogical32(i.OutputRegister(0), i.InputRegister(0),
875 i.InputRegister(2));
876 __ AddLogicalWithCarry32(i.OutputRegister(1), i.InputRegister(1),
877 i.InputRegister(3));
878 break;
879 case kS390_SubPair:
880 // i.InputRegister(0) ... left low word.
881 // i.InputRegister(1) ... left high word.
882 // i.InputRegister(2) ... right low word.
883 // i.InputRegister(3) ... right high word.
884 __ SubLogical32(i.OutputRegister(0), i.InputRegister(0),
885 i.InputRegister(2));
886 __ SubLogicalWithBorrow32(i.OutputRegister(1), i.InputRegister(1),
887 i.InputRegister(3));
888 break;
889 case kS390_MulPair:
890 // i.InputRegister(0) ... left low word.
891 // i.InputRegister(1) ... left high word.
892 // i.InputRegister(2) ... right low word.
893 // i.InputRegister(3) ... right high word.
894 __ sllg(r0, i.InputRegister(1), Operand(32));
895 __ sllg(r1, i.InputRegister(3), Operand(32));
896 __ lr(r0, i.InputRegister(0));
897 __ lr(r1, i.InputRegister(2));
898 __ msgr(r1, r0);
899 __ lr(i.OutputRegister(0), r1);
900 __ srag(i.OutputRegister(1), r1, Operand(32));
901 break;
902 case kS390_ShiftLeftPair:
903 if (instr->InputAt(2)->IsImmediate()) {
904 __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
905 i.InputRegister(0), i.InputRegister(1),
906 i.InputInt32(2));
907 } else {
908 __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
909 i.InputRegister(0), i.InputRegister(1), kScratchReg,
910 i.InputRegister(2));
911 }
912 break;
913 case kS390_ShiftRightPair:
914 if (instr->InputAt(2)->IsImmediate()) {
915 __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
916 i.InputRegister(0), i.InputRegister(1),
917 i.InputInt32(2));
918 } else {
919 __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
920 i.InputRegister(0), i.InputRegister(1), kScratchReg,
921 i.InputRegister(2));
922 }
923 break;
924 case kS390_ShiftRightArithPair:
925 if (instr->InputAt(2)->IsImmediate()) {
926 __ ShiftRightArithPair(i.OutputRegister(0), i.OutputRegister(1),
927 i.InputRegister(0), i.InputRegister(1),
928 i.InputInt32(2));
929 } else {
930 __ ShiftRightArithPair(i.OutputRegister(0), i.OutputRegister(1),
931 i.InputRegister(0), i.InputRegister(1),
932 kScratchReg, i.InputRegister(2));
933 }
934 break;
935#endif
936 case kS390_RotRight32:
937 if (HasRegisterInput(instr, 1)) {
938 __ LoadComplementRR(kScratchReg, i.InputRegister(1));
939 __ rll(i.OutputRegister(), i.InputRegister(0), kScratchReg);
940 } else {
941 __ rll(i.OutputRegister(), i.InputRegister(0),
942 Operand(32 - i.InputInt32(1)));
943 }
944 break;
945#if V8_TARGET_ARCH_S390X
946 case kS390_RotRight64:
947 if (HasRegisterInput(instr, 1)) {
948 __ LoadComplementRR(kScratchReg, i.InputRegister(1));
949 __ rllg(i.OutputRegister(), i.InputRegister(0), kScratchReg);
950 } else {
951 __ rllg(i.OutputRegister(), i.InputRegister(0),
952 Operand(64 - i.InputInt32(1)));
953 }
954 break;
955#endif
956 case kS390_Not:
957 __ LoadRR(i.OutputRegister(), i.InputRegister(0));
958 __ NotP(i.OutputRegister());
959 break;
960 case kS390_RotLeftAndMask32:
961 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
962 int shiftAmount = i.InputInt32(1);
963 int endBit = 63 - i.InputInt32(3);
964 int startBit = 63 - i.InputInt32(2);
965 __ rll(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
966 __ risbg(i.OutputRegister(), i.OutputRegister(), Operand(startBit),
967 Operand(endBit), Operand::Zero(), true);
968 } else {
969 int shiftAmount = i.InputInt32(1);
970 int clearBitLeft = 63 - i.InputInt32(2);
971 int clearBitRight = i.InputInt32(3);
972 __ rll(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
973 __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBitLeft));
974 __ srlg(i.OutputRegister(), i.OutputRegister(),
975 Operand((clearBitLeft + clearBitRight)));
976 __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBitRight));
977 }
978 break;
979#if V8_TARGET_ARCH_S390X
980 case kS390_RotLeftAndClear64:
981 UNIMPLEMENTED(); // Find correct instruction
982 break;
983 case kS390_RotLeftAndClearLeft64:
984 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
985 int shiftAmount = i.InputInt32(1);
986 int endBit = 63;
987 int startBit = 63 - i.InputInt32(2);
988 __ risbg(i.OutputRegister(), i.InputRegister(0), Operand(startBit),
989 Operand(endBit), Operand(shiftAmount), true);
990 } else {
991 int shiftAmount = i.InputInt32(1);
992 int clearBit = 63 - i.InputInt32(2);
993 __ rllg(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
994 __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
995 __ srlg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
996 }
997 break;
998 case kS390_RotLeftAndClearRight64:
999 if (CpuFeatures::IsSupported(GENERAL_INSTR_EXT)) {
1000 int shiftAmount = i.InputInt32(1);
1001 int endBit = 63 - i.InputInt32(2);
1002 int startBit = 0;
1003 __ risbg(i.OutputRegister(), i.InputRegister(0), Operand(startBit),
1004 Operand(endBit), Operand(shiftAmount), true);
1005 } else {
1006 int shiftAmount = i.InputInt32(1);
1007 int clearBit = i.InputInt32(2);
1008 __ rllg(i.OutputRegister(), i.InputRegister(0), Operand(shiftAmount));
1009 __ srlg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
1010 __ sllg(i.OutputRegister(), i.OutputRegister(), Operand(clearBit));
1011 }
1012 break;
1013#endif
1014 case kS390_Add:
1015#if V8_TARGET_ARCH_S390X
1016 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
1017 ASSEMBLE_ADD_WITH_OVERFLOW();
1018 } else {
1019#endif
1020 ASSEMBLE_BINOP(AddP, AddP);
1021#if V8_TARGET_ARCH_S390X
1022 }
1023#endif
1024 break;
1025 case kS390_AddWithOverflow32:
1026 ASSEMBLE_ADD_WITH_OVERFLOW32();
1027 break;
1028 case kS390_AddFloat:
1029 // Ensure we don't clobber right/InputReg(1)
1030 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1031 ASSEMBLE_FLOAT_UNOP(aebr);
1032 } else {
1033 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0)))
1034 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1035 __ aebr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1036 }
1037 break;
1038 case kS390_AddDouble:
1039 // Ensure we don't clobber right/InputReg(1)
1040 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1041 ASSEMBLE_FLOAT_UNOP(adbr);
1042 } else {
1043 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0)))
1044 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1045 __ adbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1046 }
1047 break;
1048 case kS390_Sub:
1049#if V8_TARGET_ARCH_S390X
1050 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
1051 ASSEMBLE_SUB_WITH_OVERFLOW();
1052 } else {
1053#endif
1054 ASSEMBLE_BINOP(SubP, SubP);
1055#if V8_TARGET_ARCH_S390X
1056 }
1057#endif
1058 break;
1059 case kS390_SubWithOverflow32:
1060 ASSEMBLE_SUB_WITH_OVERFLOW32();
1061 break;
1062 case kS390_SubFloat:
1063 // OutputDoubleReg() = i.InputDoubleRegister(0) - i.InputDoubleRegister(1)
1064 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1065 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1));
1066 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1067 __ sebr(i.OutputDoubleRegister(), kScratchDoubleReg);
1068 } else {
1069 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) {
1070 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1071 }
1072 __ sebr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1073 }
1074 break;
1075 case kS390_SubDouble:
1076 // OutputDoubleReg() = i.InputDoubleRegister(0) - i.InputDoubleRegister(1)
1077 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1078 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1));
1079 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1080 __ sdbr(i.OutputDoubleRegister(), kScratchDoubleReg);
1081 } else {
1082 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0))) {
1083 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1084 }
1085 __ sdbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1086 }
1087 break;
1088 case kS390_Mul32:
1089#if V8_TARGET_ARCH_S390X
1090 case kS390_Mul64:
1091#endif
1092 __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1093 break;
1094 case kS390_MulHigh32:
1095 __ LoadRR(r1, i.InputRegister(0));
1096 __ mr_z(r0, i.InputRegister(1));
1097 __ LoadW(i.OutputRegister(), r0);
1098 break;
1099 case kS390_MulHighU32:
1100 __ LoadRR(r1, i.InputRegister(0));
1101 __ mlr(r0, i.InputRegister(1));
1102 __ LoadlW(i.OutputRegister(), r0);
1103 break;
1104 case kS390_MulFloat:
1105 // Ensure we don't clobber right
1106 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1107 ASSEMBLE_FLOAT_UNOP(meebr);
1108 } else {
1109 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0)))
1110 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1111 __ meebr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1112 }
1113 break;
1114 case kS390_MulDouble:
1115 // Ensure we don't clobber right
1116 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1117 ASSEMBLE_FLOAT_UNOP(mdbr);
1118 } else {
1119 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0)))
1120 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1121 __ mdbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1122 }
1123 break;
1124#if V8_TARGET_ARCH_S390X
1125 case kS390_Div64:
1126 __ LoadRR(r1, i.InputRegister(0));
1127 __ dsgr(r0, i.InputRegister(1)); // R1: Dividend
1128 __ ltgr(i.OutputRegister(), r1); // Copy R1: Quotient to output
1129 break;
1130#endif
1131 case kS390_Div32:
1132 __ LoadRR(r0, i.InputRegister(0));
1133 __ srda(r0, Operand(32));
1134 __ dr(r0, i.InputRegister(1));
1135 __ LoadAndTestP_ExtendSrc(i.OutputRegister(),
1136 r1); // Copy R1: Quotient to output
1137 break;
1138#if V8_TARGET_ARCH_S390X
1139 case kS390_DivU64:
1140 __ LoadRR(r1, i.InputRegister(0));
1141 __ LoadImmP(r0, Operand::Zero());
1142 __ dlgr(r0, i.InputRegister(1)); // R0:R1: Dividend
1143 __ ltgr(i.OutputRegister(), r1); // Copy R1: Quotient to output
1144 break;
1145#endif
1146 case kS390_DivU32:
1147 __ LoadRR(r0, i.InputRegister(0));
1148 __ srdl(r0, Operand(32));
1149 __ dlr(r0, i.InputRegister(1)); // R0:R1: Dividend
1150 __ LoadlW(i.OutputRegister(), r1); // Copy R1: Quotient to output
1151 __ LoadAndTestP_ExtendSrc(r1, r1);
1152 break;
1153
1154 case kS390_DivFloat:
1155 // InputDoubleRegister(1)=InputDoubleRegister(0)/InputDoubleRegister(1)
1156 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1157 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1));
1158 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1159 __ debr(i.OutputDoubleRegister(), kScratchDoubleReg);
1160 } else {
1161 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0)))
1162 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1163 __ debr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1164 }
1165 break;
1166 case kS390_DivDouble:
1167 // InputDoubleRegister(1)=InputDoubleRegister(0)/InputDoubleRegister(1)
1168 if (i.OutputDoubleRegister().is(i.InputDoubleRegister(1))) {
1169 __ ldr(kScratchDoubleReg, i.InputDoubleRegister(1));
1170 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1171 __ ddbr(i.OutputDoubleRegister(), kScratchDoubleReg);
1172 } else {
1173 if (!i.OutputDoubleRegister().is(i.InputDoubleRegister(0)))
1174 __ ldr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1175 __ ddbr(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
1176 }
1177 break;
1178 case kS390_Mod32:
1179 ASSEMBLE_MODULO(dr, srda);
1180 break;
1181 case kS390_ModU32:
1182 ASSEMBLE_MODULO(dlr, srdl);
1183 break;
1184#if V8_TARGET_ARCH_S390X
1185 case kS390_Mod64:
1186 __ LoadRR(r1, i.InputRegister(0));
1187 __ dsgr(r0, i.InputRegister(1)); // R1: Dividend
1188 __ ltgr(i.OutputRegister(), r0); // Copy R0: Remainder to output
1189 break;
1190 case kS390_ModU64:
1191 __ LoadRR(r1, i.InputRegister(0));
1192 __ LoadImmP(r0, Operand::Zero());
1193 __ dlgr(r0, i.InputRegister(1)); // R0:R1: Dividend
1194 __ ltgr(i.OutputRegister(), r0); // Copy R0: Remainder to output
1195 break;
1196#endif
1197 case kS390_AbsFloat:
1198 __ lpebr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1199 break;
1200 case kS390_SqrtFloat:
1201 ASSEMBLE_FLOAT_UNOP(sqebr);
1202 break;
1203 case kS390_FloorFloat:
1204 __ fiebra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1205 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_NEG_INF);
1206 break;
1207 case kS390_CeilFloat:
1208 __ fiebra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1209 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_POS_INF);
1210 break;
1211 case kS390_TruncateFloat:
1212 __ fiebra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1213 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_0);
1214 break;
1215 // Double operations
1216 case kS390_ModDouble:
1217 ASSEMBLE_FLOAT_MODULO();
1218 break;
1219 case kS390_Neg:
1220 __ LoadComplementRR(i.OutputRegister(), i.InputRegister(0));
1221 break;
1222 case kS390_MaxDouble:
1223 ASSEMBLE_FLOAT_MAX(kScratchDoubleReg, kScratchReg);
1224 break;
1225 case kS390_MinDouble:
1226 ASSEMBLE_FLOAT_MIN(kScratchDoubleReg, kScratchReg);
1227 break;
1228 case kS390_AbsDouble:
1229 __ lpdbr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1230 break;
1231 case kS390_SqrtDouble:
1232 ASSEMBLE_FLOAT_UNOP(sqdbr);
1233 break;
1234 case kS390_FloorDouble:
1235 __ fidbra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1236 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_NEG_INF);
1237 break;
1238 case kS390_CeilDouble:
1239 __ fidbra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1240 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_POS_INF);
1241 break;
1242 case kS390_TruncateDouble:
1243 __ fidbra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1244 v8::internal::Assembler::FIDBRA_ROUND_TOWARD_0);
1245 break;
1246 case kS390_RoundDouble:
1247 __ fidbra(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1248 v8::internal::Assembler::FIDBRA_ROUND_TO_NEAREST_AWAY_FROM_0);
1249 break;
1250 case kS390_NegDouble:
1251 ASSEMBLE_FLOAT_UNOP(lcdbr);
1252 break;
1253 case kS390_Cntlz32: {
1254 __ llgfr(i.OutputRegister(), i.InputRegister(0));
1255 __ flogr(r0, i.OutputRegister());
1256 __ LoadRR(i.OutputRegister(), r0);
1257 __ SubP(i.OutputRegister(), Operand(32));
1258 } break;
1259#if V8_TARGET_ARCH_S390X
1260 case kS390_Cntlz64: {
1261 __ flogr(r0, i.InputRegister(0));
1262 __ LoadRR(i.OutputRegister(), r0);
1263 } break;
1264#endif
1265 case kS390_Popcnt32:
1266 __ Popcnt32(i.OutputRegister(), i.InputRegister(0));
1267 break;
1268#if V8_TARGET_ARCH_S390X
1269 case kS390_Popcnt64:
1270 __ Popcnt64(i.OutputRegister(), i.InputRegister(0));
1271 break;
1272#endif
1273 case kS390_Cmp32:
1274 ASSEMBLE_COMPARE(Cmp32, CmpLogical32);
1275 break;
1276#if V8_TARGET_ARCH_S390X
1277 case kS390_Cmp64:
1278 ASSEMBLE_COMPARE(CmpP, CmpLogicalP);
1279 break;
1280#endif
1281 case kS390_CmpFloat:
1282 __ cebr(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1283 break;
1284 case kS390_CmpDouble:
1285 __ cdbr(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1286 break;
1287 case kS390_Tst32:
1288 if (HasRegisterInput(instr, 1)) {
1289 __ AndP(r0, i.InputRegister(0), i.InputRegister(1));
1290 } else {
1291 __ AndP(r0, i.InputRegister(0), i.InputImmediate(1));
1292 }
1293 __ LoadAndTestP_ExtendSrc(r0, r0);
1294 break;
1295#if V8_TARGET_ARCH_S390X
1296 case kS390_Tst64:
1297 if (HasRegisterInput(instr, 1)) {
1298 __ AndP(r0, i.InputRegister(0), i.InputRegister(1));
1299 } else {
1300 __ AndP(r0, i.InputRegister(0), i.InputImmediate(1));
1301 }
1302 break;
1303#endif
1304 case kS390_Push:
Ben Murdochc5610432016-08-08 18:44:38 +01001305 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdochda12d292016-06-02 14:46:10 +01001306 __ lay(sp, MemOperand(sp, -kDoubleSize));
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001307 __ StoreDouble(i.InputDoubleRegister(0), MemOperand(sp));
Ben Murdochda12d292016-06-02 14:46:10 +01001308 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1309 } else {
1310 __ Push(i.InputRegister(0));
1311 frame_access_state()->IncreaseSPDelta(1);
1312 }
1313 break;
1314 case kS390_PushFrame: {
1315 int num_slots = i.InputInt32(1);
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001316 __ lay(sp, MemOperand(sp, -num_slots * kPointerSize));
Ben Murdochc5610432016-08-08 18:44:38 +01001317 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdochda12d292016-06-02 14:46:10 +01001318 __ StoreDouble(i.InputDoubleRegister(0),
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001319 MemOperand(sp));
Ben Murdochda12d292016-06-02 14:46:10 +01001320 } else {
1321 __ StoreP(i.InputRegister(0),
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001322 MemOperand(sp));
Ben Murdochda12d292016-06-02 14:46:10 +01001323 }
Ben Murdochda12d292016-06-02 14:46:10 +01001324 break;
1325 }
1326 case kS390_StoreToStackSlot: {
1327 int slot = i.InputInt32(1);
Ben Murdochc5610432016-08-08 18:44:38 +01001328 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdochda12d292016-06-02 14:46:10 +01001329 __ StoreDouble(i.InputDoubleRegister(0),
1330 MemOperand(sp, slot * kPointerSize));
1331 } else {
1332 __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize));
1333 }
1334 break;
1335 }
1336 case kS390_ExtendSignWord8:
1337#if V8_TARGET_ARCH_S390X
1338 __ lgbr(i.OutputRegister(), i.InputRegister(0));
1339#else
1340 __ lbr(i.OutputRegister(), i.InputRegister(0));
1341#endif
1342 break;
1343 case kS390_ExtendSignWord16:
1344#if V8_TARGET_ARCH_S390X
1345 __ lghr(i.OutputRegister(), i.InputRegister(0));
1346#else
1347 __ lhr(i.OutputRegister(), i.InputRegister(0));
1348#endif
1349 break;
1350#if V8_TARGET_ARCH_S390X
1351 case kS390_ExtendSignWord32:
1352 __ lgfr(i.OutputRegister(), i.InputRegister(0));
1353 break;
1354 case kS390_Uint32ToUint64:
1355 // Zero extend
1356 __ llgfr(i.OutputRegister(), i.InputRegister(0));
1357 break;
1358 case kS390_Int64ToInt32:
1359 // sign extend
1360 __ lgfr(i.OutputRegister(), i.InputRegister(0));
1361 break;
1362 case kS390_Int64ToFloat32:
1363 __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister());
1364 break;
1365 case kS390_Int64ToDouble:
1366 __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister());
1367 break;
1368 case kS390_Uint64ToFloat32:
1369 __ ConvertUnsignedInt64ToFloat(i.InputRegister(0),
1370 i.OutputDoubleRegister());
1371 break;
1372 case kS390_Uint64ToDouble:
1373 __ ConvertUnsignedInt64ToDouble(i.InputRegister(0),
1374 i.OutputDoubleRegister());
1375 break;
1376#endif
1377 case kS390_Int32ToFloat32:
1378 __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister());
1379 break;
1380 case kS390_Int32ToDouble:
1381 __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister());
1382 break;
1383 case kS390_Uint32ToFloat32:
1384 __ ConvertUnsignedIntToFloat(i.InputRegister(0),
1385 i.OutputDoubleRegister());
1386 break;
1387 case kS390_Uint32ToDouble:
1388 __ ConvertUnsignedIntToDouble(i.InputRegister(0),
1389 i.OutputDoubleRegister());
1390 break;
1391 case kS390_DoubleToInt32:
1392 case kS390_DoubleToUint32:
1393 case kS390_DoubleToInt64: {
1394#if V8_TARGET_ARCH_S390X
1395 bool check_conversion =
1396 (opcode == kS390_DoubleToInt64 && i.OutputCount() > 1);
1397#endif
1398 __ ConvertDoubleToInt64(i.InputDoubleRegister(0),
1399#if !V8_TARGET_ARCH_S390X
1400 kScratchReg,
1401#endif
1402 i.OutputRegister(0), kScratchDoubleReg);
1403#if V8_TARGET_ARCH_S390X
1404 if (check_conversion) {
1405 Label conversion_done;
1406 __ LoadImmP(i.OutputRegister(1), Operand::Zero());
1407 __ b(Condition(1), &conversion_done); // special case
1408 __ LoadImmP(i.OutputRegister(1), Operand(1));
1409 __ bind(&conversion_done);
1410 }
1411#endif
1412 break;
1413 }
1414 case kS390_Float32ToInt32: {
1415 bool check_conversion = (i.OutputCount() > 1);
1416 __ ConvertFloat32ToInt32(i.InputDoubleRegister(0), i.OutputRegister(0),
1417 kScratchDoubleReg);
1418 if (check_conversion) {
1419 Label conversion_done;
1420 __ LoadImmP(i.OutputRegister(1), Operand::Zero());
1421 __ b(Condition(1), &conversion_done); // special case
1422 __ LoadImmP(i.OutputRegister(1), Operand(1));
1423 __ bind(&conversion_done);
1424 }
1425 break;
1426 }
1427 case kS390_Float32ToUint32: {
1428 bool check_conversion = (i.OutputCount() > 1);
1429 __ ConvertFloat32ToUnsignedInt32(i.InputDoubleRegister(0),
1430 i.OutputRegister(0), kScratchDoubleReg);
1431 if (check_conversion) {
1432 Label conversion_done;
1433 __ LoadImmP(i.OutputRegister(1), Operand::Zero());
1434 __ b(Condition(1), &conversion_done); // special case
1435 __ LoadImmP(i.OutputRegister(1), Operand(1));
1436 __ bind(&conversion_done);
1437 }
1438 break;
1439 }
1440#if V8_TARGET_ARCH_S390X
1441 case kS390_Float32ToUint64: {
1442 bool check_conversion = (i.OutputCount() > 1);
1443 __ ConvertFloat32ToUnsignedInt64(i.InputDoubleRegister(0),
1444 i.OutputRegister(0), kScratchDoubleReg);
1445 if (check_conversion) {
1446 Label conversion_done;
1447 __ LoadImmP(i.OutputRegister(1), Operand::Zero());
1448 __ b(Condition(1), &conversion_done); // special case
1449 __ LoadImmP(i.OutputRegister(1), Operand(1));
1450 __ bind(&conversion_done);
1451 }
1452 break;
1453 }
1454#endif
1455 case kS390_Float32ToInt64: {
1456#if V8_TARGET_ARCH_S390X
1457 bool check_conversion =
1458 (opcode == kS390_Float32ToInt64 && i.OutputCount() > 1);
1459#endif
1460 __ ConvertFloat32ToInt64(i.InputDoubleRegister(0),
1461#if !V8_TARGET_ARCH_S390X
1462 kScratchReg,
1463#endif
1464 i.OutputRegister(0), kScratchDoubleReg);
1465#if V8_TARGET_ARCH_S390X
1466 if (check_conversion) {
1467 Label conversion_done;
1468 __ LoadImmP(i.OutputRegister(1), Operand::Zero());
1469 __ b(Condition(1), &conversion_done); // special case
1470 __ LoadImmP(i.OutputRegister(1), Operand(1));
1471 __ bind(&conversion_done);
1472 }
1473#endif
1474 break;
1475 }
1476#if V8_TARGET_ARCH_S390X
1477 case kS390_DoubleToUint64: {
1478 bool check_conversion = (i.OutputCount() > 1);
1479 __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0),
1480 i.OutputRegister(0), kScratchDoubleReg);
1481 if (check_conversion) {
1482 Label conversion_done;
1483 __ LoadImmP(i.OutputRegister(1), Operand::Zero());
1484 __ b(Condition(1), &conversion_done); // special case
1485 __ LoadImmP(i.OutputRegister(1), Operand(1));
1486 __ bind(&conversion_done);
1487 }
1488 break;
1489 }
1490#endif
1491 case kS390_DoubleToFloat32:
1492 __ ledbr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1493 break;
1494 case kS390_Float32ToDouble:
1495 __ ldebr(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1496 break;
1497 case kS390_DoubleExtractLowWord32:
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001498 __ lgdr(i.OutputRegister(), i.InputDoubleRegister(0));
1499 __ llgfr(i.OutputRegister(), i.OutputRegister());
Ben Murdochda12d292016-06-02 14:46:10 +01001500 break;
1501 case kS390_DoubleExtractHighWord32:
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001502 __ lgdr(i.OutputRegister(), i.InputDoubleRegister(0));
1503 __ srlg(i.OutputRegister(), i.OutputRegister(), Operand(32));
Ben Murdochda12d292016-06-02 14:46:10 +01001504 break;
1505 case kS390_DoubleInsertLowWord32:
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001506 __ lgdr(kScratchReg, i.OutputDoubleRegister());
1507 __ lr(kScratchReg, i.InputRegister(1));
1508 __ ldgr(i.OutputDoubleRegister(), kScratchReg);
Ben Murdochda12d292016-06-02 14:46:10 +01001509 break;
1510 case kS390_DoubleInsertHighWord32:
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001511 __ sllg(kScratchReg, i.InputRegister(1), Operand(32));
1512 __ lgdr(r0, i.OutputDoubleRegister());
1513 __ lr(kScratchReg, r0);
1514 __ ldgr(i.OutputDoubleRegister(), kScratchReg);
Ben Murdochda12d292016-06-02 14:46:10 +01001515 break;
1516 case kS390_DoubleConstruct:
Ben Murdoch6f5d0ea2016-06-15 14:45:46 +01001517 __ sllg(kScratchReg, i.InputRegister(0), Operand(32));
1518 __ lr(kScratchReg, i.InputRegister(1));
1519
1520 // Bitwise convert from GPR to FPR
1521 __ ldgr(i.OutputDoubleRegister(), kScratchReg);
Ben Murdochda12d292016-06-02 14:46:10 +01001522 break;
1523 case kS390_LoadWordS8:
1524 ASSEMBLE_LOAD_INTEGER(LoadlB);
1525#if V8_TARGET_ARCH_S390X
1526 __ lgbr(i.OutputRegister(), i.OutputRegister());
1527#else
1528 __ lbr(i.OutputRegister(), i.OutputRegister());
1529#endif
1530 break;
1531 case kS390_BitcastFloat32ToInt32:
1532 __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1533 break;
1534 case kS390_BitcastInt32ToFloat32:
1535 __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
1536 break;
1537#if V8_TARGET_ARCH_S390X
1538 case kS390_BitcastDoubleToInt64:
1539 __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
1540 break;
1541 case kS390_BitcastInt64ToDouble:
1542 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
1543 break;
1544#endif
1545 case kS390_LoadWordU8:
1546 ASSEMBLE_LOAD_INTEGER(LoadlB);
1547 break;
1548 case kS390_LoadWordU16:
1549 ASSEMBLE_LOAD_INTEGER(LoadLogicalHalfWordP);
1550 break;
1551 case kS390_LoadWordS16:
1552 ASSEMBLE_LOAD_INTEGER(LoadHalfWordP);
1553 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001554 case kS390_LoadWordU32:
1555 ASSEMBLE_LOAD_INTEGER(LoadlW);
1556 break;
Ben Murdochda12d292016-06-02 14:46:10 +01001557 case kS390_LoadWordS32:
1558 ASSEMBLE_LOAD_INTEGER(LoadW);
1559 break;
1560#if V8_TARGET_ARCH_S390X
1561 case kS390_LoadWord64:
1562 ASSEMBLE_LOAD_INTEGER(lg);
1563 break;
1564#endif
1565 case kS390_LoadFloat32:
1566 ASSEMBLE_LOAD_FLOAT(LoadFloat32);
1567 break;
1568 case kS390_LoadDouble:
1569 ASSEMBLE_LOAD_FLOAT(LoadDouble);
1570 break;
1571 case kS390_StoreWord8:
1572 ASSEMBLE_STORE_INTEGER(StoreByte);
1573 break;
1574 case kS390_StoreWord16:
1575 ASSEMBLE_STORE_INTEGER(StoreHalfWord);
1576 break;
1577 case kS390_StoreWord32:
1578 ASSEMBLE_STORE_INTEGER(StoreW);
1579 break;
1580#if V8_TARGET_ARCH_S390X
1581 case kS390_StoreWord64:
1582 ASSEMBLE_STORE_INTEGER(StoreP);
1583 break;
1584#endif
1585 case kS390_StoreFloat32:
1586 ASSEMBLE_STORE_FLOAT32();
1587 break;
1588 case kS390_StoreDouble:
1589 ASSEMBLE_STORE_DOUBLE();
1590 break;
1591 case kCheckedLoadInt8:
1592 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadlB);
1593#if V8_TARGET_ARCH_S390X
1594 __ lgbr(i.OutputRegister(), i.OutputRegister());
1595#else
1596 __ lbr(i.OutputRegister(), i.OutputRegister());
1597#endif
1598 break;
1599 case kCheckedLoadUint8:
1600 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadlB);
1601 break;
1602 case kCheckedLoadInt16:
1603 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadHalfWordP);
1604 break;
1605 case kCheckedLoadUint16:
1606 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadLogicalHalfWordP);
1607 break;
1608 case kCheckedLoadWord32:
Ben Murdochc5610432016-08-08 18:44:38 +01001609 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadlW);
Ben Murdochda12d292016-06-02 14:46:10 +01001610 break;
1611 case kCheckedLoadWord64:
1612#if V8_TARGET_ARCH_S390X
1613 ASSEMBLE_CHECKED_LOAD_INTEGER(LoadP);
1614#else
1615 UNREACHABLE();
1616#endif
1617 break;
1618 case kCheckedLoadFloat32:
1619 ASSEMBLE_CHECKED_LOAD_FLOAT(LoadFloat32, 32);
1620 break;
1621 case kCheckedLoadFloat64:
1622 ASSEMBLE_CHECKED_LOAD_FLOAT(LoadDouble, 64);
1623 break;
1624 case kCheckedStoreWord8:
1625 ASSEMBLE_CHECKED_STORE_INTEGER(StoreByte);
1626 break;
1627 case kCheckedStoreWord16:
1628 ASSEMBLE_CHECKED_STORE_INTEGER(StoreHalfWord);
1629 break;
1630 case kCheckedStoreWord32:
1631 ASSEMBLE_CHECKED_STORE_INTEGER(StoreW);
1632 break;
1633 case kCheckedStoreWord64:
1634#if V8_TARGET_ARCH_S390X
1635 ASSEMBLE_CHECKED_STORE_INTEGER(StoreP);
1636#else
1637 UNREACHABLE();
1638#endif
1639 break;
1640 case kCheckedStoreFloat32:
1641 ASSEMBLE_CHECKED_STORE_FLOAT32();
1642 break;
1643 case kCheckedStoreFloat64:
1644 ASSEMBLE_CHECKED_STORE_DOUBLE();
1645 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001646 case kAtomicLoadInt8:
1647 __ LoadB(i.OutputRegister(), i.MemoryOperand());
1648 break;
1649 case kAtomicLoadUint8:
1650 __ LoadlB(i.OutputRegister(), i.MemoryOperand());
1651 break;
1652 case kAtomicLoadInt16:
1653 __ LoadHalfWordP(i.OutputRegister(), i.MemoryOperand());
1654 break;
1655 case kAtomicLoadUint16:
1656 __ LoadLogicalHalfWordP(i.OutputRegister(), i.MemoryOperand());
1657 break;
1658 case kAtomicLoadWord32:
1659 __ LoadlW(i.OutputRegister(), i.MemoryOperand());
1660 break;
1661 case kAtomicStoreWord8:
1662 __ StoreByte(i.InputRegister(0), i.MemoryOperand(NULL, 1));
1663 break;
1664 case kAtomicStoreWord16:
1665 __ StoreHalfWord(i.InputRegister(0), i.MemoryOperand(NULL, 1));
1666 break;
1667 case kAtomicStoreWord32:
1668 __ StoreW(i.InputRegister(0), i.MemoryOperand(NULL, 1));
1669 break;
Ben Murdochda12d292016-06-02 14:46:10 +01001670 default:
1671 UNREACHABLE();
1672 break;
1673 }
Ben Murdochc5610432016-08-08 18:44:38 +01001674 return kSuccess;
Ben Murdochda12d292016-06-02 14:46:10 +01001675} // NOLINT(readability/fn_size)
1676
1677// Assembles branches after an instruction.
1678void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
1679 S390OperandConverter i(this, instr);
1680 Label* tlabel = branch->true_label;
1681 Label* flabel = branch->false_label;
1682 ArchOpcode op = instr->arch_opcode();
1683 FlagsCondition condition = branch->condition;
1684
1685 Condition cond = FlagsConditionToCondition(condition, op);
1686 if (op == kS390_CmpDouble) {
1687 // check for unordered if necessary
1688 // Branching to flabel/tlabel according to what's expected by tests
1689 if (cond == le || cond == eq || cond == lt) {
1690 __ bunordered(flabel);
1691 } else if (cond == gt || cond == ne || cond == ge) {
1692 __ bunordered(tlabel);
1693 }
1694 }
1695 __ b(cond, tlabel);
1696 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
1697}
1698
1699void CodeGenerator::AssembleArchJump(RpoNumber target) {
1700 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
1701}
1702
1703// Assembles boolean materializations after an instruction.
1704void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1705 FlagsCondition condition) {
1706 S390OperandConverter i(this, instr);
1707 Label done;
1708 ArchOpcode op = instr->arch_opcode();
1709 bool check_unordered = (op == kS390_CmpDouble || kS390_CmpFloat);
1710
1711 // Overflow checked for add/sub only.
1712 DCHECK((condition != kOverflow && condition != kNotOverflow) ||
1713 (op == kS390_AddWithOverflow32 || op == kS390_SubWithOverflow32) ||
1714 (op == kS390_Add || op == kS390_Sub));
1715
1716 // Materialize a full 32-bit 1 or 0 value. The result register is always the
1717 // last output of the instruction.
1718 DCHECK_NE(0u, instr->OutputCount());
1719 Register reg = i.OutputRegister(instr->OutputCount() - 1);
1720 Condition cond = FlagsConditionToCondition(condition, op);
1721 switch (cond) {
1722 case ne:
1723 case ge:
1724 case gt:
1725 if (check_unordered) {
1726 __ LoadImmP(reg, Operand(1));
1727 __ LoadImmP(kScratchReg, Operand::Zero());
1728 __ bunordered(&done);
1729 Label cond_true;
1730 __ b(cond, &cond_true, Label::kNear);
1731 __ LoadRR(reg, kScratchReg);
1732 __ bind(&cond_true);
1733 } else {
1734 Label cond_true, done_here;
1735 __ LoadImmP(reg, Operand(1));
1736 __ b(cond, &cond_true, Label::kNear);
1737 __ LoadImmP(reg, Operand::Zero());
1738 __ bind(&cond_true);
1739 }
1740 break;
1741 case eq:
1742 case lt:
1743 case le:
1744 if (check_unordered) {
1745 __ LoadImmP(reg, Operand::Zero());
1746 __ LoadImmP(kScratchReg, Operand(1));
1747 __ bunordered(&done);
1748 Label cond_false;
1749 __ b(NegateCondition(cond), &cond_false, Label::kNear);
1750 __ LoadRR(reg, kScratchReg);
1751 __ bind(&cond_false);
1752 } else {
1753 __ LoadImmP(reg, Operand::Zero());
1754 Label cond_false;
1755 __ b(NegateCondition(cond), &cond_false, Label::kNear);
1756 __ LoadImmP(reg, Operand(1));
1757 __ bind(&cond_false);
1758 }
1759 break;
1760 default:
1761 UNREACHABLE();
1762 break;
1763 }
1764 __ bind(&done);
1765}
1766
1767void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1768 S390OperandConverter i(this, instr);
1769 Register input = i.InputRegister(0);
1770 for (size_t index = 2; index < instr->InputCount(); index += 2) {
Ben Murdochc5610432016-08-08 18:44:38 +01001771 __ Cmp32(input, Operand(i.InputInt32(index + 0)));
Ben Murdochda12d292016-06-02 14:46:10 +01001772 __ beq(GetLabel(i.InputRpo(index + 1)));
1773 }
1774 AssembleArchJump(i.InputRpo(1));
1775}
1776
1777void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1778 S390OperandConverter i(this, instr);
1779 Register input = i.InputRegister(0);
1780 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
1781 Label** cases = zone()->NewArray<Label*>(case_count);
1782 for (int32_t index = 0; index < case_count; ++index) {
1783 cases[index] = GetLabel(i.InputRpo(index + 2));
1784 }
1785 Label* const table = AddJumpTable(cases, case_count);
1786 __ CmpLogicalP(input, Operand(case_count));
1787 __ bge(GetLabel(i.InputRpo(1)));
1788 __ larl(kScratchReg, table);
1789 __ ShiftLeftP(r1, input, Operand(kPointerSizeLog2));
1790 __ LoadP(kScratchReg, MemOperand(kScratchReg, r1));
1791 __ Jump(kScratchReg);
1792}
1793
Ben Murdochc5610432016-08-08 18:44:38 +01001794CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdochda12d292016-06-02 14:46:10 +01001795 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
1796 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
1797 isolate(), deoptimization_id, bailout_type);
1798 // TODO(turbofan): We should be able to generate better code by sharing the
1799 // actual final call site and just bl'ing to it here, similar to what we do
1800 // in the lithium backend.
Ben Murdochc5610432016-08-08 18:44:38 +01001801 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdochda12d292016-06-02 14:46:10 +01001802 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochc5610432016-08-08 18:44:38 +01001803 return kSuccess;
Ben Murdochda12d292016-06-02 14:46:10 +01001804}
1805
Ben Murdochc5610432016-08-08 18:44:38 +01001806void CodeGenerator::FinishFrame(Frame* frame) {
1807 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1808 const RegList double_saves = descriptor->CalleeSavedFPRegisters();
1809
1810 // Save callee-saved Double registers.
1811 if (double_saves != 0) {
1812 frame->AlignSavedCalleeRegisterSlots();
1813 DCHECK(kNumCalleeSavedDoubles ==
1814 base::bits::CountPopulation32(double_saves));
1815 frame->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
1816 (kDoubleSize / kPointerSize));
1817 }
1818 // Save callee-saved registers.
1819 const RegList saves = descriptor->CalleeSavedRegisters();
1820 if (saves != 0) {
1821 // register save area does not include the fp or constant pool pointer.
1822 const int num_saves = kNumCalleeSaved - 1;
1823 DCHECK(num_saves == base::bits::CountPopulation32(saves));
1824 frame->AllocateSavedCalleeRegisterSlots(num_saves);
1825 }
1826}
1827
1828void CodeGenerator::AssembleConstructFrame() {
Ben Murdochda12d292016-06-02 14:46:10 +01001829 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1830
1831 if (frame_access_state()->has_frame()) {
1832 if (descriptor->IsCFunctionCall()) {
1833 __ Push(r14, fp);
1834 __ LoadRR(fp, sp);
1835 } else if (descriptor->IsJSFunctionCall()) {
1836 __ Prologue(this->info()->GeneratePreagedPrologue(), ip);
1837 } else {
1838 StackFrame::Type type = info()->GetOutputStackFrameType();
1839 // TODO(mbrandy): Detect cases where ip is the entrypoint (for
1840 // efficient intialization of the constant pool pointer register).
1841 __ StubPrologue(type);
1842 }
1843 }
1844
Ben Murdochc5610432016-08-08 18:44:38 +01001845 int shrink_slots = frame()->GetSpillSlotCount();
Ben Murdochda12d292016-06-02 14:46:10 +01001846 if (info()->is_osr()) {
1847 // TurboFan OSR-compiled functions cannot be entered directly.
1848 __ Abort(kShouldNotDirectlyEnterOsrFunction);
1849
1850 // Unoptimized code jumps directly to this entrypoint while the unoptimized
1851 // frame is still on the stack. Optimized code uses OSR values directly from
1852 // the unoptimized frame. Thus, all that needs to be done is to allocate the
1853 // remaining stack slots.
1854 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
1855 osr_pc_offset_ = __ pc_offset();
Ben Murdochc5610432016-08-08 18:44:38 +01001856 shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
Ben Murdochda12d292016-06-02 14:46:10 +01001857 }
1858
1859 const RegList double_saves = descriptor->CalleeSavedFPRegisters();
Ben Murdochc5610432016-08-08 18:44:38 +01001860 if (shrink_slots > 0) {
1861 __ lay(sp, MemOperand(sp, -shrink_slots * kPointerSize));
Ben Murdochda12d292016-06-02 14:46:10 +01001862 }
1863
1864 // Save callee-saved Double registers.
1865 if (double_saves != 0) {
1866 __ MultiPushDoubles(double_saves);
1867 DCHECK(kNumCalleeSavedDoubles ==
1868 base::bits::CountPopulation32(double_saves));
Ben Murdochda12d292016-06-02 14:46:10 +01001869 }
1870
1871 // Save callee-saved registers.
1872 const RegList saves = descriptor->CalleeSavedRegisters();
1873 if (saves != 0) {
1874 __ MultiPush(saves);
1875 // register save area does not include the fp or constant pool pointer.
Ben Murdochda12d292016-06-02 14:46:10 +01001876 }
1877}
1878
1879void CodeGenerator::AssembleReturn() {
1880 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1881 int pop_count = static_cast<int>(descriptor->StackParameterCount());
1882
1883 // Restore registers.
1884 const RegList saves = descriptor->CalleeSavedRegisters();
1885 if (saves != 0) {
1886 __ MultiPop(saves);
1887 }
1888
1889 // Restore double registers.
1890 const RegList double_saves = descriptor->CalleeSavedFPRegisters();
1891 if (double_saves != 0) {
1892 __ MultiPopDoubles(double_saves);
1893 }
1894
1895 if (descriptor->IsCFunctionCall()) {
1896 AssembleDeconstructFrame();
1897 } else if (frame_access_state()->has_frame()) {
1898 // Canonicalize JSFunction return sites for now.
1899 if (return_label_.is_bound()) {
1900 __ b(&return_label_);
1901 return;
1902 } else {
1903 __ bind(&return_label_);
1904 AssembleDeconstructFrame();
1905 }
1906 }
1907 __ Ret(pop_count);
1908}
1909
// Emits code for a gap move between two instruction operands: register,
// stack slot, constant, FP register, or FP stack slot. kScratchReg /
// kScratchDoubleReg are used as temporaries for memory-to-memory moves.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  S390OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Move(g.ToRegister(destination), src);
    } else {
      __ StoreP(src, g.ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(g.ToRegister(destination), src);
    } else {
      // Slot-to-slot: bounce through the scratch register.
      Register temp = kScratchReg;
      __ LoadP(temp, src, r0);
      __ StoreP(temp, g.ToMemOperand(destination));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize into the destination register, or into the scratch
      // register if the destination is a stack slot (spilled below).
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          // WASM memory references need their reloc mode recorded so the
          // embedded constant can be patched later. On 64-bit,
          // WASM_MEMORY_REFERENCE is a 64-bit constant (kInt64 case below).
#if V8_TARGET_ARCH_S390X
          if (src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
#else
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
              src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
#endif
            __ mov(dst, Operand(src.ToInt32(), src.rmode()));
          } else {
            __ mov(dst, Operand(src.ToInt32()));
          }
          break;
        case Constant::kInt64:
#if V8_TARGET_ARCH_S390X
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE) {
            __ mov(dst, Operand(src.ToInt64(), src.rmode()));
          } else {
            // Memory sizes are 32-bit even on 64-bit targets.
            DCHECK(src.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
            __ mov(dst, Operand(src.ToInt64()));
          }
#else
          __ mov(dst, Operand(src.ToInt64()));
#endif  // V8_TARGET_ARCH_S390X
          break;
        case Constant::kFloat32:
          // Float constants moved to a GP register/slot become boxed heap
          // numbers (TENURED so the pointer stays valid in code).
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ mov(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int slot;
          // Prefer cheaper materializations: reload from the current frame
          // or from the root list before embedding the object pointer.
          if (IsMaterializableFromFrame(src_object, &slot)) {
            __ LoadP(dst, g.SlotToMemOperand(slot));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): loading RPO constants on S390.
          break;
      }
      // If the destination is a slot, spill the materialized constant.
      if (destination->IsStackSlot()) {
        __ StoreP(dst, g.ToMemOperand(destination), r0);
      }
    } else {
      // Floating-point constant into an FP register or FP stack slot.
      DoubleRegister dst = destination->IsFPRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      double value = (src.type() == Constant::kFloat32) ? src.ToFloat32()
                                                        : src.ToFloat64();
      if (src.type() == Constant::kFloat32) {
        __ LoadFloat32Literal(dst, src.ToFloat32(), kScratchReg);
      } else {
        __ LoadDoubleLiteral(dst, value, kScratchReg);
      }

      if (destination->IsFPStackSlot()) {
        __ StoreDouble(dst, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsFPRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      __ StoreDouble(src, g.ToMemOperand(destination));
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsFPRegister()) {
      __ LoadDouble(g.ToDoubleRegister(destination), src);
    } else {
      // FP slot-to-slot: bounce through the scratch double register.
      DoubleRegister temp = kScratchDoubleReg;
      __ LoadDouble(temp, src);
      __ StoreDouble(temp, g.ToMemOperand(destination));
    }
  } else {
    UNREACHABLE();
  }
}
2033
// Emits code to exchange the contents of two instruction operands (used by
// the gap resolver for cyclic moves). Uses kScratchReg / r0 and
// kScratchDoubleReg / d0 as temporaries.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  S390OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      // Three-move rotation through the scratch register.
      Register dst = g.ToRegister(destination);
      __ LoadRR(temp, src);
      __ LoadRR(src, dst);
      __ LoadRR(dst, temp);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ LoadRR(temp, src);
      __ LoadP(src, dst);
      __ StoreP(temp, dst);
    }
    // On 64-bit, an FP slot is pointer-sized, so slot<->slot swaps of FP
    // values can reuse the general-register path below.
#if V8_TARGET_ARCH_S390X
  } else if (source->IsStackSlot() || source->IsFPStackSlot()) {
#else
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
#endif
    // Slot-slot swap via two general-purpose temporaries.
    Register temp_0 = kScratchReg;
    Register temp_1 = r0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ LoadP(temp_0, src);
    __ LoadP(temp_1, dst);
    __ StoreP(temp_0, dst);
    __ StoreP(temp_1, src);
  } else if (source->IsFPRegister()) {
    DoubleRegister temp = kScratchDoubleReg;
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ ldr(temp, src);
      __ ldr(src, dst);
      __ ldr(dst, temp);
    } else {
      DCHECK(destination->IsFPStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ ldr(temp, src);
      __ LoadDouble(src, dst);
      __ StoreDouble(temp, dst);
    }
    // On 32-bit only: FP slot-slot swap needs double-width loads/stores
    // (the 64-bit build handled this case above with pointer-sized moves).
#if !V8_TARGET_ARCH_S390X
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPStackSlot());
    DoubleRegister temp_0 = kScratchDoubleReg;
    DoubleRegister temp_1 = d0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    // TODO(joransiu): MVC opportunity
    __ LoadDouble(temp_0, src);
    __ LoadDouble(temp_1, dst);
    __ StoreDouble(temp_0, dst);
    __ StoreDouble(temp_1, src);
#endif
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
2102
2103void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2104 for (size_t index = 0; index < target_count; ++index) {
2105 __ emit_label_addr(targets[index]);
2106 }
2107}
2108
Ben Murdochda12d292016-06-02 14:46:10 +01002109void CodeGenerator::EnsureSpaceForLazyDeopt() {
2110 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2111 return;
2112 }
2113
2114 int space_needed = Deoptimizer::patch_size();
2115 // Ensure that we have enough space after the previous lazy-bailout
2116 // instruction for patching the code here.
2117 int current_pc = masm()->pc_offset();
2118 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
2119 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
2120 DCHECK_EQ(0, padding_size % 2);
2121 while (padding_size > 0) {
2122 __ nop();
2123 padding_size -= 2;
2124 }
2125 }
2126}
2127
2128#undef __
2129
2130} // namespace compiler
2131} // namespace internal
2132} // namespace v8