blob: 6f1e5881eb46668bd241326d333f3451abcc7b3a [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
7#include "src/ast/scopes.h"
8#include "src/compiler/code-generator-impl.h"
9#include "src/compiler/gap-resolver.h"
10#include "src/compiler/node-matchers.h"
11#include "src/compiler/osr.h"
12#include "src/ppc/macro-assembler-ppc.h"
13
14namespace v8 {
15namespace internal {
16namespace compiler {
17
// Shorthand so the emission code below reads like assembly listings:
// "__ addi(...)" expands to "masm()->addi(...)".
#define __ masm()->


// r11 is reserved by this port as a scratch register for code generation.
#define kScratchReg r11

24// Adds PPC-specific methods to convert InstructionOperands.
// Adds PPC-specific methods to convert InstructionOperands.
class PPCOperandConverter final : public InstructionOperandConverter {
 public:
  PPCOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  size_t OutputCount() { return instr_->OutputCount(); }

  // Returns whether the emitted instruction should set the condition
  // register (the "record" bit, the trailing '.' in PPC mnemonics):
  // any flags-producing mode needs CR0 updated, kFlags_none does not.
  RCBit OutputRCBit() const {
    switch (instr_->flags_mode()) {
      case kFlags_branch:
      case kFlags_deoptimize:
      case kFlags_set:
        return SetRC;
      case kFlags_none:
        return LeaveRC;
    }
    UNREACHABLE();
    return LeaveRC;
  }

  // True when the instruction's comparison is unsigned, in which case the
  // logical compare instructions (cmpl/cmpli) must be used instead of the
  // arithmetic (signed) ones.
  bool CompareLogical() const {
    switch (instr_->flags_condition()) {
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
      case kUnsignedLessThanOrEqual:
      case kUnsignedGreaterThan:
        return true;
      default:
        return false;
    }
    UNREACHABLE();
    return false;
  }

  // Converts input |index| (which must be a constant) into an assembler
  // Operand. Floats are materialized as tenured heap numbers. Note that
  // kInt64 is only valid on PPC64 -- on 32-bit builds it falls through to
  // the unsupported cases below and hits UNREACHABLE().
  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
#if V8_TARGET_ARCH_PPC64
        return Operand(constant.ToInt64());
#endif
      case Constant::kExternalReference:
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
    return Operand::Zero();
  }

  // Decodes the addressing mode encoded in the opcode and builds the
  // corresponding MemOperand from the inputs starting at *first_index.
  // Advances *first_index past the consumed inputs (two for MRI/MRR) so a
  // caller can pick up any remaining inputs (e.g. the stored value).
  MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
    const size_t index = *first_index;
    *mode = AddressingModeField::decode(instr_->opcode());
    switch (*mode) {
      case kMode_None:
        break;
      case kMode_MRI:
        // Register base + immediate displacement.
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        // Register base + register index.
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(r0);
  }

  // Convenience overload for callers that don't need the updated index.
  MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
    return MemoryOperand(mode, &first_index);
  }

  // Converts an allocated stack-slot operand into a frame-relative
  // MemOperand.
  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index());
  }

  // Maps a spill-slot index to a MemOperand based off sp or fp, depending
  // on how the frame access state says slots are currently addressed.
  MemOperand SlotToMemOperand(int slot) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};
115
116
117static inline bool HasRegisterInput(Instruction* instr, size_t index) {
118 return instr->InputAt(index)->IsRegister();
119}
120
121
122namespace {
123
// Out-of-line slow path for checked float32 loads: when the bounds check
// fails, the result register is loaded with a single-precision quiet NaN
// instead of performing the load.
class OutOfLineLoadNAN32 final : public OutOfLineCode {
 public:
  OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(),
                         kScratchReg);
  }

 private:
  DoubleRegister const result_;
};
137
138
// Out-of-line slow path for checked float64 loads: when the bounds check
// fails, the result register is loaded with a double-precision quiet NaN
// instead of performing the load.
class OutOfLineLoadNAN64 final : public OutOfLineCode {
 public:
  OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(),
                         kScratchReg);
  }

 private:
  DoubleRegister const result_;
};
152
153
// Out-of-line slow path for checked integer loads: when the bounds check
// fails, the result register is set to zero instead of performing the load.
class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ li(result_, Operand::Zero()); }

 private:
  Register const result_;
};
164
165
166class OutOfLineRecordWrite final : public OutOfLineCode {
167 public:
168 OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
169 Register value, Register scratch0, Register scratch1,
170 RecordWriteMode mode)
171 : OutOfLineCode(gen),
172 object_(object),
173 offset_(offset),
Ben Murdoch097c5b22016-05-18 11:27:45 +0100174 offset_immediate_(0),
175 value_(value),
176 scratch0_(scratch0),
177 scratch1_(scratch1),
178 mode_(mode) {}
179
180 OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
181 Register value, Register scratch0, Register scratch1,
182 RecordWriteMode mode)
183 : OutOfLineCode(gen),
184 object_(object),
185 offset_(no_reg),
186 offset_immediate_(offset),
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000187 value_(value),
188 scratch0_(scratch0),
189 scratch1_(scratch1),
Ben Murdochda12d292016-06-02 14:46:10 +0100190 mode_(mode),
191 must_save_lr_(!gen->frame_access_state()->has_frame()) {}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000192
193 void Generate() final {
194 if (mode_ > RecordWriteMode::kValueIsPointer) {
195 __ JumpIfSmi(value_, exit());
196 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100197 __ CheckPageFlag(value_, scratch0_,
198 MemoryChunk::kPointersToHereAreInterestingMask, eq,
199 exit());
200 RememberedSetAction const remembered_set_action =
201 mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
202 : OMIT_REMEMBERED_SET;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000203 SaveFPRegsMode const save_fp_mode =
204 frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
Ben Murdochda12d292016-06-02 14:46:10 +0100205 if (must_save_lr_) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100206 // We need to save and restore lr if the frame was elided.
207 __ mflr(scratch1_);
208 __ Push(scratch1_);
209 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000210 RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
Ben Murdoch097c5b22016-05-18 11:27:45 +0100211 remembered_set_action, save_fp_mode);
212 if (offset_.is(no_reg)) {
213 __ addi(scratch1_, object_, Operand(offset_immediate_));
214 } else {
215 DCHECK_EQ(0, offset_immediate_);
216 __ add(scratch1_, object_, offset_);
217 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000218 __ CallStub(&stub);
Ben Murdochda12d292016-06-02 14:46:10 +0100219 if (must_save_lr_) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100220 // We need to save and restore lr if the frame was elided.
221 __ Pop(scratch1_);
222 __ mtlr(scratch1_);
223 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000224 }
225
226 private:
227 Register const object_;
228 Register const offset_;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100229 int32_t const offset_immediate_; // Valid if offset_.is(no_reg).
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000230 Register const value_;
231 Register const scratch0_;
232 Register const scratch1_;
233 RecordWriteMode const mode_;
Ben Murdochda12d292016-06-02 14:46:10 +0100234 bool must_save_lr_;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000235};
236
237
// Maps a machine-independent FlagsCondition to the PPC condition code that
// the preceding instruction left in CR0. |op| is needed only to
// disambiguate the overflow conditions, whose encoding depends on how the
// arch opcode computed overflow.
Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
    case kUnsignedLessThan:
      // Signed vs. unsigned was already handled when the compare was
      // emitted (cmp vs. cmpl, see CompareLogical()); the CR bits tested
      // here are the same.
      return lt;
    case kSignedGreaterThanOrEqual:
    case kUnsignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
    case kUnsignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
    case kUnsignedGreaterThan:
      return gt;
    case kOverflow:
      // Overflow checked for add/sub only.
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add:
        case kPPC_Sub:
          // lt/ge match the condition left by Add/SubAndCheckForOverflow
          // (see ASSEMBLE_ADD_WITH_OVERFLOW) -- presumably the overflow
          // indicator is shifted into the sign; confirm against the
          // macro-assembler implementation.
          return lt;
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
#if V8_TARGET_ARCH_PPC64
          // On PPC64 the 32-bit variant checks overflow via TestIfInt32 on
          // the 64-bit result (see ASSEMBLE_ADD_WITH_OVERFLOW32): "ne"
          // means the result did not fit in 32 bits.
          return ne;
#else
          return lt;
#endif
        default:
          break;
      }
      break;
    case kNotOverflow:
      // Inverse of the kOverflow mapping above.
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add:
        case kPPC_Sub:
          return ge;
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
#if V8_TARGET_ARCH_PPC64
          return eq;
#else
          return ge;
#endif
        default:
          break;
      }
      break;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}
299
300} // namespace
301
// Emits a one-operand floating-point instruction, honoring the
// instruction's RC (record) bit. When |round| is true the result is
// squashed back to single precision with frsp (used by float32 opcodes).
#define ASSEMBLE_FLOAT_UNOP_RC(asm_instr, round)                      \
  do {                                                                \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0),  \
                 i.OutputRCBit());                                    \
    if (round) {                                                      \
      __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());    \
    }                                                                 \
  } while (0)

// Same as ASSEMBLE_FLOAT_UNOP_RC, but for two-operand instructions.
#define ASSEMBLE_FLOAT_BINOP_RC(asm_instr, round)                     \
  do {                                                                \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0),  \
                 i.InputDoubleRegister(1), i.OutputRCBit());          \
    if (round) {                                                      \
      __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());    \
    }                                                                 \
  } while (0)

319
// Two-input integer op: picks the register form (asm_instr_reg) or the
// immediate form (asm_instr_imm) depending on how input 1 was allocated.
#define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm)           \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1));                    \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1));                   \
    }                                                          \
  } while (0)


// As ASSEMBLE_BINOP, additionally forwarding the RC (record) bit so the
// instruction can update CR0 when flags are needed.
#define ASSEMBLE_BINOP_RC(asm_instr_reg, asm_instr_imm)        \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1), i.OutputRCBit());  \
    }                                                          \
  } while (0)


// As ASSEMBLE_BINOP_RC, but the immediate form takes a raw int32 (e.g.
// shift amounts for srawi/sradi) rather than an Operand.
#define ASSEMBLE_BINOP_INT_RC(asm_instr_reg, asm_instr_imm)    \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputInt32(1), i.OutputRCBit());      \
    }                                                          \
  } while (0)

354
355
// Add with overflow detection via the macro-assembler helper; the helper
// leaves the overflow indication for FlagsConditionToCondition to test.
#define ASSEMBLE_ADD_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputInt32(1), kScratchReg, r0);      \
    }                                                                   \
  } while (0)


// Subtract with overflow detection. The immediate form is implemented as
// an add of the negated immediate.
// NOTE(review): negating i.InputInt32(1) overflows for INT32_MIN --
// presumably the instruction selector never emits that immediate here;
// confirm against the PPC instruction selector.
#define ASSEMBLE_SUB_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                -i.InputInt32(1), kScratchReg, r0);     \
    }                                                                   \
  } while (0)


#if V8_TARGET_ARCH_PPC64
// On PPC64, 32-bit overflow is detected by doing the full 64-bit add/sub
// and then checking whether the result still fits in 32 bits
// (TestIfInt32); FlagsConditionToCondition tests eq/ne accordingly.
#define ASSEMBLE_ADD_WITH_OVERFLOW32()           \
  do {                                           \
    ASSEMBLE_BINOP(add, addi);                   \
    __ TestIfInt32(i.OutputRegister(), r0, cr0); \
  } while (0)


#define ASSEMBLE_SUB_WITH_OVERFLOW32()           \
  do {                                           \
    ASSEMBLE_BINOP(sub, subi);                   \
    __ TestIfInt32(i.OutputRegister(), r0, cr0); \
  } while (0)
#else
// On 32-bit PPC the plain overflow sequences already operate on 32 bits.
#define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW
#define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW
#endif

397
398
// Integer compare into cr0. Chooses the logical (unsigned) or arithmetic
// (signed) compare based on the instruction's flags condition, and the
// register or immediate form based on input 1. The ##i token paste derives
// the immediate mnemonic from the register one (e.g. cmpw -> cmpwi).
#define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr)                        \
  do {                                                                 \
    const CRegister cr = cr0;                                          \
    if (HasRegisterInput(instr, 1)) {                                  \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1), cr);     \
      } else {                                                         \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1), cr);      \
      }                                                                \
    } else {                                                           \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \
      } else {                                                         \
        __ cmp_instr##i(i.InputRegister(0), i.InputImmediate(1), cr);  \
      }                                                                \
    }                                                                  \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                 \
  } while (0)


// Floating-point compare into cr0; only the register form exists.
#define ASSEMBLE_FLOAT_COMPARE(cmp_instr)                                 \
  do {                                                                    \
    const CRegister cr = cr0;                                             \
    __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1), cr); \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                    \
  } while (0)

425
426
// Integer modulo via the classic divide/multiply/subtract sequence:
// result = dividend - (dividend / divisor) * divisor.
#define ASSEMBLE_MODULO(div_instr, mul_instr)                        \
  do {                                                               \
    const Register scratch = kScratchReg;                            \
    __ div_instr(scratch, i.InputRegister(0), i.InputRegister(1));   \
    __ mul_instr(scratch, scratch, i.InputRegister(1));              \
    __ sub(i.OutputRegister(), i.InputRegister(0), scratch, LeaveOE, \
           i.OutputRCBit());                                         \
  } while (0)


// Double modulo via a call to the C runtime's fmod implementation; PPC has
// no hardware instruction for it.
#define ASSEMBLE_FLOAT_MODULO()                                               \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ PrepareCallCFunction(0, 2, kScratchReg);                               \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                         \
                            i.InputDoubleRegister(1));                        \
    __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \
                     0, 2);                                                   \
    __ MovFromFloatResult(i.OutputDoubleRegister());                          \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                                      \
  } while (0)

448
449
// Max via fsel: scratch = a - b; fsel selects a when scratch >= 0.
// NOTE(review): fsel-based selection does not implement IEEE-754 NaN
// propagation (a NaN difference selects the "else" operand) -- presumably
// acceptable to the callers of these opcodes; confirm before relying on
// NaN or signed-zero behavior.
#define ASSEMBLE_FLOAT_MAX(scratch_reg)                                       \
  do {                                                                        \
    __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(0),  \
            i.InputDoubleRegister(1));                                        \
  } while (0)


// Min: same subtraction, but the fsel operands are swapped so the smaller
// input is selected. Same NaN/zero caveat as ASSEMBLE_FLOAT_MAX.
#define ASSEMBLE_FLOAT_MIN(scratch_reg)                                       \
  do {                                                                        \
    __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(1),  \
            i.InputDoubleRegister(0));                                        \
  } while (0)

464
465
// Floating-point load: picks the D-form (base+displacement, asm_instr) or
// X-form (base+index register, asm_instrx) encoding from the decoded
// addressing mode.
#define ASSEMBLE_LOAD_FLOAT(asm_instr, asm_instrx)    \
  do {                                                \
    DoubleRegister result = i.OutputDoubleRegister(); \
    AddressingMode mode = kMode_None;                 \
    MemOperand operand = i.MemoryOperand(&mode);      \
    if (mode == kMode_MRI) {                          \
      __ asm_instr(result, operand);                  \
    } else {                                          \
      __ asm_instrx(result, operand);                 \
    }                                                 \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());              \
  } while (0)


// Integer load; same D-form vs. X-form selection as ASSEMBLE_LOAD_FLOAT.
#define ASSEMBLE_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                               \
    Register result = i.OutputRegister();            \
    AddressingMode mode = kMode_None;                \
    MemOperand operand = i.MemoryOperand(&mode);     \
    if (mode == kMode_MRI) {                         \
      __ asm_instr(result, operand);                 \
    } else {                                         \
      __ asm_instrx(result, operand);                \
    }                                                \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());             \
  } while (0)

492
493
// float32 store: the value arrives as a double register, so it is first
// rounded to single precision into kScratchDoubleReg before the store.
// |index| is advanced past the address inputs by MemoryOperand so the
// value input can be located.
#define ASSEMBLE_STORE_FLOAT32()                         \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ frsp(kScratchDoubleReg, value);                   \
    if (mode == kMode_MRI) {                             \
      __ stfs(kScratchDoubleReg, operand);               \
    } else {                                             \
      __ stfsx(kScratchDoubleReg, operand);              \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)


// float64 store; no rounding needed, otherwise as ASSEMBLE_STORE_FLOAT32.
#define ASSEMBLE_STORE_DOUBLE()                          \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    if (mode == kMode_MRI) {                             \
      __ stfd(value, operand);                           \
    } else {                                             \
      __ stfdx(value, operand);                          \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)


// Integer store, parameterized over the D-form and X-form store mnemonics.
#define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx)    \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    Register value = i.InputRegister(index);             \
    if (mode == kMode_MRI) {                             \
      __ asm_instr(value, operand);                      \
    } else {                                             \
      __ asm_instrx(value, operand);                     \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)

538
539
// Bounds-checked floating-point load: sign-extends the offset register,
// compares it (unsigned) against the length in input 2, and branches to an
// out-of-line path that produces a quiet NaN when out of bounds. |width|
// (32/64) selects the OutOfLineLoadNAN class via token pasting.
// Note: the kMode_MRI branch below is dead code given the kMode_MRR DCHECK
// above it; it is kept to mirror the unchecked load macros.
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width)  \
  do {                                                             \
    DoubleRegister result = i.OutputDoubleRegister();              \
    size_t index = 0;                                              \
    AddressingMode mode = kMode_None;                              \
    MemOperand operand = i.MemoryOperand(&mode, index);            \
    DCHECK_EQ(kMode_MRR, mode);                                    \
    Register offset = operand.rb();                                \
    __ extsw(offset, offset);                                      \
    if (HasRegisterInput(instr, 2)) {                              \
      __ cmplw(offset, i.InputRegister(2));                        \
    } else {                                                       \
      __ cmplwi(offset, i.InputImmediate(2));                      \
    }                                                              \
    auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \
    __ bge(ool->entry());                                          \
    if (mode == kMode_MRI) {                                       \
      __ asm_instr(result, operand);                               \
    } else {                                                       \
      __ asm_instrx(result, operand);                              \
    }                                                              \
    __ bind(ool->exit());                                          \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                           \
  } while (0)


// Bounds-checked integer load; out-of-bounds yields zero via
// OutOfLineLoadZero instead of NaN. Structure mirrors the float variant.
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                                       \
    Register result = i.OutputRegister();                    \
    size_t index = 0;                                        \
    AddressingMode mode = kMode_None;                        \
    MemOperand operand = i.MemoryOperand(&mode, index);      \
    DCHECK_EQ(kMode_MRR, mode);                              \
    Register offset = operand.rb();                          \
    __ extsw(offset, offset);                                \
    if (HasRegisterInput(instr, 2)) {                        \
      __ cmplw(offset, i.InputRegister(2));                  \
    } else {                                                 \
      __ cmplwi(offset, i.InputImmediate(2));                \
    }                                                        \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ bge(ool->entry());                                    \
    if (mode == kMode_MRI) {                                 \
      __ asm_instr(result, operand);                         \
    } else {                                                 \
      __ asm_instrx(result, operand);                        \
    }                                                        \
    __ bind(ool->exit());                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                     \
  } while (0)

592
593
// Bounds-checked float32 store: when the offset is out of bounds the store
// is simply skipped (branch to &done). The stored value in input 3 is
// rounded to single precision first, as in ASSEMBLE_STORE_FLOAT32.
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_STORE_FLOAT32()                \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    __ extsw(offset, offset);                           \
    if (HasRegisterInput(instr, 2)) {                   \
      __ cmplw(offset, i.InputRegister(2));             \
    } else {                                            \
      __ cmplwi(offset, i.InputImmediate(2));           \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    __ frsp(kScratchDoubleReg, value);                  \
    if (mode == kMode_MRI) {                            \
      __ stfs(kScratchDoubleReg, operand);              \
    } else {                                            \
      __ stfsx(kScratchDoubleReg, operand);             \
    }                                                   \
    __ bind(&done);                                     \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
  } while (0)


// Bounds-checked float64 store; no rounding, otherwise as above.
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_STORE_DOUBLE()                 \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    __ extsw(offset, offset);                           \
    if (HasRegisterInput(instr, 2)) {                   \
      __ cmplw(offset, i.InputRegister(2));             \
    } else {                                            \
      __ cmplwi(offset, i.InputImmediate(2));           \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    if (mode == kMode_MRI) {                            \
      __ stfd(value, operand);                          \
    } else {                                            \
      __ stfdx(value, operand);                         \
    }                                                   \
    __ bind(&done);                                     \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
  } while (0)

647
648
// Bounds-checked integer store; skips the store when out of bounds.
// Parameterized over the D-form and X-form store mnemonics, as in
// ASSEMBLE_STORE_INTEGER.
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \
  do {                                                        \
    Label done;                                               \
    size_t index = 0;                                         \
    AddressingMode mode = kMode_None;                         \
    MemOperand operand = i.MemoryOperand(&mode, index);       \
    DCHECK_EQ(kMode_MRR, mode);                               \
    Register offset = operand.rb();                           \
    __ extsw(offset, offset);                                 \
    if (HasRegisterInput(instr, 2)) {                         \
      __ cmplw(offset, i.InputRegister(2));                   \
    } else {                                                  \
      __ cmplwi(offset, i.InputImmediate(2));                 \
    }                                                         \
    __ bge(&done);                                            \
    Register value = i.InputRegister(3);                      \
    if (mode == kMode_MRI) {                                  \
      __ asm_instr(value, operand);                           \
    } else {                                                  \
      __ asm_instrx(value, operand);                          \
    }                                                         \
    __ bind(&done);                                           \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                      \
  } while (0)

674
// Tears down the current stack frame (restores sp/fp) before returning or
// tail-calling.
void CodeGenerator::AssembleDeconstructFrame() {
  __ LeaveFrame(StackFrame::MANUAL);
}
678
// No per-function stack-pointer setup is required on PPC.
void CodeGenerator::AssembleSetupStackPointer() {}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000680
681void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
682 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
683 if (sp_slot_delta > 0) {
684 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
685 }
686 frame_access_state()->SetFrameAccessToDefault();
687}
688
689
// Prepares the stack for a tail call: a negative slot delta grows the
// stack (the Add below subtracts from sp) and is recorded in the frame
// access state; if a frame exists its state is restored for the tail call.
// Afterwards all frame accesses go through sp.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ RestoreFrameStateForTailCall();
  }
  frame_access_state()->SetFrameAccessToSP();
}
701
// If the current frame is an arguments adaptor frame, pops it before a
// tail call so the callee sees the actual caller frame. |args_reg| holds
// the callee's argument count; the scratch registers are clobbered.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(scratch1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000726
727// Assembles an instruction after register allocation, producing machine code.
728void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
729 PPCOperandConverter i(this, instr);
730 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());
731
732 switch (opcode) {
733 case kArchCallCodeObject: {
734 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
735 masm());
736 EnsureSpaceForLazyDeopt();
737 if (HasRegisterInput(instr, 0)) {
738 __ addi(ip, i.InputRegister(0),
739 Operand(Code::kHeaderSize - kHeapObjectTag));
740 __ Call(ip);
741 } else {
742 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
743 RelocInfo::CODE_TARGET);
744 }
745 RecordCallPosition(instr);
746 DCHECK_EQ(LeaveRC, i.OutputRCBit());
747 frame_access_state()->ClearSPDelta();
748 break;
749 }
Ben Murdochda12d292016-06-02 14:46:10 +0100750 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000751 case kArchTailCallCodeObject: {
752 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
753 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100754 if (opcode == kArchTailCallCodeObjectFromJSFunction) {
755 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
756 i.TempRegister(0), i.TempRegister(1),
757 i.TempRegister(2));
758 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000759 if (HasRegisterInput(instr, 0)) {
760 __ addi(ip, i.InputRegister(0),
761 Operand(Code::kHeaderSize - kHeapObjectTag));
762 __ Jump(ip);
763 } else {
764 // We cannot use the constant pool to load the target since
765 // we've already restored the caller's frame.
766 ConstantPoolUnavailableScope constant_pool_unavailable(masm());
767 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
768 RelocInfo::CODE_TARGET);
769 }
770 DCHECK_EQ(LeaveRC, i.OutputRCBit());
771 frame_access_state()->ClearSPDelta();
772 break;
773 }
774 case kArchCallJSFunction: {
775 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
776 masm());
777 EnsureSpaceForLazyDeopt();
778 Register func = i.InputRegister(0);
779 if (FLAG_debug_code) {
780 // Check the function's context matches the context argument.
781 __ LoadP(kScratchReg,
782 FieldMemOperand(func, JSFunction::kContextOffset));
783 __ cmp(cp, kScratchReg);
784 __ Assert(eq, kWrongFunctionContext);
785 }
786 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
787 __ Call(ip);
788 RecordCallPosition(instr);
789 DCHECK_EQ(LeaveRC, i.OutputRCBit());
790 frame_access_state()->ClearSPDelta();
791 break;
792 }
Ben Murdochda12d292016-06-02 14:46:10 +0100793 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000794 case kArchTailCallJSFunction: {
795 Register func = i.InputRegister(0);
796 if (FLAG_debug_code) {
797 // Check the function's context matches the context argument.
798 __ LoadP(kScratchReg,
799 FieldMemOperand(func, JSFunction::kContextOffset));
800 __ cmp(cp, kScratchReg);
801 __ Assert(eq, kWrongFunctionContext);
802 }
803 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
804 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100805 if (opcode == kArchTailCallJSFunctionFromJSFunction) {
806 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
807 i.TempRegister(0), i.TempRegister(1),
808 i.TempRegister(2));
809 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000810 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
811 __ Jump(ip);
812 DCHECK_EQ(LeaveRC, i.OutputRCBit());
813 frame_access_state()->ClearSPDelta();
814 break;
815 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000816 case kArchPrepareCallCFunction: {
817 int const num_parameters = MiscField::decode(instr->opcode());
818 __ PrepareCallCFunction(num_parameters, kScratchReg);
819 // Frame alignment requires using FP-relative frame addressing.
820 frame_access_state()->SetFrameAccessToFP();
821 break;
822 }
823 case kArchPrepareTailCall:
824 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
825 break;
826 case kArchCallCFunction: {
827 int const num_parameters = MiscField::decode(instr->opcode());
828 if (instr->InputAt(0)->IsImmediate()) {
829 ExternalReference ref = i.InputExternalReference(0);
830 __ CallCFunction(ref, num_parameters);
831 } else {
832 Register func = i.InputRegister(0);
833 __ CallCFunction(func, num_parameters);
834 }
835 frame_access_state()->SetFrameAccessToDefault();
836 frame_access_state()->ClearSPDelta();
837 break;
838 }
839 case kArchJmp:
840 AssembleArchJump(i.InputRpo(0));
841 DCHECK_EQ(LeaveRC, i.OutputRCBit());
842 break;
843 case kArchLookupSwitch:
844 AssembleArchLookupSwitch(instr);
845 DCHECK_EQ(LeaveRC, i.OutputRCBit());
846 break;
847 case kArchTableSwitch:
848 AssembleArchTableSwitch(instr);
849 DCHECK_EQ(LeaveRC, i.OutputRCBit());
850 break;
851 case kArchNop:
852 case kArchThrowTerminator:
853 // don't emit code for nops.
854 DCHECK_EQ(LeaveRC, i.OutputRCBit());
855 break;
856 case kArchDeoptimize: {
857 int deopt_state_id =
858 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
859 Deoptimizer::BailoutType bailout_type =
860 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
861 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
862 break;
863 }
864 case kArchRet:
865 AssembleReturn();
866 DCHECK_EQ(LeaveRC, i.OutputRCBit());
867 break;
868 case kArchStackPointer:
869 __ mr(i.OutputRegister(), sp);
870 DCHECK_EQ(LeaveRC, i.OutputRCBit());
871 break;
872 case kArchFramePointer:
873 __ mr(i.OutputRegister(), fp);
874 DCHECK_EQ(LeaveRC, i.OutputRCBit());
875 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100876 case kArchParentFramePointer:
Ben Murdochda12d292016-06-02 14:46:10 +0100877 if (frame_access_state()->has_frame()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100878 __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
879 } else {
880 __ mr(i.OutputRegister(), fp);
881 }
882 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000883 case kArchTruncateDoubleToI:
884 // TODO(mbrandy): move slow call to stub out of line.
885 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
886 DCHECK_EQ(LeaveRC, i.OutputRCBit());
887 break;
888 case kArchStoreWithWriteBarrier: {
889 RecordWriteMode mode =
890 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
891 Register object = i.InputRegister(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000892 Register value = i.InputRegister(2);
893 Register scratch0 = i.TempRegister(0);
894 Register scratch1 = i.TempRegister(1);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100895 OutOfLineRecordWrite* ool;
896
897 AddressingMode addressing_mode =
898 AddressingModeField::decode(instr->opcode());
899 if (addressing_mode == kMode_MRI) {
900 int32_t offset = i.InputInt32(1);
901 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
902 scratch0, scratch1, mode);
903 __ StoreP(value, MemOperand(object, offset));
904 } else {
905 DCHECK_EQ(kMode_MRR, addressing_mode);
906 Register offset(i.InputRegister(1));
907 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
908 scratch0, scratch1, mode);
909 __ StorePX(value, MemOperand(object, offset));
910 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000911 __ CheckPageFlag(object, scratch0,
912 MemoryChunk::kPointersFromHereAreInterestingMask, ne,
913 ool->entry());
914 __ bind(ool->exit());
915 break;
916 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100917 case kArchStackSlot: {
918 FrameOffset offset =
919 frame_access_state()->GetFrameOffset(i.InputInt32(0));
920 __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
921 Operand(offset.offset()));
922 break;
923 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000924 case kPPC_And:
925 if (HasRegisterInput(instr, 1)) {
926 __ and_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
927 i.OutputRCBit());
928 } else {
929 __ andi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
930 }
931 break;
932 case kPPC_AndComplement:
933 __ andc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
934 i.OutputRCBit());
935 break;
936 case kPPC_Or:
937 if (HasRegisterInput(instr, 1)) {
938 __ orx(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
939 i.OutputRCBit());
940 } else {
941 __ ori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
942 DCHECK_EQ(LeaveRC, i.OutputRCBit());
943 }
944 break;
945 case kPPC_OrComplement:
946 __ orc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
947 i.OutputRCBit());
948 break;
949 case kPPC_Xor:
950 if (HasRegisterInput(instr, 1)) {
951 __ xor_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
952 i.OutputRCBit());
953 } else {
954 __ xori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
955 DCHECK_EQ(LeaveRC, i.OutputRCBit());
956 }
957 break;
958 case kPPC_ShiftLeft32:
959 ASSEMBLE_BINOP_RC(slw, slwi);
960 break;
961#if V8_TARGET_ARCH_PPC64
962 case kPPC_ShiftLeft64:
963 ASSEMBLE_BINOP_RC(sld, sldi);
964 break;
965#endif
966 case kPPC_ShiftRight32:
967 ASSEMBLE_BINOP_RC(srw, srwi);
968 break;
969#if V8_TARGET_ARCH_PPC64
970 case kPPC_ShiftRight64:
971 ASSEMBLE_BINOP_RC(srd, srdi);
972 break;
973#endif
974 case kPPC_ShiftRightAlg32:
975 ASSEMBLE_BINOP_INT_RC(sraw, srawi);
976 break;
977#if V8_TARGET_ARCH_PPC64
978 case kPPC_ShiftRightAlg64:
979 ASSEMBLE_BINOP_INT_RC(srad, sradi);
980 break;
981#endif
Ben Murdochda12d292016-06-02 14:46:10 +0100982#if !V8_TARGET_ARCH_PPC64
983 case kPPC_AddPair:
984 // i.InputRegister(0) ... left low word.
985 // i.InputRegister(1) ... left high word.
986 // i.InputRegister(2) ... right low word.
987 // i.InputRegister(3) ... right high word.
988 __ addc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
989 __ adde(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
990 DCHECK_EQ(LeaveRC, i.OutputRCBit());
991 break;
992 case kPPC_SubPair:
993 // i.InputRegister(0) ... left low word.
994 // i.InputRegister(1) ... left high word.
995 // i.InputRegister(2) ... right low word.
996 // i.InputRegister(3) ... right high word.
997 __ subc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
998 __ sube(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
999 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1000 break;
1001 case kPPC_MulPair:
1002 // i.InputRegister(0) ... left low word.
1003 // i.InputRegister(1) ... left high word.
1004 // i.InputRegister(2) ... right low word.
1005 // i.InputRegister(3) ... right high word.
1006 __ mullw(i.TempRegister(0), i.InputRegister(0), i.InputRegister(3));
1007 __ mullw(i.TempRegister(1), i.InputRegister(2), i.InputRegister(1));
1008 __ add(i.TempRegister(0), i.TempRegister(0), i.TempRegister(1));
1009 __ mullw(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
1010 __ mulhwu(i.OutputRegister(1), i.InputRegister(0), i.InputRegister(2));
1011 __ add(i.OutputRegister(1), i.OutputRegister(1), i.TempRegister(0));
1012 break;
1013 case kPPC_ShiftLeftPair:
1014 if (instr->InputAt(2)->IsImmediate()) {
1015 __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
1016 i.InputRegister(0), i.InputRegister(1),
1017 i.InputInt32(2));
1018 } else {
1019 __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
1020 i.InputRegister(0), i.InputRegister(1), kScratchReg,
1021 i.InputRegister(2));
1022 }
1023 break;
1024 case kPPC_ShiftRightPair:
1025 if (instr->InputAt(2)->IsImmediate()) {
1026 __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
1027 i.InputRegister(0), i.InputRegister(1),
1028 i.InputInt32(2));
1029 } else {
1030 __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
1031 i.InputRegister(0), i.InputRegister(1), kScratchReg,
1032 i.InputRegister(2));
1033 }
1034 break;
1035 case kPPC_ShiftRightAlgPair:
1036 if (instr->InputAt(2)->IsImmediate()) {
1037 __ ShiftRightAlgPair(i.OutputRegister(0), i.OutputRegister(1),
1038 i.InputRegister(0), i.InputRegister(1),
1039 i.InputInt32(2));
1040 } else {
1041 __ ShiftRightAlgPair(i.OutputRegister(0), i.OutputRegister(1),
1042 i.InputRegister(0), i.InputRegister(1),
1043 kScratchReg, i.InputRegister(2));
1044 }
1045 break;
1046#endif
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001047 case kPPC_RotRight32:
1048 if (HasRegisterInput(instr, 1)) {
1049 __ subfic(kScratchReg, i.InputRegister(1), Operand(32));
1050 __ rotlw(i.OutputRegister(), i.InputRegister(0), kScratchReg,
1051 i.OutputRCBit());
1052 } else {
1053 int sh = i.InputInt32(1);
1054 __ rotrwi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
1055 }
1056 break;
1057#if V8_TARGET_ARCH_PPC64
1058 case kPPC_RotRight64:
1059 if (HasRegisterInput(instr, 1)) {
1060 __ subfic(kScratchReg, i.InputRegister(1), Operand(64));
1061 __ rotld(i.OutputRegister(), i.InputRegister(0), kScratchReg,
1062 i.OutputRCBit());
1063 } else {
1064 int sh = i.InputInt32(1);
1065 __ rotrdi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
1066 }
1067 break;
1068#endif
1069 case kPPC_Not:
1070 __ notx(i.OutputRegister(), i.InputRegister(0), i.OutputRCBit());
1071 break;
1072 case kPPC_RotLeftAndMask32:
1073 __ rlwinm(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1074 31 - i.InputInt32(2), 31 - i.InputInt32(3), i.OutputRCBit());
1075 break;
1076#if V8_TARGET_ARCH_PPC64
1077 case kPPC_RotLeftAndClear64:
1078 __ rldic(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1079 63 - i.InputInt32(2), i.OutputRCBit());
1080 break;
1081 case kPPC_RotLeftAndClearLeft64:
1082 __ rldicl(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1083 63 - i.InputInt32(2), i.OutputRCBit());
1084 break;
1085 case kPPC_RotLeftAndClearRight64:
1086 __ rldicr(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1087 63 - i.InputInt32(2), i.OutputRCBit());
1088 break;
1089#endif
1090 case kPPC_Add:
1091#if V8_TARGET_ARCH_PPC64
1092 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
1093 ASSEMBLE_ADD_WITH_OVERFLOW();
1094 } else {
1095#endif
1096 if (HasRegisterInput(instr, 1)) {
1097 __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1098 LeaveOE, i.OutputRCBit());
1099 } else {
1100 __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
1101 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1102 }
1103#if V8_TARGET_ARCH_PPC64
1104 }
1105#endif
1106 break;
1107 case kPPC_AddWithOverflow32:
1108 ASSEMBLE_ADD_WITH_OVERFLOW32();
1109 break;
1110 case kPPC_AddDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001111 ASSEMBLE_FLOAT_BINOP_RC(fadd, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001112 break;
1113 case kPPC_Sub:
1114#if V8_TARGET_ARCH_PPC64
1115 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
1116 ASSEMBLE_SUB_WITH_OVERFLOW();
1117 } else {
1118#endif
1119 if (HasRegisterInput(instr, 1)) {
1120 __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1121 LeaveOE, i.OutputRCBit());
1122 } else {
1123 __ subi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
1124 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1125 }
1126#if V8_TARGET_ARCH_PPC64
1127 }
1128#endif
1129 break;
1130 case kPPC_SubWithOverflow32:
1131 ASSEMBLE_SUB_WITH_OVERFLOW32();
1132 break;
1133 case kPPC_SubDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001134 ASSEMBLE_FLOAT_BINOP_RC(fsub, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001135 break;
1136 case kPPC_Mul32:
1137 __ mullw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1138 LeaveOE, i.OutputRCBit());
1139 break;
1140#if V8_TARGET_ARCH_PPC64
1141 case kPPC_Mul64:
1142 __ mulld(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1143 LeaveOE, i.OutputRCBit());
1144 break;
1145#endif
1146 case kPPC_MulHigh32:
1147 __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1148 i.OutputRCBit());
1149 break;
1150 case kPPC_MulHighU32:
1151 __ mulhwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1152 i.OutputRCBit());
1153 break;
1154 case kPPC_MulDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001155 ASSEMBLE_FLOAT_BINOP_RC(fmul, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001156 break;
1157 case kPPC_Div32:
1158 __ divw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1159 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1160 break;
1161#if V8_TARGET_ARCH_PPC64
1162 case kPPC_Div64:
1163 __ divd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1164 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1165 break;
1166#endif
1167 case kPPC_DivU32:
1168 __ divwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1169 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1170 break;
1171#if V8_TARGET_ARCH_PPC64
1172 case kPPC_DivU64:
1173 __ divdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1174 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1175 break;
1176#endif
1177 case kPPC_DivDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001178 ASSEMBLE_FLOAT_BINOP_RC(fdiv, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001179 break;
1180 case kPPC_Mod32:
1181 ASSEMBLE_MODULO(divw, mullw);
1182 break;
1183#if V8_TARGET_ARCH_PPC64
1184 case kPPC_Mod64:
1185 ASSEMBLE_MODULO(divd, mulld);
1186 break;
1187#endif
1188 case kPPC_ModU32:
1189 ASSEMBLE_MODULO(divwu, mullw);
1190 break;
1191#if V8_TARGET_ARCH_PPC64
1192 case kPPC_ModU64:
1193 ASSEMBLE_MODULO(divdu, mulld);
1194 break;
1195#endif
1196 case kPPC_ModDouble:
1197 // TODO(bmeurer): We should really get rid of this special instruction,
1198 // and generate a CallAddress instruction instead.
1199 ASSEMBLE_FLOAT_MODULO();
1200 break;
1201 case kPPC_Neg:
1202 __ neg(i.OutputRegister(), i.InputRegister(0), LeaveOE, i.OutputRCBit());
1203 break;
1204 case kPPC_MaxDouble:
1205 ASSEMBLE_FLOAT_MAX(kScratchDoubleReg);
1206 break;
1207 case kPPC_MinDouble:
1208 ASSEMBLE_FLOAT_MIN(kScratchDoubleReg);
1209 break;
1210 case kPPC_AbsDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001211 ASSEMBLE_FLOAT_UNOP_RC(fabs, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001212 break;
1213 case kPPC_SqrtDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001214 ASSEMBLE_FLOAT_UNOP_RC(fsqrt, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001215 break;
1216 case kPPC_FloorDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001217 ASSEMBLE_FLOAT_UNOP_RC(frim, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001218 break;
1219 case kPPC_CeilDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001220 ASSEMBLE_FLOAT_UNOP_RC(frip, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001221 break;
1222 case kPPC_TruncateDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001223 ASSEMBLE_FLOAT_UNOP_RC(friz, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001224 break;
1225 case kPPC_RoundDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001226 ASSEMBLE_FLOAT_UNOP_RC(frin, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001227 break;
1228 case kPPC_NegDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001229 ASSEMBLE_FLOAT_UNOP_RC(fneg, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001230 break;
1231 case kPPC_Cntlz32:
1232 __ cntlzw_(i.OutputRegister(), i.InputRegister(0));
1233 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1234 break;
1235#if V8_TARGET_ARCH_PPC64
1236 case kPPC_Cntlz64:
1237 __ cntlzd_(i.OutputRegister(), i.InputRegister(0));
1238 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1239 break;
1240#endif
1241 case kPPC_Popcnt32:
1242 __ popcntw(i.OutputRegister(), i.InputRegister(0));
1243 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1244 break;
1245#if V8_TARGET_ARCH_PPC64
1246 case kPPC_Popcnt64:
1247 __ popcntd(i.OutputRegister(), i.InputRegister(0));
1248 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1249 break;
1250#endif
1251 case kPPC_Cmp32:
1252 ASSEMBLE_COMPARE(cmpw, cmplw);
1253 break;
1254#if V8_TARGET_ARCH_PPC64
1255 case kPPC_Cmp64:
1256 ASSEMBLE_COMPARE(cmp, cmpl);
1257 break;
1258#endif
1259 case kPPC_CmpDouble:
1260 ASSEMBLE_FLOAT_COMPARE(fcmpu);
1261 break;
1262 case kPPC_Tst32:
1263 if (HasRegisterInput(instr, 1)) {
1264 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
1265 } else {
1266 __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
1267 }
1268#if V8_TARGET_ARCH_PPC64
1269 __ extsw(r0, r0, i.OutputRCBit());
1270#endif
1271 DCHECK_EQ(SetRC, i.OutputRCBit());
1272 break;
1273#if V8_TARGET_ARCH_PPC64
1274 case kPPC_Tst64:
1275 if (HasRegisterInput(instr, 1)) {
1276 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
1277 } else {
1278 __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
1279 }
1280 DCHECK_EQ(SetRC, i.OutputRCBit());
1281 break;
1282#endif
1283 case kPPC_Push:
1284 if (instr->InputAt(0)->IsDoubleRegister()) {
1285 __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
1286 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1287 } else {
1288 __ Push(i.InputRegister(0));
1289 frame_access_state()->IncreaseSPDelta(1);
1290 }
1291 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1292 break;
1293 case kPPC_PushFrame: {
1294 int num_slots = i.InputInt32(1);
1295 if (instr->InputAt(0)->IsDoubleRegister()) {
1296 __ stfdu(i.InputDoubleRegister(0),
1297 MemOperand(sp, -num_slots * kPointerSize));
1298 } else {
1299 __ StorePU(i.InputRegister(0),
1300 MemOperand(sp, -num_slots * kPointerSize));
1301 }
1302 break;
1303 }
1304 case kPPC_StoreToStackSlot: {
1305 int slot = i.InputInt32(1);
1306 if (instr->InputAt(0)->IsDoubleRegister()) {
1307 __ stfd(i.InputDoubleRegister(0), MemOperand(sp, slot * kPointerSize));
1308 } else {
1309 __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize));
1310 }
1311 break;
1312 }
1313 case kPPC_ExtendSignWord8:
1314 __ extsb(i.OutputRegister(), i.InputRegister(0));
1315 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1316 break;
1317 case kPPC_ExtendSignWord16:
1318 __ extsh(i.OutputRegister(), i.InputRegister(0));
1319 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1320 break;
1321#if V8_TARGET_ARCH_PPC64
1322 case kPPC_ExtendSignWord32:
1323 __ extsw(i.OutputRegister(), i.InputRegister(0));
1324 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1325 break;
1326 case kPPC_Uint32ToUint64:
1327 // Zero extend
1328 __ clrldi(i.OutputRegister(), i.InputRegister(0), Operand(32));
1329 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1330 break;
1331 case kPPC_Int64ToInt32:
1332 __ extsw(i.OutputRegister(), i.InputRegister(0));
1333 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1334 break;
1335 case kPPC_Int64ToFloat32:
1336 __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister());
1337 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1338 break;
1339 case kPPC_Int64ToDouble:
1340 __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister());
1341 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1342 break;
1343 case kPPC_Uint64ToFloat32:
1344 __ ConvertUnsignedInt64ToFloat(i.InputRegister(0),
1345 i.OutputDoubleRegister());
1346 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1347 break;
1348 case kPPC_Uint64ToDouble:
1349 __ ConvertUnsignedInt64ToDouble(i.InputRegister(0),
1350 i.OutputDoubleRegister());
1351 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1352 break;
1353#endif
Ben Murdoch097c5b22016-05-18 11:27:45 +01001354 case kPPC_Int32ToFloat32:
1355 __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister());
1356 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1357 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001358 case kPPC_Int32ToDouble:
1359 __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister());
1360 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1361 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001362 case kPPC_Uint32ToFloat32:
1363 __ ConvertUnsignedIntToFloat(i.InputRegister(0),
1364 i.OutputDoubleRegister());
1365 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1366 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001367 case kPPC_Uint32ToDouble:
1368 __ ConvertUnsignedIntToDouble(i.InputRegister(0),
1369 i.OutputDoubleRegister());
1370 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1371 break;
1372 case kPPC_DoubleToInt32:
1373 case kPPC_DoubleToUint32:
1374 case kPPC_DoubleToInt64: {
1375#if V8_TARGET_ARCH_PPC64
1376 bool check_conversion =
1377 (opcode == kPPC_DoubleToInt64 && i.OutputCount() > 1);
1378 if (check_conversion) {
1379 __ mtfsb0(VXCVI); // clear FPSCR:VXCVI bit
1380 }
1381#endif
1382 __ ConvertDoubleToInt64(i.InputDoubleRegister(0),
1383#if !V8_TARGET_ARCH_PPC64
1384 kScratchReg,
1385#endif
1386 i.OutputRegister(0), kScratchDoubleReg);
1387#if V8_TARGET_ARCH_PPC64
1388 if (check_conversion) {
1389 // Set 2nd output to zero if conversion fails.
1390 CRegister cr = cr7;
1391 int crbit = v8::internal::Assembler::encode_crbit(
1392 cr, static_cast<CRBit>(VXCVI % CRWIDTH));
1393 __ mcrfs(cr, VXCVI); // extract FPSCR field containing VXCVI into cr7
1394 if (CpuFeatures::IsSupported(ISELECT)) {
1395 __ li(i.OutputRegister(1), Operand(1));
1396 __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
1397 } else {
1398 __ li(i.OutputRegister(1), Operand::Zero());
1399 __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
1400 __ li(i.OutputRegister(1), Operand(1));
1401 }
1402 }
1403#endif
1404 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1405 break;
1406 }
1407#if V8_TARGET_ARCH_PPC64
1408 case kPPC_DoubleToUint64: {
1409 bool check_conversion = (i.OutputCount() > 1);
1410 if (check_conversion) {
1411 __ mtfsb0(VXCVI); // clear FPSCR:VXCVI bit
1412 }
1413 __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0),
1414 i.OutputRegister(0), kScratchDoubleReg);
1415 if (check_conversion) {
1416 // Set 2nd output to zero if conversion fails.
1417 CRegister cr = cr7;
1418 int crbit = v8::internal::Assembler::encode_crbit(
1419 cr, static_cast<CRBit>(VXCVI % CRWIDTH));
1420 __ mcrfs(cr, VXCVI); // extract FPSCR field containing VXCVI into cr7
1421 if (CpuFeatures::IsSupported(ISELECT)) {
1422 __ li(i.OutputRegister(1), Operand(1));
1423 __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
1424 } else {
1425 __ li(i.OutputRegister(1), Operand::Zero());
1426 __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
1427 __ li(i.OutputRegister(1), Operand(1));
1428 }
1429 }
1430 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1431 break;
1432 }
1433#endif
1434 case kPPC_DoubleToFloat32:
Ben Murdochda12d292016-06-02 14:46:10 +01001435 ASSEMBLE_FLOAT_UNOP_RC(frsp, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001436 break;
1437 case kPPC_Float32ToDouble:
1438 // Nothing to do.
1439 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1440 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1441 break;
1442 case kPPC_DoubleExtractLowWord32:
1443 __ MovDoubleLowToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1444 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1445 break;
1446 case kPPC_DoubleExtractHighWord32:
1447 __ MovDoubleHighToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1448 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1449 break;
1450 case kPPC_DoubleInsertLowWord32:
1451 __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1), r0);
1452 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1453 break;
1454 case kPPC_DoubleInsertHighWord32:
1455 __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1), r0);
1456 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1457 break;
1458 case kPPC_DoubleConstruct:
1459#if V8_TARGET_ARCH_PPC64
1460 __ MovInt64ComponentsToDouble(i.OutputDoubleRegister(),
1461 i.InputRegister(0), i.InputRegister(1), r0);
1462#else
1463 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0),
1464 i.InputRegister(1));
1465#endif
1466 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1467 break;
1468 case kPPC_BitcastFloat32ToInt32:
1469 __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1470 break;
1471 case kPPC_BitcastInt32ToFloat32:
1472 __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
1473 break;
1474#if V8_TARGET_ARCH_PPC64
1475 case kPPC_BitcastDoubleToInt64:
1476 __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
1477 break;
1478 case kPPC_BitcastInt64ToDouble:
1479 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
1480 break;
1481#endif
1482 case kPPC_LoadWordU8:
1483 ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
1484 break;
1485 case kPPC_LoadWordS8:
1486 ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
1487 __ extsb(i.OutputRegister(), i.OutputRegister());
1488 break;
1489 case kPPC_LoadWordU16:
1490 ASSEMBLE_LOAD_INTEGER(lhz, lhzx);
1491 break;
1492 case kPPC_LoadWordS16:
1493 ASSEMBLE_LOAD_INTEGER(lha, lhax);
1494 break;
1495 case kPPC_LoadWordS32:
1496 ASSEMBLE_LOAD_INTEGER(lwa, lwax);
1497 break;
1498#if V8_TARGET_ARCH_PPC64
1499 case kPPC_LoadWord64:
1500 ASSEMBLE_LOAD_INTEGER(ld, ldx);
1501 break;
1502#endif
1503 case kPPC_LoadFloat32:
1504 ASSEMBLE_LOAD_FLOAT(lfs, lfsx);
1505 break;
1506 case kPPC_LoadDouble:
1507 ASSEMBLE_LOAD_FLOAT(lfd, lfdx);
1508 break;
1509 case kPPC_StoreWord8:
1510 ASSEMBLE_STORE_INTEGER(stb, stbx);
1511 break;
1512 case kPPC_StoreWord16:
1513 ASSEMBLE_STORE_INTEGER(sth, sthx);
1514 break;
1515 case kPPC_StoreWord32:
1516 ASSEMBLE_STORE_INTEGER(stw, stwx);
1517 break;
1518#if V8_TARGET_ARCH_PPC64
1519 case kPPC_StoreWord64:
1520 ASSEMBLE_STORE_INTEGER(std, stdx);
1521 break;
1522#endif
1523 case kPPC_StoreFloat32:
1524 ASSEMBLE_STORE_FLOAT32();
1525 break;
1526 case kPPC_StoreDouble:
1527 ASSEMBLE_STORE_DOUBLE();
1528 break;
1529 case kCheckedLoadInt8:
1530 ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
1531 __ extsb(i.OutputRegister(), i.OutputRegister());
1532 break;
1533 case kCheckedLoadUint8:
1534 ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
1535 break;
1536 case kCheckedLoadInt16:
1537 ASSEMBLE_CHECKED_LOAD_INTEGER(lha, lhax);
1538 break;
1539 case kCheckedLoadUint16:
1540 ASSEMBLE_CHECKED_LOAD_INTEGER(lhz, lhzx);
1541 break;
1542 case kCheckedLoadWord32:
1543 ASSEMBLE_CHECKED_LOAD_INTEGER(lwa, lwax);
1544 break;
1545 case kCheckedLoadWord64:
1546#if V8_TARGET_ARCH_PPC64
1547 ASSEMBLE_CHECKED_LOAD_INTEGER(ld, ldx);
1548#else
1549 UNREACHABLE();
1550#endif
1551 break;
1552 case kCheckedLoadFloat32:
1553 ASSEMBLE_CHECKED_LOAD_FLOAT(lfs, lfsx, 32);
1554 break;
1555 case kCheckedLoadFloat64:
1556 ASSEMBLE_CHECKED_LOAD_FLOAT(lfd, lfdx, 64);
1557 break;
1558 case kCheckedStoreWord8:
1559 ASSEMBLE_CHECKED_STORE_INTEGER(stb, stbx);
1560 break;
1561 case kCheckedStoreWord16:
1562 ASSEMBLE_CHECKED_STORE_INTEGER(sth, sthx);
1563 break;
1564 case kCheckedStoreWord32:
1565 ASSEMBLE_CHECKED_STORE_INTEGER(stw, stwx);
1566 break;
1567 case kCheckedStoreWord64:
1568#if V8_TARGET_ARCH_PPC64
1569 ASSEMBLE_CHECKED_STORE_INTEGER(std, stdx);
1570#else
1571 UNREACHABLE();
1572#endif
1573 break;
1574 case kCheckedStoreFloat32:
1575 ASSEMBLE_CHECKED_STORE_FLOAT32();
1576 break;
1577 case kCheckedStoreFloat64:
1578 ASSEMBLE_CHECKED_STORE_DOUBLE();
1579 break;
1580 default:
1581 UNREACHABLE();
1582 break;
1583 }
1584} // NOLINT(readability/fn_size)
1585
1586
1587// Assembles branches after an instruction.
1588void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
1589 PPCOperandConverter i(this, instr);
1590 Label* tlabel = branch->true_label;
1591 Label* flabel = branch->false_label;
1592 ArchOpcode op = instr->arch_opcode();
1593 FlagsCondition condition = branch->condition;
1594 CRegister cr = cr0;
1595
1596 Condition cond = FlagsConditionToCondition(condition, op);
1597 if (op == kPPC_CmpDouble) {
1598 // check for unordered if necessary
1599 if (cond == le) {
1600 __ bunordered(flabel, cr);
1601 // Unnecessary for eq/lt since only FU bit will be set.
1602 } else if (cond == gt) {
1603 __ bunordered(tlabel, cr);
1604 // Unnecessary for ne/ge since only FU bit will be set.
1605 }
1606 }
1607 __ b(cond, tlabel, cr);
1608 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
1609}
1610
1611
1612void CodeGenerator::AssembleArchJump(RpoNumber target) {
1613 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
1614}
1615
1616
// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  PPCOperandConverter i(this, instr);
  Label done;
  ArchOpcode op = instr->arch_opcode();
  CRegister cr = cr0;
  // Tracks which immediate (0 or 1) has already been loaded into |reg|
  // (-1 == none yet) so redundant loads below can be skipped.
  int reg_value = -1;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // check for unordered if necessary
    if (cond == le) {
      // Unordered compares as "not <=": preload 0 and bail out early.
      reg_value = 0;
      __ li(reg, Operand::Zero());
      __ bunordered(&done, cr);
    } else if (cond == gt) {
      // Unordered compares as "not >" ... but branch target takes 1 here;
      // preload 1 and bail out early.
      reg_value = 1;
      __ li(reg, Operand(1));
      __ bunordered(&done, cr);
    }
    // Unnecessary for eq/lt & ne/ge since only FU bit will be set.
  }

  if (CpuFeatures::IsSupported(ISELECT)) {
    // Branchless materialization using the isel instruction.
    switch (cond) {
      case eq:
      case lt:
      case gt:
        if (reg_value != 1) __ li(reg, Operand(1));
        __ li(kScratchReg, Operand::Zero());
        __ isel(cond, reg, reg, kScratchReg, cr);
        break;
      case ne:
      case ge:
      case le:
        // Select on the negated (positive) condition instead.
        if (reg_value != 1) __ li(reg, Operand(1));
        // r0 implies logical zero in this form
        __ isel(NegateCondition(cond), reg, r0, reg, cr);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Fallback: load 0, then conditionally skip the load of 1.
    if (reg_value != 0) __ li(reg, Operand::Zero());
    __ b(NegateCondition(cond), &done, cr);
    __ li(reg, Operand(1));
  }
  __ bind(&done);
}
1673
1674
1675void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1676 PPCOperandConverter i(this, instr);
1677 Register input = i.InputRegister(0);
1678 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1679 __ Cmpi(input, Operand(i.InputInt32(index + 0)), r0);
1680 __ beq(GetLabel(i.InputRpo(index + 1)));
1681 }
1682 AssembleArchJump(i.InputRpo(1));
1683}
1684
1685
1686void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1687 PPCOperandConverter i(this, instr);
1688 Register input = i.InputRegister(0);
1689 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
1690 Label** cases = zone()->NewArray<Label*>(case_count);
1691 for (int32_t index = 0; index < case_count; ++index) {
1692 cases[index] = GetLabel(i.InputRpo(index + 2));
1693 }
1694 Label* const table = AddJumpTable(cases, case_count);
1695 __ Cmpli(input, Operand(case_count), r0);
1696 __ bge(GetLabel(i.InputRpo(1)));
1697 __ mov_label_addr(kScratchReg, table);
1698 __ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2));
1699 __ LoadPX(kScratchReg, MemOperand(kScratchReg, r0));
1700 __ Jump(kScratchReg);
1701}
1702
1703
1704void CodeGenerator::AssembleDeoptimizerCall(
1705 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
1706 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
1707 isolate(), deoptimization_id, bailout_type);
Ben Murdochda12d292016-06-02 14:46:10 +01001708 // TODO(turbofan): We should be able to generate better code by sharing the
1709 // actual final call site and just bl'ing to it here, similar to what we do
1710 // in the lithium backend.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001711 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1712}
1713
1714
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  // Only build a frame when this code actually requires one.
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsCFunctionCall()) {
      // C entry: emit a function descriptor where the ABI requires one,
      // then save lr/fp and establish the new frame pointer.
      __ function_descriptor();
      __ mflr(r0);
      if (FLAG_enable_embedded_constant_pool) {
        __ Push(r0, fp, kConstantPoolRegister);
        // Adjust FP to point to saved FP.
        __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
      } else {
        __ Push(r0, fp);
        __ mr(fp, sp);
      }
    } else if (descriptor->IsJSFunctionCall()) {
      // JS entry: emit the standard JS function prologue.
      __ Prologue(this->info()->GeneratePreagedPrologue(), ip);
    } else {
      StackFrame::Type type = info()->GetOutputStackFrameType();
      // TODO(mbrandy): Detect cases where ip is the entrypoint (for
      // efficient initialization of the constant pool pointer register).
      __ StubPrologue(type);
    }
  }

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // The unoptimized frame's slots already occupy part of the reservation.
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList double_saves = descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    // Keep the callee-saved double register area aligned.
    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  if (stack_shrink_slots > 0) {
    // Reserve all spill-slot space with a single stack adjustment.
    __ Add(sp, sp, -stack_shrink_slots * kPointerSize, r0);
  }

  // Save callee-saved Double registers.
  if (double_saves != 0) {
    __ MultiPushDoubles(double_saves);
    DCHECK(kNumCalleeSavedDoubles ==
           base::bits::CountPopulation32(double_saves));
    frame()->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
                                              (kDoubleSize / kPointerSize));
  }

  // Save callee-saved registers.
  const RegList saves =
      FLAG_enable_embedded_constant_pool
          ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
          : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPush(saves);
    // register save area does not include the fp or constant pool pointer.
    const int num_saves =
        kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0);
    DCHECK(num_saves == base::bits::CountPopulation32(saves));
    frame()->AllocateSavedCalleeRegisterSlots(num_saves);
  }
}
1784
1785
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  // Number of stack parameter slots to drop when returning.
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore registers.
  const RegList saves =
      FLAG_enable_embedded_constant_pool
          ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
          : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore double registers.
  const RegList double_saves = descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    __ MultiPopDoubles(double_saves);
  }

  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      // A shared return sequence already exists; branch to it rather than
      // duplicating the frame teardown.
      __ b(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      AssembleDeconstructFrame();
    }
  }
  __ Ret(pop_count);
}
1819
1820
// Emits PPC code that copies |source| into |destination| for the gap
// resolver. Dispatches on the operand kinds (register, stack slot, constant,
// double register, double stack slot); not all combinations are possible.
// kScratchReg and kScratchDoubleReg serve as temporaries for memory-to-memory
// moves; r0 is passed to LoadP/StoreP as the scratch for large offsets.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // GP register -> GP register or stack slot.
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Move(g.ToRegister(destination), src);
    } else {
      __ StoreP(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsStackSlot()) {
    // Stack slot -> GP register, or slot -> slot via kScratchReg.
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(g.ToRegister(destination), src, r0);
    } else {
      Register temp = kScratchReg;
      __ LoadP(temp, src, r0);
      __ StoreP(temp, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize the constant in a register first; if the destination is
      // a stack slot, spill from kScratchReg afterwards.
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          __ mov(dst, Operand(src.ToInt32()));
          break;
        case Constant::kInt64:
          __ mov(dst, Operand(src.ToInt64()));
          break;
        case Constant::kFloat32:
          // A float constant headed for a GP register/slot is boxed as a
          // tenured heap number.
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ mov(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int slot;
          // Prefer re-materializing the object from the current frame or
          // from the root list over embedding the handle in the code.
          if (IsMaterializableFromFrame(src_object, &slot)) {
            __ LoadP(dst, g.SlotToMemOperand(slot));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): loading RPO constants on PPC.
          break;
      }
      if (destination->IsStackSlot()) {
        __ StoreP(dst, g.ToMemOperand(destination), r0);
      }
    } else {
      // FP constant -> double register or double stack slot; the literal is
      // loaded via LoadDoubleLiteral using kScratchReg as a GP temp.
      DoubleRegister dst = destination->IsDoubleRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      double value = (src.type() == Constant::kFloat32) ? src.ToFloat32()
                                                        : src.ToFloat64();
      __ LoadDoubleLiteral(dst, value, kScratchReg);
      if (destination->IsDoubleStackSlot()) {
        __ StoreDouble(dst, g.ToMemOperand(destination), r0);
      }
    }
  } else if (source->IsDoubleRegister()) {
    // FP register -> FP register or double stack slot.
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ StoreDouble(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsDoubleStackSlot()) {
    // Double stack slot -> FP register, or slot -> slot via
    // kScratchDoubleReg.
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ LoadDouble(g.ToDoubleRegister(destination), src, r0);
    } else {
      DoubleRegister temp = kScratchDoubleReg;
      __ LoadDouble(temp, src, r0);
      __ StoreDouble(temp, g.ToMemOperand(destination), r0);
    }
  } else {
    UNREACHABLE();
  }
}
1921
1922
// Emits PPC code that exchanges the contents of |source| and |destination|
// for the gap resolver. kScratchReg and r0 are the GP swap temporaries;
// kScratchDoubleReg and d0 are the FP ones.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ mr(temp, src);
      __ mr(src, dst);
      __ mr(dst, temp);
    } else {
      // Register-stack slot: load the slot into the register and spill the
      // old register value into the slot.
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mr(temp, src);
      __ LoadP(src, dst);
      __ StoreP(temp, dst);
    }
#if V8_TARGET_ARCH_PPC64
  // On PPC64 a pointer-sized LoadP/StoreP covers a 64-bit double slot too,
  // so FP slot-slot swaps share the GP path below.
  } else if (source->IsStackSlot() || source->IsDoubleStackSlot()) {
#else
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
#endif
    // Slot-slot swap using two GP temporaries.
    Register temp_0 = kScratchReg;
    Register temp_1 = r0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ LoadP(temp_0, src);
    __ LoadP(temp_1, dst);
    __ StoreP(temp_0, dst);
    __ StoreP(temp_1, src);
  } else if (source->IsDoubleRegister()) {
    DoubleRegister temp = kScratchDoubleReg;
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      // FP register-register.
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ fmr(temp, src);
      __ fmr(src, dst);
      __ fmr(dst, temp);
    } else {
      // FP register-double stack slot.
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ fmr(temp, src);
      __ lfd(src, dst);
      __ stfd(temp, dst);
    }
#if !V8_TARGET_ARCH_PPC64
  // 32-bit only: a double slot is wider than a GP register, so slot-slot
  // swaps of doubles go through two FP temporaries.
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleStackSlot());
    DoubleRegister temp_0 = kScratchDoubleReg;
    DoubleRegister temp_1 = d0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ lfd(temp_0, src);
    __ lfd(temp_1, dst);
    __ stfd(temp_0, dst);
    __ stfd(temp_1, src);
#endif
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
1990
1991
1992void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
1993 for (size_t index = 0; index < target_count; ++index) {
1994 __ emit_label_addr(targets[index]);
1995 }
1996}
1997
1998
void CodeGenerator::AddNopForSmiCodeInlining() {
  // Intentionally a no-op on PPC: no nops are inserted for inlined Smi code.
}
2002
2003
// Pads the instruction stream with nops so that a lazy-deopt patch
// (Deoptimizer::patch_size() bytes) applied here cannot overlap the
// previous lazy-bailout site.
void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    // Block trampoline pool emission for duration of padding so the padding
    // stays contiguous.
    v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
        masm());
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    // Padding must be a whole number of fixed-width PPC instructions.
    DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= v8::internal::Assembler::kInstrSize;
    }
  }
}
2025
2026#undef __
2027
2028} // namespace compiler
2029} // namespace internal
2030} // namespace v8