// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/ppc/macro-assembler-ppc.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->

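// General-purpose scratch register reserved for the code generator's internal
// use (in addition to r0, which several of the helpers below also clobber).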
#define kScratchReg r11


// Adds PPC-specific methods to convert InstructionOperands.
class PPCOperandConverter final : public InstructionOperandConverter {
 public:
  PPCOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  size_t OutputCount() { return instr_->OutputCount(); }

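  // Most PPC integer instructions come in a "record" form (Rc=1) that
  // implicitly updates condition register field cr0. When this instruction's
  // result feeds a branch or a flag materialization, request the record form
  // so that no separate compare is needed.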
  RCBit OutputRCBit() const {
    switch (instr_->flags_mode()) {
      case kFlags_branch:
      case kFlags_set:
        return SetRC;
      case kFlags_none:
        return LeaveRC;
    }
    UNREACHABLE();
    return LeaveRC;
  }

  bool CompareLogical() const {
    switch (instr_->flags_condition()) {
      case kUnsignedLessThan:
      case kUnsignedGreaterThanOrEqual:
      case kUnsignedLessThanOrEqual:
      case kUnsignedGreaterThan:
        return true;
      default:
        return false;
    }
    UNREACHABLE();
    return false;
  }

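  // 32-bit (and, on PPC64, 64-bit) integer constants become real immediates;
  // floating-point constants are materialized as tenured heap numbers and
  // passed by handle.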
  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
#if V8_TARGET_ARCH_PPC64
        return Operand(constant.ToInt64());
#endif
      case Constant::kExternalReference:
      case Constant::kHeapObject:
      case Constant::kRpoNumber:
        break;
    }
    UNREACHABLE();
    return Operand::Zero();
  }

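  // Decodes the addressing mode encoded in the opcode: kMode_MRI is base
  // register + immediate displacement, kMode_MRR is base register + index
  // register. Consumes the corresponding instruction inputs starting at
  // *first_index.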
  MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
    const size_t index = *first_index;
    *mode = AddressingModeField::decode(instr_->opcode());
    switch (*mode) {
      case kMode_None:
        break;
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(r0);
  }

  MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
    return MemoryOperand(mode, &first_index);
  }

  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        AllocatedOperand::cast(op)->index());
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};


static inline bool HasRegisterInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsRegister();
}


namespace {

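// Out-of-line code fragments: slow paths emitted after the main instruction
// stream. Each fragment is entered via a branch to entry() from the fast
// path and rejoins it at exit().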
class OutOfLineLoadNAN32 final : public OutOfLineCode {
 public:
  OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(),
                         kScratchReg);
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineLoadNAN64 final : public OutOfLineCode {
 public:
  OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(),
                         kScratchReg);
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ li(result_, Operand::Zero()); }

 private:
  Register const result_;
};

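// Slow path of the store write barrier. It re-checks whether the stored
// value actually needs recording (not a Smi, target page interesting) and
// then calls the RecordWriteStub, preserving lr when the frame was elided.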
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(offset),
        offset_immediate_(0),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(no_reg),
        offset_immediate_(offset),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (!frame()->needs_frame()) {
      // We need to save and restore lr if the frame was elided.
      __ mflr(scratch1_);
      __ Push(scratch1_);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    if (offset_.is(no_reg)) {
      __ addi(scratch1_, object_, Operand(offset_immediate_));
    } else {
      DCHECK_EQ(0, offset_immediate_);
      __ add(scratch1_, object_, offset_);
    }
    __ CallStub(&stub);
    if (!frame()->needs_frame()) {
      // We need to save and restore lr if the frame was elided.
      __ Pop(scratch1_);
      __ mtlr(scratch1_);
    }
  }

 private:
  Register const object_;
  Register const offset_;
  int32_t const offset_immediate_;  // Valid if offset_.is(no_reg).
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};

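// Maps a FlagsCondition to the PPC condition to test after the compare (or
// record-form update) that produced it. Overflow is special: on PPC64,
// 32-bit add/sub overflow is detected by checking whether the 64-bit result
// still fits in 32 bits, hence the ne/eq encodings below.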
Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
    case kUnsignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
    case kUnsignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
    case kUnsignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
    case kUnsignedGreaterThan:
      return gt;
    case kOverflow:
      // Overflow checked for add/sub only.
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add:
        case kPPC_Sub:
          return lt;
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
#if V8_TARGET_ARCH_PPC64
          return ne;
#else
          return lt;
#endif
        default:
          break;
      }
      break;
    case kNotOverflow:
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add:
        case kPPC_Sub:
          return ge;
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
#if V8_TARGET_ARCH_PPC64
          return eq;
#else
          return ge;
#endif
        default:
          break;
      }
      break;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}

}  // namespace

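// The ASSEMBLE_* macros below expand to the emission sequence for one
// instruction kind; "i" is the PPCOperandConverter and "instr" the current
// Instruction in the enclosing AssembleArchInstruction. The _RC variants
// propagate the record bit computed by OutputRCBit().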
#define ASSEMBLE_FLOAT_UNOP_RC(asm_instr)                            \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.OutputRCBit());                                   \
  } while (0)


#define ASSEMBLE_FLOAT_BINOP_RC(asm_instr)                           \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.InputDoubleRegister(1), i.OutputRCBit());         \
  } while (0)


#define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm)           \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1));                    \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1));                   \
    }                                                          \
  } while (0)


#define ASSEMBLE_BINOP_RC(asm_instr_reg, asm_instr_imm)        \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1), i.OutputRCBit());  \
    }                                                          \
  } while (0)


#define ASSEMBLE_BINOP_INT_RC(asm_instr_reg, asm_instr_imm)    \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputInt32(1), i.OutputRCBit());      \
    }                                                          \
  } while (0)

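// Overflow detection strategy: on 32-bit targets, AddAndCheckForOverflow /
// SubAndCheckForOverflow leave the overflow indication in the sign of a
// scratch value (tested via lt/ge). On PPC64, 32-bit operations are done in
// 64-bit registers and TestIfInt32 checks whether the result is still a
// valid sign-extended 32-bit value. Subtracting an immediate is folded into
// adding its negation.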
#define ASSEMBLE_ADD_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputInt32(1), kScratchReg, r0);      \
    }                                                                   \
  } while (0)


#define ASSEMBLE_SUB_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                -i.InputInt32(1), kScratchReg, r0);     \
    }                                                                   \
  } while (0)


#if V8_TARGET_ARCH_PPC64
#define ASSEMBLE_ADD_WITH_OVERFLOW32()           \
  do {                                           \
    ASSEMBLE_BINOP(add, addi);                   \
    __ TestIfInt32(i.OutputRegister(), r0, cr0); \
  } while (0)


#define ASSEMBLE_SUB_WITH_OVERFLOW32()           \
  do {                                           \
    ASSEMBLE_BINOP(sub, subi);                   \
    __ TestIfInt32(i.OutputRegister(), r0, cr0); \
  } while (0)
#else
#define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW
#define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW
#endif


#define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr)                        \
  do {                                                                 \
    const CRegister cr = cr0;                                          \
    if (HasRegisterInput(instr, 1)) {                                  \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1), cr);     \
      } else {                                                         \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1), cr);      \
      }                                                                \
    } else {                                                           \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \
      } else {                                                         \
        __ cmp_instr##i(i.InputRegister(0), i.InputImmediate(1), cr);  \
      }                                                                \
    }                                                                  \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                 \
  } while (0)


#define ASSEMBLE_FLOAT_COMPARE(cmp_instr)                                 \
  do {                                                                    \
    const CRegister cr = cr0;                                             \
    __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1), cr); \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                    \
  } while (0)


#define ASSEMBLE_MODULO(div_instr, mul_instr)                        \
  do {                                                               \
    const Register scratch = kScratchReg;                            \
    __ div_instr(scratch, i.InputRegister(0), i.InputRegister(1));   \
    __ mul_instr(scratch, scratch, i.InputRegister(1));              \
    __ sub(i.OutputRegister(), i.InputRegister(0), scratch, LeaveOE, \
           i.OutputRCBit());                                         \
  } while (0)


#define ASSEMBLE_FLOAT_MODULO()                                               \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ PrepareCallCFunction(0, 2, kScratchReg);                               \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                         \
                            i.InputDoubleRegister(1));                        \
    __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \
                     0, 2);                                                   \
    __ MovFromFloatResult(i.OutputDoubleRegister());                          \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                                      \
  } while (0)

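// fsel selects between its last two operands based on whether the first
// (condition) operand is >= 0.0, so a single fsub + fsel yields branch-free
// min/max. If the inputs compare unordered (NaN), the condition is treated
// as false, so these sequences do not implement IEEE NaN propagation.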
#define ASSEMBLE_FLOAT_MAX(scratch_reg)                                       \
  do {                                                                        \
    __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(0),  \
            i.InputDoubleRegister(1));                                        \
  } while (0)


#define ASSEMBLE_FLOAT_MIN(scratch_reg)                                       \
  do {                                                                        \
    __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(1),  \
            i.InputDoubleRegister(0));                                        \
  } while (0)


#define ASSEMBLE_LOAD_FLOAT(asm_instr, asm_instrx)    \
  do {                                                \
    DoubleRegister result = i.OutputDoubleRegister(); \
    AddressingMode mode = kMode_None;                 \
    MemOperand operand = i.MemoryOperand(&mode);      \
    if (mode == kMode_MRI) {                          \
      __ asm_instr(result, operand);                  \
    } else {                                          \
      __ asm_instrx(result, operand);                 \
    }                                                 \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());              \
  } while (0)


#define ASSEMBLE_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                               \
    Register result = i.OutputRegister();            \
    AddressingMode mode = kMode_None;                \
    MemOperand operand = i.MemoryOperand(&mode);     \
    if (mode == kMode_MRI) {                         \
      __ asm_instr(result, operand);                 \
    } else {                                         \
      __ asm_instrx(result, operand);                \
    }                                                \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());             \
  } while (0)


#define ASSEMBLE_STORE_FLOAT32()                         \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ frsp(kScratchDoubleReg, value);                   \
    if (mode == kMode_MRI) {                             \
      __ stfs(kScratchDoubleReg, operand);               \
    } else {                                             \
      __ stfsx(kScratchDoubleReg, operand);              \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)


#define ASSEMBLE_STORE_DOUBLE()                          \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    if (mode == kMode_MRI) {                             \
      __ stfd(value, operand);                           \
    } else {                                             \
      __ stfdx(value, operand);                          \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)


#define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx)    \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    Register value = i.InputRegister(index);             \
    if (mode == kMode_MRI) {                             \
      __ asm_instr(value, operand);                      \
    } else {                                             \
      __ asm_instrx(value, operand);                     \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)

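// Checked memory accesses (used for typed-array loads/stores): the offset is
// compared unsigned against the length in input 2; out-of-bounds loads branch
// to an out-of-line fragment producing NaN (floats) or zero (integers), while
// out-of-bounds stores are simply skipped.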
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width)  \
  do {                                                             \
    DoubleRegister result = i.OutputDoubleRegister();              \
    size_t index = 0;                                              \
    AddressingMode mode = kMode_None;                              \
    MemOperand operand = i.MemoryOperand(&mode, index);            \
    DCHECK_EQ(kMode_MRR, mode);                                    \
    Register offset = operand.rb();                                \
    __ extsw(offset, offset);                                      \
    if (HasRegisterInput(instr, 2)) {                              \
      __ cmplw(offset, i.InputRegister(2));                        \
    } else {                                                       \
      __ cmplwi(offset, i.InputImmediate(2));                      \
    }                                                              \
    auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \
    __ bge(ool->entry());                                          \
    if (mode == kMode_MRI) {                                       \
      __ asm_instr(result, operand);                               \
    } else {                                                       \
      __ asm_instrx(result, operand);                              \
    }                                                              \
    __ bind(ool->exit());                                          \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                           \
  } while (0)


// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                                       \
    Register result = i.OutputRegister();                    \
    size_t index = 0;                                        \
    AddressingMode mode = kMode_None;                        \
    MemOperand operand = i.MemoryOperand(&mode, index);      \
    DCHECK_EQ(kMode_MRR, mode);                              \
    Register offset = operand.rb();                          \
    __ extsw(offset, offset);                                \
    if (HasRegisterInput(instr, 2)) {                        \
      __ cmplw(offset, i.InputRegister(2));                  \
    } else {                                                 \
      __ cmplwi(offset, i.InputImmediate(2));                \
    }                                                        \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ bge(ool->entry());                                    \
    if (mode == kMode_MRI) {                                 \
      __ asm_instr(result, operand);                         \
    } else {                                                 \
      __ asm_instrx(result, operand);                        \
    }                                                        \
    __ bind(ool->exit());                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                     \
  } while (0)


// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_STORE_FLOAT32()                \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    __ extsw(offset, offset);                           \
    if (HasRegisterInput(instr, 2)) {                   \
      __ cmplw(offset, i.InputRegister(2));             \
    } else {                                            \
      __ cmplwi(offset, i.InputImmediate(2));           \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    __ frsp(kScratchDoubleReg, value);                  \
    if (mode == kMode_MRI) {                            \
      __ stfs(kScratchDoubleReg, operand);              \
    } else {                                            \
      __ stfsx(kScratchDoubleReg, operand);             \
    }                                                   \
    __ bind(&done);                                     \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
  } while (0)


// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_STORE_DOUBLE()                 \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    __ extsw(offset, offset);                           \
    if (HasRegisterInput(instr, 2)) {                   \
      __ cmplw(offset, i.InputRegister(2));             \
    } else {                                            \
      __ cmplwi(offset, i.InputImmediate(2));           \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    if (mode == kMode_MRI) {                            \
      __ stfd(value, operand);                          \
    } else {                                            \
      __ stfdx(value, operand);                         \
    }                                                   \
    __ bind(&done);                                     \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
  } while (0)


// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \
  do {                                                        \
    Label done;                                               \
    size_t index = 0;                                         \
    AddressingMode mode = kMode_None;                         \
    MemOperand operand = i.MemoryOperand(&mode, index);       \
    DCHECK_EQ(kMode_MRR, mode);                               \
    Register offset = operand.rb();                           \
    __ extsw(offset, offset);                                 \
    if (HasRegisterInput(instr, 2)) {                         \
      __ cmplw(offset, i.InputRegister(2));                   \
    } else {                                                  \
      __ cmplwi(offset, i.InputImmediate(2));                 \
    }                                                         \
    __ bge(&done);                                            \
    Register value = i.InputRegister(3);                      \
    if (mode == kMode_MRI) {                                  \
      __ asm_instr(value, operand);                           \
    } else {                                                  \
      __ asm_instrx(value, operand);                          \
    }                                                         \
    __ bind(&done);                                           \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                      \
  } while (0)

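// Tail-call support: the activation record is adjusted by the difference in
// stack parameter counts between caller and callee. A positive slot delta
// pops stack space here, before the jump; a negative delta is allocated in
// AssemblePrepareTailCall below.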
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame()->needs_frame()) {
    __ RestoreFrameStateForTailCall();
  }
  frame_access_state()->SetFrameAccessToSP();
}


// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  PPCOperandConverter i(this, instr);
  ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());

  switch (opcode) {
    case kArchCallCodeObject: {
      v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
          masm());
      EnsureSpaceForLazyDeopt();
      if (HasRegisterInput(instr, 0)) {
        __ addi(ip, i.InputRegister(0),
                Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Call(ip);
      } else {
        __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      }
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (HasRegisterInput(instr, 0)) {
        __ addi(ip, i.InputRegister(0),
                Operand(Code::kHeaderSize - kHeapObjectTag));
        __ Jump(ip);
      } else {
        // We cannot use the constant pool to load the target since
        // we've already restored the caller's frame.
        ConstantPoolUnavailableScope constant_pool_unavailable(masm());
        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
          masm());
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ LoadP(kScratchReg,
                 FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, kScratchReg);
        __ Assert(eq, kWrongFunctionContext);
      }
      __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Call(ip);
      RecordCallPosition(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ LoadP(kScratchReg,
                 FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, kScratchReg);
        __ Assert(eq, kWrongFunctionContext);
      }
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Jump(ip);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, kScratchReg);
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      break;
    }
    case kArchRet:
      AssembleReturn();
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchStackPointer:
      __ mr(i.OutputRegister(), sp);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchFramePointer:
      __ mr(i.OutputRegister(), fp);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->frame()->needs_frame()) {
        __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ mr(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      // TODO(mbrandy): move slow call to stub out of line.
      __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      OutOfLineRecordWrite* ool;

      AddressingMode addressing_mode =
          AddressingModeField::decode(instr->opcode());
      if (addressing_mode == kMode_MRI) {
        int32_t offset = i.InputInt32(1);
        ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
                                                scratch0, scratch1, mode);
        __ StoreP(value, MemOperand(object, offset));
      } else {
        DCHECK_EQ(kMode_MRR, addressing_mode);
        Register offset(i.InputRegister(1));
        ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
                                                scratch0, scratch1, mode);
        __ StorePX(value, MemOperand(object, offset));
      }
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                       ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
              Operand(offset.offset()));
      break;
    }
    case kPPC_And:
      if (HasRegisterInput(instr, 1)) {
        __ and_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                i.OutputRCBit());
      } else {
        __ andi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
      }
      break;
    case kPPC_AndComplement:
      __ andc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
              i.OutputRCBit());
      break;
    case kPPC_Or:
      if (HasRegisterInput(instr, 1)) {
        __ orx(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.OutputRCBit());
      } else {
        __ ori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
        DCHECK_EQ(LeaveRC, i.OutputRCBit());
      }
      break;
    case kPPC_OrComplement:
      __ orc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
             i.OutputRCBit());
      break;
    case kPPC_Xor:
      if (HasRegisterInput(instr, 1)) {
        __ xor_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                i.OutputRCBit());
      } else {
        __ xori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
        DCHECK_EQ(LeaveRC, i.OutputRCBit());
      }
      break;
    case kPPC_ShiftLeft32:
      ASSEMBLE_BINOP_RC(slw, slwi);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ShiftLeft64:
      ASSEMBLE_BINOP_RC(sld, sldi);
      break;
#endif
    case kPPC_ShiftRight32:
      ASSEMBLE_BINOP_RC(srw, srwi);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ShiftRight64:
      ASSEMBLE_BINOP_RC(srd, srdi);
      break;
#endif
    case kPPC_ShiftRightAlg32:
      ASSEMBLE_BINOP_INT_RC(sraw, srawi);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ShiftRightAlg64:
      ASSEMBLE_BINOP_INT_RC(srad, sradi);
      break;
#endif
    case kPPC_RotRight32:
      if (HasRegisterInput(instr, 1)) {
        __ subfic(kScratchReg, i.InputRegister(1), Operand(32));
        __ rotlw(i.OutputRegister(), i.InputRegister(0), kScratchReg,
                 i.OutputRCBit());
      } else {
        int sh = i.InputInt32(1);
        __ rotrwi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
      }
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_RotRight64:
      if (HasRegisterInput(instr, 1)) {
        __ subfic(kScratchReg, i.InputRegister(1), Operand(64));
        __ rotld(i.OutputRegister(), i.InputRegister(0), kScratchReg,
                 i.OutputRCBit());
      } else {
        int sh = i.InputInt32(1);
        __ rotrdi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
      }
      break;
#endif
    case kPPC_Not:
      __ notx(i.OutputRegister(), i.InputRegister(0), i.OutputRCBit());
      break;
    case kPPC_RotLeftAndMask32:
      __ rlwinm(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
                31 - i.InputInt32(2), 31 - i.InputInt32(3), i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_RotLeftAndClear64:
      __ rldic(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
               63 - i.InputInt32(2), i.OutputRCBit());
      break;
    case kPPC_RotLeftAndClearLeft64:
      __ rldicl(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
                63 - i.InputInt32(2), i.OutputRCBit());
      break;
    case kPPC_RotLeftAndClearRight64:
      __ rldicr(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
                63 - i.InputInt32(2), i.OutputRCBit());
      break;
#endif
    case kPPC_Add:
#if V8_TARGET_ARCH_PPC64
      if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
        ASSEMBLE_ADD_WITH_OVERFLOW();
      } else {
#endif
        if (HasRegisterInput(instr, 1)) {
          __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                 LeaveOE, i.OutputRCBit());
        } else {
          __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
          DCHECK_EQ(LeaveRC, i.OutputRCBit());
        }
#if V8_TARGET_ARCH_PPC64
      }
#endif
      break;
    case kPPC_AddWithOverflow32:
      ASSEMBLE_ADD_WITH_OVERFLOW32();
      break;
    case kPPC_AddDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fadd);
      break;
    case kPPC_Sub:
#if V8_TARGET_ARCH_PPC64
      if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
        ASSEMBLE_SUB_WITH_OVERFLOW();
      } else {
#endif
        if (HasRegisterInput(instr, 1)) {
          __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                 LeaveOE, i.OutputRCBit());
        } else {
          __ subi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
          DCHECK_EQ(LeaveRC, i.OutputRCBit());
        }
#if V8_TARGET_ARCH_PPC64
      }
#endif
      break;
    case kPPC_SubWithOverflow32:
      ASSEMBLE_SUB_WITH_OVERFLOW32();
      break;
    case kPPC_SubDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fsub);
      break;
    case kPPC_Mul32:
      __ mullw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               LeaveOE, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Mul64:
      __ mulld(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               LeaveOE, i.OutputRCBit());
      break;
#endif
    case kPPC_MulHigh32:
      __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
               i.OutputRCBit());
      break;
    case kPPC_MulHighU32:
      __ mulhwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
                i.OutputRCBit());
      break;
    case kPPC_MulDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fmul);
      break;
    case kPPC_Div32:
      __ divw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Div64:
      __ divd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_DivU32:
      __ divwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_DivU64:
      __ divdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_DivDouble:
      ASSEMBLE_FLOAT_BINOP_RC(fdiv);
      break;
    case kPPC_Mod32:
      ASSEMBLE_MODULO(divw, mullw);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Mod64:
      ASSEMBLE_MODULO(divd, mulld);
      break;
#endif
    case kPPC_ModU32:
      ASSEMBLE_MODULO(divwu, mullw);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ModU64:
      ASSEMBLE_MODULO(divdu, mulld);
      break;
#endif
    case kPPC_ModDouble:
      // TODO(bmeurer): We should really get rid of this special instruction,
      // and generate a CallAddress instruction instead.
      ASSEMBLE_FLOAT_MODULO();
      break;
    case kPPC_Neg:
      __ neg(i.OutputRegister(), i.InputRegister(0), LeaveOE, i.OutputRCBit());
      break;
    case kPPC_MaxDouble:
      ASSEMBLE_FLOAT_MAX(kScratchDoubleReg);
      break;
    case kPPC_MinDouble:
      ASSEMBLE_FLOAT_MIN(kScratchDoubleReg);
      break;
    case kPPC_AbsDouble:
      ASSEMBLE_FLOAT_UNOP_RC(fabs);
      break;
    case kPPC_SqrtDouble:
      ASSEMBLE_FLOAT_UNOP_RC(fsqrt);
      break;
    case kPPC_FloorDouble:
      ASSEMBLE_FLOAT_UNOP_RC(frim);
      break;
    case kPPC_CeilDouble:
      ASSEMBLE_FLOAT_UNOP_RC(frip);
      break;
    case kPPC_TruncateDouble:
      ASSEMBLE_FLOAT_UNOP_RC(friz);
      break;
    case kPPC_RoundDouble:
      ASSEMBLE_FLOAT_UNOP_RC(frin);
      break;
    case kPPC_NegDouble:
      ASSEMBLE_FLOAT_UNOP_RC(fneg);
      break;
    case kPPC_Cntlz32:
      __ cntlzw_(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Cntlz64:
      __ cntlzd_(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Popcnt32:
      __ popcntw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Popcnt64:
      __ popcntd(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Cmp32:
      ASSEMBLE_COMPARE(cmpw, cmplw);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Cmp64:
      ASSEMBLE_COMPARE(cmp, cmpl);
      break;
#endif
    case kPPC_CmpDouble:
      ASSEMBLE_FLOAT_COMPARE(fcmpu);
      break;
    case kPPC_Tst32:
      if (HasRegisterInput(instr, 1)) {
        __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
      } else {
        __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
      }
#if V8_TARGET_ARCH_PPC64
      __ extsw(r0, r0, i.OutputRCBit());
#endif
      DCHECK_EQ(SetRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_Tst64:
      if (HasRegisterInput(instr, 1)) {
        __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
      } else {
        __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
      }
      DCHECK_EQ(SetRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Push:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else {
        __ Push(i.InputRegister(0));
        frame_access_state()->IncreaseSPDelta(1);
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_PushFrame: {
      int num_slots = i.InputInt32(1);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ stfdu(i.InputDoubleRegister(0),
                 MemOperand(sp, -num_slots * kPointerSize));
      } else {
        __ StorePU(i.InputRegister(0),
                   MemOperand(sp, -num_slots * kPointerSize));
      }
      break;
    }
    case kPPC_StoreToStackSlot: {
      int slot = i.InputInt32(1);
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ stfd(i.InputDoubleRegister(0), MemOperand(sp, slot * kPointerSize));
      } else {
        __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize));
      }
      break;
    }
    case kPPC_ExtendSignWord8:
      __ extsb(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_ExtendSignWord16:
      __ extsh(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_ExtendSignWord32:
      __ extsw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint32ToUint64:
      // Zero extend
      __ clrldi(i.OutputRegister(), i.InputRegister(0), Operand(32));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int64ToInt32:
      __ extsw(i.OutputRegister(), i.InputRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int64ToFloat32:
      __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int64ToDouble:
      __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint64ToFloat32:
      __ ConvertUnsignedInt64ToFloat(i.InputRegister(0),
                                     i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint64ToDouble:
      __ ConvertUnsignedInt64ToDouble(i.InputRegister(0),
                                      i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
#endif
    case kPPC_Int32ToFloat32:
      __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Int32ToDouble:
      __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint32ToFloat32:
      __ ConvertUnsignedIntToFloat(i.InputRegister(0),
                                   i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_Uint32ToDouble:
      __ ConvertUnsignedIntToDouble(i.InputRegister(0),
                                    i.OutputDoubleRegister());
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
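    // The double-to-integer conversions below optionally produce a second
    // output indicating success: the FPSCR VXCVI (invalid conversion) bit is
    // cleared beforehand, then extracted into cr7 after the conversion and
    // materialized as 0 (conversion failed) or 1 (succeeded).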
    case kPPC_DoubleToInt32:
    case kPPC_DoubleToUint32:
    case kPPC_DoubleToInt64: {
#if V8_TARGET_ARCH_PPC64
      bool check_conversion =
          (opcode == kPPC_DoubleToInt64 && i.OutputCount() > 1);
      if (check_conversion) {
        __ mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      }
#endif
      __ ConvertDoubleToInt64(i.InputDoubleRegister(0),
#if !V8_TARGET_ARCH_PPC64
                              kScratchReg,
#endif
                              i.OutputRegister(0), kScratchDoubleReg);
#if V8_TARGET_ARCH_PPC64
      if (check_conversion) {
        // Set 2nd output to zero if conversion fails.
        CRegister cr = cr7;
        int crbit = v8::internal::Assembler::encode_crbit(
            cr, static_cast<CRBit>(VXCVI % CRWIDTH));
        __ mcrfs(cr, VXCVI);  // extract FPSCR field containing VXCVI into cr7
        if (CpuFeatures::IsSupported(ISELECT)) {
          __ li(i.OutputRegister(1), Operand(1));
          __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
        } else {
          __ li(i.OutputRegister(1), Operand::Zero());
          __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
          __ li(i.OutputRegister(1), Operand(1));
        }
      }
#endif
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    }
#if V8_TARGET_ARCH_PPC64
    case kPPC_DoubleToUint64: {
      bool check_conversion = (i.OutputCount() > 1);
      if (check_conversion) {
        __ mtfsb0(VXCVI);  // clear FPSCR:VXCVI bit
      }
      __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0),
                                      i.OutputRegister(0), kScratchDoubleReg);
      if (check_conversion) {
        // Set 2nd output to zero if conversion fails.
        CRegister cr = cr7;
        int crbit = v8::internal::Assembler::encode_crbit(
            cr, static_cast<CRBit>(VXCVI % CRWIDTH));
        __ mcrfs(cr, VXCVI);  // extract FPSCR field containing VXCVI into cr7
        if (CpuFeatures::IsSupported(ISELECT)) {
          __ li(i.OutputRegister(1), Operand(1));
          __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
        } else {
          __ li(i.OutputRegister(1), Operand::Zero());
          __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
          __ li(i.OutputRegister(1), Operand(1));
        }
      }
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    }
#endif
    case kPPC_DoubleToFloat32:
      ASSEMBLE_FLOAT_UNOP_RC(frsp);
      break;
    case kPPC_Float32ToDouble:
      // Nothing to do.
      __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleExtractLowWord32:
      __ MovDoubleLowToInt(i.OutputRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleExtractHighWord32:
      __ MovDoubleHighToInt(i.OutputRegister(), i.InputDoubleRegister(0));
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleInsertLowWord32:
      __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1), r0);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleInsertHighWord32:
      __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1), r0);
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_DoubleConstruct:
#if V8_TARGET_ARCH_PPC64
      __ MovInt64ComponentsToDouble(i.OutputDoubleRegister(),
                                    i.InputRegister(0), i.InputRegister(1), r0);
#else
      __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0),
                          i.InputRegister(1));
#endif
      DCHECK_EQ(LeaveRC, i.OutputRCBit());
      break;
    case kPPC_BitcastFloat32ToInt32:
      __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kPPC_BitcastInt32ToFloat32:
      __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_BitcastDoubleToInt64:
      __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kPPC_BitcastInt64ToDouble:
      __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
      break;
#endif
    case kPPC_LoadWordU8:
      ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
      break;
    case kPPC_LoadWordS8:
      ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
      __ extsb(i.OutputRegister(), i.OutputRegister());
      break;
    case kPPC_LoadWordU16:
      ASSEMBLE_LOAD_INTEGER(lhz, lhzx);
      break;
    case kPPC_LoadWordS16:
      ASSEMBLE_LOAD_INTEGER(lha, lhax);
      break;
    case kPPC_LoadWordS32:
      ASSEMBLE_LOAD_INTEGER(lwa, lwax);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_LoadWord64:
      ASSEMBLE_LOAD_INTEGER(ld, ldx);
      break;
#endif
    case kPPC_LoadFloat32:
      ASSEMBLE_LOAD_FLOAT(lfs, lfsx);
      break;
    case kPPC_LoadDouble:
      ASSEMBLE_LOAD_FLOAT(lfd, lfdx);
      break;
    case kPPC_StoreWord8:
      ASSEMBLE_STORE_INTEGER(stb, stbx);
      break;
    case kPPC_StoreWord16:
      ASSEMBLE_STORE_INTEGER(sth, sthx);
      break;
    case kPPC_StoreWord32:
      ASSEMBLE_STORE_INTEGER(stw, stwx);
      break;
#if V8_TARGET_ARCH_PPC64
    case kPPC_StoreWord64:
      ASSEMBLE_STORE_INTEGER(std, stdx);
      break;
#endif
    case kPPC_StoreFloat32:
      ASSEMBLE_STORE_FLOAT32();
      break;
    case kPPC_StoreDouble:
      ASSEMBLE_STORE_DOUBLE();
      break;
    case kCheckedLoadInt8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
      __ extsb(i.OutputRegister(), i.OutputRegister());
      break;
    case kCheckedLoadUint8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
      break;
    case kCheckedLoadInt16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lha, lhax);
      break;
    case kCheckedLoadUint16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lhz, lhzx);
      break;
    case kCheckedLoadWord32:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lwa, lwax);
      break;
    case kCheckedLoadWord64:
#if V8_TARGET_ARCH_PPC64
      ASSEMBLE_CHECKED_LOAD_INTEGER(ld, ldx);
#else
      UNREACHABLE();
#endif
      break;
    case kCheckedLoadFloat32:
      ASSEMBLE_CHECKED_LOAD_FLOAT(lfs, lfsx, 32);
      break;
    case kCheckedLoadFloat64:
      ASSEMBLE_CHECKED_LOAD_FLOAT(lfd, lfdx, 64);
      break;
    case kCheckedStoreWord8:
      ASSEMBLE_CHECKED_STORE_INTEGER(stb, stbx);
      break;
    case kCheckedStoreWord16:
      ASSEMBLE_CHECKED_STORE_INTEGER(sth, sthx);
      break;
    case kCheckedStoreWord32:
      ASSEMBLE_CHECKED_STORE_INTEGER(stw, stwx);
      break;
    case kCheckedStoreWord64:
#if V8_TARGET_ARCH_PPC64
      ASSEMBLE_CHECKED_STORE_INTEGER(std, stdx);
#else
      UNREACHABLE();
#endif
      break;
    case kCheckedStoreFloat32:
      ASSEMBLE_CHECKED_STORE_FLOAT32();
      break;
    case kCheckedStoreFloat64:
      ASSEMBLE_CHECKED_STORE_DOUBLE();
      break;
    default:
      UNREACHABLE();
      break;
  }
}  // NOLINT(readability/fn_size)


// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  PPCOperandConverter i(this, instr);
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  ArchOpcode op = instr->arch_opcode();
  FlagsCondition condition = branch->condition;
  CRegister cr = cr0;

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // check for unordered if necessary
    if (cond == le) {
      __ bunordered(flabel, cr);
      // Unnecessary for eq/lt since only FU bit will be set.
    } else if (cond == gt) {
      __ bunordered(tlabel, cr);
      // Unnecessary for ne/ge since only FU bit will be set.
    }
  }
  __ b(cond, tlabel, cr);
  if (!branch->fallthru) __ b(flabel);  // no fallthru to flabel.
}


void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
}


// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  PPCOperandConverter i(this, instr);
  Label done;
  ArchOpcode op = instr->arch_opcode();
  CRegister cr = cr0;
  int reg_value = -1;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // check for unordered if necessary
    if (cond == le) {
      reg_value = 0;
      __ li(reg, Operand::Zero());
      __ bunordered(&done, cr);
    } else if (cond == gt) {
      reg_value = 1;
      __ li(reg, Operand(1));
      __ bunordered(&done, cr);
    }
    // Unnecessary for eq/lt & ne/ge since only FU bit will be set.
  }

  if (CpuFeatures::IsSupported(ISELECT)) {
    switch (cond) {
      case eq:
      case lt:
      case gt:
        if (reg_value != 1) __ li(reg, Operand(1));
        __ li(kScratchReg, Operand::Zero());
        __ isel(cond, reg, reg, kScratchReg, cr);
        break;
      case ne:
      case ge:
      case le:
        if (reg_value != 1) __ li(reg, Operand(1));
        // r0 implies logical zero in this form
        __ isel(NegateCondition(cond), reg, r0, reg, cr);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    if (reg_value != 0) __ li(reg, Operand::Zero());
    __ b(NegateCondition(cond), &done, cr);
    __ li(reg, Operand(1));
  }
  __ bind(&done);
}


void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  PPCOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ Cmpi(input, Operand(i.InputInt32(index + 0)), r0);
    __ beq(GetLabel(i.InputRpo(index + 1)));
  }
  AssembleArchJump(i.InputRpo(1));
}

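// A table switch compares the input against the case count, falls back to
// the default block when out of range, and otherwise jumps indirectly
// through a table of absolute label addresses (emitted by AssembleJumpTable).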
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  PPCOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
  Label** cases = zone()->NewArray<Label*>(case_count);
  for (int32_t index = 0; index < case_count; ++index) {
    cases[index] = GetLabel(i.InputRpo(index + 2));
  }
  Label* const table = AddJumpTable(cases, case_count);
  __ Cmpli(input, Operand(case_count), r0);
  __ bge(GetLabel(i.InputRpo(1)));
  __ mov_label_addr(kScratchReg, table);
  __ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2));
  __ LoadPX(kScratchReg, MemOperand(kScratchReg, r0));
  __ Jump(kScratchReg);
}


void CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}

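// The prologue emitted depends on the incoming call descriptor: C function
// calls build a full frame (and emit a function descriptor on ABIs that use
// one), JS calls use the standard JS prologue, stub calls get a stub frame,
// and leaf code with an elided frame gets none at all.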
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->IsCFunctionCall()) {
    __ function_descriptor();
    __ mflr(r0);
    if (FLAG_enable_embedded_constant_pool) {
      __ Push(r0, fp, kConstantPoolRegister);
      // Adjust FP to point to saved FP.
      __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
    } else {
      __ Push(r0, fp);
      __ mr(fp, sp);
    }
  } else if (descriptor->IsJSFunctionCall()) {
    __ Prologue(this->info()->GeneratePreagedPrologue(), ip);
  } else if (frame()->needs_frame()) {
    if (!ABI_CALL_VIA_IP && info()->output_code_kind() == Code::WASM_FUNCTION) {
      // TODO(mbrandy): Restrict only to the wasm wrapper case.
      __ StubPrologue();
    } else {
      __ StubPrologue(ip);
    }
  } else {
    frame()->SetElidedFrameSizeInSlots(0);
  }
  frame_access_state()->SetFrameAccessToDefault();

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly
    // from the unoptimized frame. Thus, all that needs to be done is to
    // allocate the remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList double_saves = descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  if (stack_shrink_slots > 0) {
    __ Add(sp, sp, -stack_shrink_slots * kPointerSize, r0);
  }

  // Save callee-saved Double registers.
  if (double_saves != 0) {
    __ MultiPushDoubles(double_saves);
    DCHECK(kNumCalleeSavedDoubles ==
           base::bits::CountPopulation32(double_saves));
    frame()->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
                                              (kDoubleSize / kPointerSize));
  }

  // Save callee-saved registers.
  const RegList saves =
      FLAG_enable_embedded_constant_pool
          ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
          : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPush(saves);
    // register save area does not include the fp or constant pool pointer.
    const int num_saves =
        kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0);
    DCHECK(num_saves == base::bits::CountPopulation32(saves));
    frame()->AllocateSavedCalleeRegisterSlots(num_saves);
  }
}


void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore registers.
  const RegList saves =
      FLAG_enable_embedded_constant_pool
          ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
          : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore double registers.
  const RegList double_saves = descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    __ MultiPopDoubles(double_saves);
  }

  if (descriptor->IsCFunctionCall()) {
    __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
  } else if (frame()->needs_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ b(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      __ LeaveFrame(StackFrame::MANUAL, pop_count * kPointerSize);
    }
  } else {
    __ Drop(pop_count);
  }
  __ Ret();
}


void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Move(g.ToRegister(destination), src);
    } else {
      __ StoreP(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(g.ToRegister(destination), src, r0);
    } else {
      Register temp = kScratchReg;
      __ LoadP(temp, src, r0);
      __ StoreP(temp, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          __ mov(dst, Operand(src.ToInt32()));
          break;
        case Constant::kInt64:
          __ mov(dst, Operand(src.ToInt64()));
          break;
        case Constant::kFloat32:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ mov(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int offset;
          if (IsMaterializableFromFrame(src_object, &offset)) {
            __ LoadP(dst, MemOperand(fp, offset));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): loading RPO constants on PPC.
          break;
      }
      if (destination->IsStackSlot()) {
        __ StoreP(dst, g.ToMemOperand(destination), r0);
      }
    } else {
      DoubleRegister dst = destination->IsDoubleRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      double value = (src.type() == Constant::kFloat32) ? src.ToFloat32()
                                                        : src.ToFloat64();
      __ LoadDoubleLiteral(dst, value, kScratchReg);
      if (destination->IsDoubleStackSlot()) {
        __ StoreDouble(dst, g.ToMemOperand(destination), r0);
      }
    }
  } else if (source->IsDoubleRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ StoreDouble(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ LoadDouble(g.ToDoubleRegister(destination), src, r0);
    } else {
      DoubleRegister temp = kScratchDoubleReg;
      __ LoadDouble(temp, src, r0);
      __ StoreDouble(temp, g.ToMemOperand(destination), r0);
    }
  } else {
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ mr(temp, src);
      __ mr(src, dst);
      __ mr(dst, temp);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mr(temp, src);
      __ LoadP(src, dst);
      __ StoreP(temp, dst);
    }
#if V8_TARGET_ARCH_PPC64
  } else if (source->IsStackSlot() || source->IsDoubleStackSlot()) {
#else
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
#endif
    Register temp_0 = kScratchReg;
    Register temp_1 = r0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ LoadP(temp_0, src);
    __ LoadP(temp_1, dst);
    __ StoreP(temp_0, dst);
    __ StoreP(temp_1, src);
  } else if (source->IsDoubleRegister()) {
    DoubleRegister temp = kScratchDoubleReg;
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ fmr(temp, src);
      __ fmr(src, dst);
      __ fmr(dst, temp);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ fmr(temp, src);
      __ lfd(src, dst);
      __ stfd(temp, dst);
    }
#if !V8_TARGET_ARCH_PPC64
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleStackSlot());
    DoubleRegister temp_0 = kScratchDoubleReg;
    DoubleRegister temp_1 = d0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ lfd(temp_0, src);
    __ lfd(temp_1, dst);
    __ stfd(temp_0, dst);
    __ stfd(temp_1, src);
#endif
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  for (size_t index = 0; index < target_count; ++index) {
    __ emit_label_addr(targets[index]);
  }
}


void CodeGenerator::AddNopForSmiCodeInlining() {
  // We do not insert nops for inlined Smi code.
}


void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    // Block trampoline pool emission for duration of padding.
    v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
        masm());
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= v8::internal::Assembler::kInstrSize;
    }
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8