blob: 8a0c585a380db4b0a56e012a3eda94afa347ca61 [file] [log] [blame]
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
7#include "src/ast/scopes.h"
8#include "src/compiler/code-generator-impl.h"
9#include "src/compiler/gap-resolver.h"
10#include "src/compiler/node-matchers.h"
11#include "src/compiler/osr.h"
12#include "src/ppc/macro-assembler-ppc.h"
13
14namespace v8 {
15namespace internal {
16namespace compiler {
17
18#define __ masm()->
19
20
21#define kScratchReg r11
22
23
24// Adds PPC-specific methods to convert InstructionOperands.
25class PPCOperandConverter final : public InstructionOperandConverter {
26 public:
27 PPCOperandConverter(CodeGenerator* gen, Instruction* instr)
28 : InstructionOperandConverter(gen, instr) {}
29
30 size_t OutputCount() { return instr_->OutputCount(); }
31
32 RCBit OutputRCBit() const {
33 switch (instr_->flags_mode()) {
34 case kFlags_branch:
Ben Murdochda12d292016-06-02 14:46:10 +010035 case kFlags_deoptimize:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000036 case kFlags_set:
37 return SetRC;
38 case kFlags_none:
39 return LeaveRC;
40 }
41 UNREACHABLE();
42 return LeaveRC;
43 }
44
45 bool CompareLogical() const {
46 switch (instr_->flags_condition()) {
47 case kUnsignedLessThan:
48 case kUnsignedGreaterThanOrEqual:
49 case kUnsignedLessThanOrEqual:
50 case kUnsignedGreaterThan:
51 return true;
52 default:
53 return false;
54 }
55 UNREACHABLE();
56 return false;
57 }
58
59 Operand InputImmediate(size_t index) {
60 Constant constant = ToConstant(instr_->InputAt(index));
61 switch (constant.type()) {
62 case Constant::kInt32:
63 return Operand(constant.ToInt32());
64 case Constant::kFloat32:
65 return Operand(
66 isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
67 case Constant::kFloat64:
68 return Operand(
69 isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
70 case Constant::kInt64:
71#if V8_TARGET_ARCH_PPC64
72 return Operand(constant.ToInt64());
73#endif
74 case Constant::kExternalReference:
75 case Constant::kHeapObject:
76 case Constant::kRpoNumber:
77 break;
78 }
79 UNREACHABLE();
80 return Operand::Zero();
81 }
82
83 MemOperand MemoryOperand(AddressingMode* mode, size_t* first_index) {
84 const size_t index = *first_index;
85 *mode = AddressingModeField::decode(instr_->opcode());
86 switch (*mode) {
87 case kMode_None:
88 break;
89 case kMode_MRI:
90 *first_index += 2;
91 return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
92 case kMode_MRR:
93 *first_index += 2;
94 return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
95 }
96 UNREACHABLE();
97 return MemOperand(r0);
98 }
99
100 MemOperand MemoryOperand(AddressingMode* mode, size_t first_index = 0) {
101 return MemoryOperand(mode, &first_index);
102 }
103
104 MemOperand ToMemOperand(InstructionOperand* op) const {
105 DCHECK_NOT_NULL(op);
Ben Murdochc5610432016-08-08 18:44:38 +0100106 DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
Ben Murdochda12d292016-06-02 14:46:10 +0100107 return SlotToMemOperand(AllocatedOperand::cast(op)->index());
108 }
109
110 MemOperand SlotToMemOperand(int slot) const {
111 FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000112 return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
113 }
114};
115
116
117static inline bool HasRegisterInput(Instruction* instr, size_t index) {
118 return instr->InputAt(index)->IsRegister();
119}
120
121
122namespace {
123
124class OutOfLineLoadNAN32 final : public OutOfLineCode {
125 public:
126 OutOfLineLoadNAN32(CodeGenerator* gen, DoubleRegister result)
127 : OutOfLineCode(gen), result_(result) {}
128
129 void Generate() final {
130 __ LoadDoubleLiteral(result_, std::numeric_limits<float>::quiet_NaN(),
131 kScratchReg);
132 }
133
134 private:
135 DoubleRegister const result_;
136};
137
138
139class OutOfLineLoadNAN64 final : public OutOfLineCode {
140 public:
141 OutOfLineLoadNAN64(CodeGenerator* gen, DoubleRegister result)
142 : OutOfLineCode(gen), result_(result) {}
143
144 void Generate() final {
145 __ LoadDoubleLiteral(result_, std::numeric_limits<double>::quiet_NaN(),
146 kScratchReg);
147 }
148
149 private:
150 DoubleRegister const result_;
151};
152
153
154class OutOfLineLoadZero final : public OutOfLineCode {
155 public:
156 OutOfLineLoadZero(CodeGenerator* gen, Register result)
157 : OutOfLineCode(gen), result_(result) {}
158
159 void Generate() final { __ li(result_, Operand::Zero()); }
160
161 private:
162 Register const result_;
163};
164
165
// Out-of-line slow path for a store needing a write barrier: filters out
// stores that cannot create an interesting pointer, then calls the
// RecordWriteStub with the slot address computed into scratch1_.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  // Register-offset variant; offset_immediate_ is unused and kept at 0.
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(offset),
        offset_immediate_(0),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  // Immediate-offset variant; offset_ is no_reg to signal this case.
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, int32_t offset,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        offset_(no_reg),
        offset_immediate_(offset),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  void Generate() final {
    // Smi values never need a barrier; skip straight to the exit.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Skip the stub if the page holding the value is not being tracked.
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore lr if the frame was elided.
      __ mflr(scratch1_);
      __ Push(scratch1_);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // Compute the address of the written slot into scratch1_ for the stub.
    if (offset_.is(no_reg)) {
      __ addi(scratch1_, object_, Operand(offset_immediate_));
    } else {
      DCHECK_EQ(0, offset_immediate_);
      __ add(scratch1_, object_, offset_);
    }
    __ CallStub(&stub);
    if (must_save_lr_) {
      // Restore lr saved above (frame was elided).
      __ Pop(scratch1_);
      __ mtlr(scratch1_);
    }
  }

 private:
  Register const object_;
  Register const offset_;
  int32_t const offset_immediate_;  // Valid if offset_.is(no_reg).
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  bool must_save_lr_;
};
237
238
// Maps a machine-independent FlagsCondition (plus the opcode, needed to
// validate overflow conditions) onto a PPC condition code. Overflow is
// only meaningful for the add/sub opcodes listed below; the overflow
// indication is materialized in a register and tested with lt/ge.
Condition FlagsConditionToCondition(FlagsCondition condition, ArchOpcode op) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
    case kUnsignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
    case kUnsignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
    case kUnsignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
    case kUnsignedGreaterThan:
      return gt;
    case kOverflow:
      // Overflow checked for add/sub only.
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add:
        case kPPC_Sub:
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
          return lt;
        default:
          break;
      }
      break;
    case kNotOverflow:
      switch (op) {
#if V8_TARGET_ARCH_PPC64
        case kPPC_Add:
        case kPPC_Sub:
#endif
        case kPPC_AddWithOverflow32:
        case kPPC_SubWithOverflow32:
          return ge;
        default:
          break;
      }
      break;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}
290
291} // namespace
292
// Emits a one-input FP instruction, forwarding the instruction's RC bit.
// When |round| is nonzero the double result is rounded to single
// precision in place via frsp.
#define ASSEMBLE_FLOAT_UNOP_RC(asm_instr, round)                     \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.OutputRCBit());                                   \
    if (round) {                                                     \
      __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());   \
    }                                                                \
  } while (0)

// Two-input variant of ASSEMBLE_FLOAT_UNOP_RC.
#define ASSEMBLE_FLOAT_BINOP_RC(asm_instr, round)                    \
  do {                                                               \
    __ asm_instr(i.OutputDoubleRegister(), i.InputDoubleRegister(0), \
                 i.InputDoubleRegister(1), i.OutputRCBit());         \
    if (round) {                                                     \
      __ frsp(i.OutputDoubleRegister(), i.OutputDoubleRegister());   \
    }                                                                \
  } while (0)
310
// Binary op whose second input may be a register or an immediate; picks
// the matching assembler mnemonic.
#define ASSEMBLE_BINOP(asm_instr_reg, asm_instr_imm)           \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1));                    \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1));                   \
    }                                                          \
  } while (0)


// As ASSEMBLE_BINOP, but also forwards the instruction's RC bit so the
// condition register is updated when a flags consumer follows.
#define ASSEMBLE_BINOP_RC(asm_instr_reg, asm_instr_imm)        \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputImmediate(1), i.OutputRCBit());  \
    }                                                          \
  } while (0)


// As ASSEMBLE_BINOP_RC, but the immediate form takes a raw int32 (e.g.
// shift amounts) rather than an Operand.
#define ASSEMBLE_BINOP_INT_RC(asm_instr_reg, asm_instr_imm)    \
  do {                                                         \
    if (HasRegisterInput(instr, 1)) {                          \
      __ asm_instr_reg(i.OutputRegister(), i.InputRegister(0), \
                       i.InputRegister(1), i.OutputRCBit());   \
    } else {                                                   \
      __ asm_instr_imm(i.OutputRegister(), i.InputRegister(0), \
                       i.InputInt32(1), i.OutputRCBit());      \
    }                                                          \
  } while (0)


// Add that leaves an overflow indication in kScratchReg (r0 is a second
// scratch for the macro assembler helper).
#define ASSEMBLE_ADD_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputInt32(1), kScratchReg, r0);      \
    }                                                                   \
  } while (0)


// Subtract with overflow check; the immediate case is implemented as an
// add of the negated immediate.
#define ASSEMBLE_SUB_WITH_OVERFLOW()                                    \
  do {                                                                  \
    if (HasRegisterInput(instr, 1)) {                                   \
      __ SubAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                i.InputRegister(1), kScratchReg, r0);   \
    } else {                                                            \
      __ AddAndCheckForOverflow(i.OutputRegister(), i.InputRegister(0), \
                                -i.InputInt32(1), kScratchReg, r0);     \
    }                                                                   \
  } while (0)
369
370
#if V8_TARGET_ARCH_PPC64
// 32-bit overflow variants on PPC64: sign-extend the overflow indicator
// from 32 bits and set CR0 so the flags consumer sees a 32-bit result.
#define ASSEMBLE_ADD_WITH_OVERFLOW32()         \
  do {                                         \
    ASSEMBLE_ADD_WITH_OVERFLOW();              \
    __ extsw(kScratchReg, kScratchReg, SetRC); \
  } while (0)

#define ASSEMBLE_SUB_WITH_OVERFLOW32()         \
  do {                                         \
    ASSEMBLE_SUB_WITH_OVERFLOW();              \
    __ extsw(kScratchReg, kScratchReg, SetRC); \
  } while (0)
#else
// On 32-bit PPC the plain variants already operate on 32-bit values.
#define ASSEMBLE_ADD_WITH_OVERFLOW32 ASSEMBLE_ADD_WITH_OVERFLOW
#define ASSEMBLE_SUB_WITH_OVERFLOW32 ASSEMBLE_SUB_WITH_OVERFLOW
#endif
387
388
// Integer compare into cr0. Chooses signed vs. logical (unsigned) form
// from the instruction's condition, and the immediate mnemonic is formed
// by token-pasting an 'i' suffix (cmp -> cmpi, cmpl -> cmpli).
#define ASSEMBLE_COMPARE(cmp_instr, cmpl_instr)                        \
  do {                                                                 \
    const CRegister cr = cr0;                                          \
    if (HasRegisterInput(instr, 1)) {                                  \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr(i.InputRegister(0), i.InputRegister(1), cr);     \
      } else {                                                         \
        __ cmp_instr(i.InputRegister(0), i.InputRegister(1), cr);      \
      }                                                                \
    } else {                                                           \
      if (i.CompareLogical()) {                                        \
        __ cmpl_instr##i(i.InputRegister(0), i.InputImmediate(1), cr); \
      } else {                                                         \
        __ cmp_instr##i(i.InputRegister(0), i.InputImmediate(1), cr);  \
      }                                                                \
    }                                                                  \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                 \
  } while (0)


// FP compare into cr0; comparisons always feed a flags consumer.
#define ASSEMBLE_FLOAT_COMPARE(cmp_instr)                                 \
  do {                                                                    \
    const CRegister cr = cr0;                                             \
    __ cmp_instr(i.InputDoubleRegister(0), i.InputDoubleRegister(1), cr); \
    DCHECK_EQ(SetRC, i.OutputRCBit());                                    \
  } while (0)
415
416
// Integer modulo computed as a - (a / b) * b, since PPC (pre-ISA 3.0)
// has no modulo instruction.
#define ASSEMBLE_MODULO(div_instr, mul_instr)                        \
  do {                                                               \
    const Register scratch = kScratchReg;                            \
    __ div_instr(scratch, i.InputRegister(0), i.InputRegister(1));   \
    __ mul_instr(scratch, scratch, i.InputRegister(1));              \
    __ sub(i.OutputRegister(), i.InputRegister(0), scratch, LeaveOE, \
           i.OutputRCBit());                                         \
  } while (0)


// Double modulo via a C call to the runtime's fmod implementation.
#define ASSEMBLE_FLOAT_MODULO()                                            \
  do {                                                                     \
    FrameScope scope(masm(), StackFrame::MANUAL);                          \
    __ PrepareCallCFunction(0, 2, kScratchReg);                            \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                      \
                            i.InputDoubleRegister(1));                     \
    __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()), \
                     0, 2);                                                \
    __ MovFromFloatResult(i.OutputDoubleRegister());                       \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                                   \
  } while (0)
438
439
// Branchless max via fsub + fsel (selects based on the sign of a-b).
// NOTE(review): fsel-based min/max does not follow IEEE-754 semantics
// for NaN inputs (a NaN difference selects the second fsel operand) —
// presumably acceptable for these opcodes; confirm against the users.
#define ASSEMBLE_FLOAT_MAX(scratch_reg)                                       \
  do {                                                                        \
    __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(0),  \
            i.InputDoubleRegister(1));                                        \
  } while (0)


// Branchless min: same as max with the fsel select operands swapped.
#define ASSEMBLE_FLOAT_MIN(scratch_reg)                                       \
  do {                                                                        \
    __ fsub(scratch_reg, i.InputDoubleRegister(0), i.InputDoubleRegister(1)); \
    __ fsel(i.OutputDoubleRegister(), scratch_reg, i.InputDoubleRegister(1),  \
            i.InputDoubleRegister(0));                                        \
  } while (0)
454
455
// FP load: MRI uses the D-form (base + displacement) mnemonic, anything
// else the X-form (indexed) variant.
#define ASSEMBLE_LOAD_FLOAT(asm_instr, asm_instrx)    \
  do {                                                \
    DoubleRegister result = i.OutputDoubleRegister(); \
    AddressingMode mode = kMode_None;                 \
    MemOperand operand = i.MemoryOperand(&mode);      \
    if (mode == kMode_MRI) {                          \
      __ asm_instr(result, operand);                  \
    } else {                                          \
      __ asm_instrx(result, operand);                 \
    }                                                 \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());              \
  } while (0)


// Integer load; same D-form/X-form selection as ASSEMBLE_LOAD_FLOAT.
#define ASSEMBLE_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                               \
    Register result = i.OutputRegister();            \
    AddressingMode mode = kMode_None;                \
    MemOperand operand = i.MemoryOperand(&mode);     \
    if (mode == kMode_MRI) {                         \
      __ asm_instr(result, operand);                 \
    } else {                                         \
      __ asm_instrx(result, operand);                \
    }                                                \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());             \
  } while (0)


// float32 store: the double value is first rounded to single precision
// into kScratchDoubleReg before stfs/stfsx.
#define ASSEMBLE_STORE_FLOAT32()                         \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    __ frsp(kScratchDoubleReg, value);                   \
    if (mode == kMode_MRI) {                             \
      __ stfs(kScratchDoubleReg, operand);               \
    } else {                                             \
      __ stfsx(kScratchDoubleReg, operand);              \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)


// float64 store; no rounding needed.
#define ASSEMBLE_STORE_DOUBLE()                          \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    DoubleRegister value = i.InputDoubleRegister(index); \
    if (mode == kMode_MRI) {                             \
      __ stfd(value, operand);                           \
    } else {                                             \
      __ stfdx(value, operand);                          \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)


// Integer store; the value register follows the memory-operand inputs.
#define ASSEMBLE_STORE_INTEGER(asm_instr, asm_instrx)    \
  do {                                                   \
    size_t index = 0;                                    \
    AddressingMode mode = kMode_None;                    \
    MemOperand operand = i.MemoryOperand(&mode, &index); \
    Register value = i.InputRegister(index);             \
    if (mode == kMode_MRI) {                             \
      __ asm_instr(value, operand);                      \
    } else {                                             \
      __ asm_instrx(value, operand);                     \
    }                                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                 \
  } while (0)
528
#if V8_TARGET_ARCH_PPC64
// TODO(mbrandy): fix paths that produce garbage in offset's upper 32-bits.
// Zero-extends a 32-bit offset register in place (clear the top 32 bits);
// a no-op on 32-bit targets.
#define CleanUInt32(x) __ ClearLeftImm(x, x, Operand(32))
#else
#define CleanUInt32(x)
#endif
535
// Checked FP load: unsigned-compares the offset against the length
// (input 2) and branches to an out-of-line path that yields NaN when out
// of bounds. |width| selects OutOfLineLoadNAN32/64.
// NOTE(review): the addressing mode is DCHECKed to be kMode_MRR, so the
// kMode_MRI branch looks unreachable in debug builds — presumably kept
// defensively for release builds; confirm against the instruction
// selector.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(asm_instr, asm_instrx, width) \
  do {                                                            \
    DoubleRegister result = i.OutputDoubleRegister();             \
    size_t index = 0;                                             \
    AddressingMode mode = kMode_None;                             \
    MemOperand operand = i.MemoryOperand(&mode, index);           \
    DCHECK_EQ(kMode_MRR, mode);                                   \
    Register offset = operand.rb();                               \
    if (HasRegisterInput(instr, 2)) {                             \
      __ cmplw(offset, i.InputRegister(2));                       \
    } else {                                                      \
      __ cmplwi(offset, i.InputImmediate(2));                     \
    }                                                             \
    auto ool = new (zone()) OutOfLineLoadNAN##width(this, result); \
    __ bge(ool->entry());                                         \
    if (mode == kMode_MRI) {                                      \
      __ asm_instr(result, operand);                              \
    } else {                                                      \
      CleanUInt32(offset);                                        \
      __ asm_instrx(result, operand);                             \
    }                                                             \
    __ bind(ool->exit());                                         \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                          \
  } while (0)

// Checked integer load; out-of-bounds yields zero via OutOfLineLoadZero.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                                       \
    Register result = i.OutputRegister();                    \
    size_t index = 0;                                        \
    AddressingMode mode = kMode_None;                        \
    MemOperand operand = i.MemoryOperand(&mode, index);      \
    DCHECK_EQ(kMode_MRR, mode);                              \
    Register offset = operand.rb();                          \
    if (HasRegisterInput(instr, 2)) {                        \
      __ cmplw(offset, i.InputRegister(2));                  \
    } else {                                                 \
      __ cmplwi(offset, i.InputImmediate(2));                \
    }                                                        \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ bge(ool->entry());                                    \
    if (mode == kMode_MRI) {                                 \
      __ asm_instr(result, operand);                         \
    } else {                                                 \
      CleanUInt32(offset);                                   \
      __ asm_instrx(result, operand);                        \
    }                                                        \
    __ bind(ool->exit());                                    \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                     \
  } while (0)

// Checked float32 store; silently skips the store when out of bounds.
// The value (input 3) is rounded to single precision first.
#define ASSEMBLE_CHECKED_STORE_FLOAT32()                \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    if (HasRegisterInput(instr, 2)) {                   \
      __ cmplw(offset, i.InputRegister(2));             \
    } else {                                            \
      __ cmplwi(offset, i.InputImmediate(2));           \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    __ frsp(kScratchDoubleReg, value);                  \
    if (mode == kMode_MRI) {                            \
      __ stfs(kScratchDoubleReg, operand);              \
    } else {                                            \
      CleanUInt32(offset);                              \
      __ stfsx(kScratchDoubleReg, operand);             \
    }                                                   \
    __ bind(&done);                                     \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
  } while (0)

// Checked float64 store; silently skips the store when out of bounds.
#define ASSEMBLE_CHECKED_STORE_DOUBLE()                 \
  do {                                                  \
    Label done;                                         \
    size_t index = 0;                                   \
    AddressingMode mode = kMode_None;                   \
    MemOperand operand = i.MemoryOperand(&mode, index); \
    DCHECK_EQ(kMode_MRR, mode);                         \
    Register offset = operand.rb();                     \
    if (HasRegisterInput(instr, 2)) {                   \
      __ cmplw(offset, i.InputRegister(2));             \
    } else {                                            \
      __ cmplwi(offset, i.InputImmediate(2));           \
    }                                                   \
    __ bge(&done);                                      \
    DoubleRegister value = i.InputDoubleRegister(3);    \
    if (mode == kMode_MRI) {                            \
      __ stfd(value, operand);                          \
    } else {                                            \
      CleanUInt32(offset);                              \
      __ stfdx(value, operand);                         \
    }                                                   \
    __ bind(&done);                                     \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                \
  } while (0)

// Checked integer store; silently skips the store when out of bounds.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr, asm_instrx) \
  do {                                                        \
    Label done;                                               \
    size_t index = 0;                                         \
    AddressingMode mode = kMode_None;                         \
    MemOperand operand = i.MemoryOperand(&mode, index);       \
    DCHECK_EQ(kMode_MRR, mode);                               \
    Register offset = operand.rb();                           \
    if (HasRegisterInput(instr, 2)) {                         \
      __ cmplw(offset, i.InputRegister(2));                   \
    } else {                                                  \
      __ cmplwi(offset, i.InputImmediate(2));                 \
    }                                                         \
    __ bge(&done);                                            \
    Register value = i.InputRegister(3);                      \
    if (mode == kMode_MRI) {                                  \
      __ asm_instr(value, operand);                           \
    } else {                                                  \
      CleanUInt32(offset);                                    \
      __ asm_instrx(value, operand);                          \
    }                                                         \
    __ bind(&done);                                           \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                      \
  } while (0)
661
// Atomic load: a full sync before the load, then a compare of the result
// against itself with a never-taken conditional branch followed by isync.
// NOTE(review): this cmp/bne/isync tail appears to be the PowerPC
// load-acquire idiom (the untaken branch + isync orders the load before
// subsequent accesses) — confirm against the Power ISA synchronization
// programming notes.
#define ASSEMBLE_ATOMIC_LOAD_INTEGER(asm_instr, asm_instrx) \
  do {                                                      \
    Label done;                                             \
    Register result = i.OutputRegister();                   \
    AddressingMode mode = kMode_None;                       \
    MemOperand operand = i.MemoryOperand(&mode);            \
    __ sync();                                              \
    if (mode == kMode_MRI) {                                \
      __ asm_instr(result, operand);                        \
    } else {                                                \
      __ asm_instrx(result, operand);                       \
    }                                                       \
    __ bind(&done);                                         \
    __ cmp(result, result);                                 \
    __ bne(&done);                                          \
    __ isync();                                             \
  } while (0)
// Atomic store: a full sync before the store orders it after all prior
// memory accesses.
#define ASSEMBLE_ATOMIC_STORE_INTEGER(asm_instr, asm_instrx) \
  do {                                                       \
    size_t index = 0;                                        \
    AddressingMode mode = kMode_None;                        \
    MemOperand operand = i.MemoryOperand(&mode, &index);     \
    Register value = i.InputRegister(index);                 \
    __ sync();                                               \
    if (mode == kMode_MRI) {                                 \
      __ asm_instr(value, operand);                          \
    } else {                                                 \
      __ asm_instrx(value, operand);                         \
    }                                                        \
    DCHECK_EQ(LeaveRC, i.OutputRCBit());                     \
  } while (0)
693
// Tears down the current frame (restores fp/lr) without checking the
// frame-type marker (StackFrame::MANUAL).
void CodeGenerator::AssembleDeconstructFrame() {
  __ LeaveFrame(StackFrame::MANUAL);
}
697
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000698void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
699 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
700 if (sp_slot_delta > 0) {
701 __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
702 }
703 frame_access_state()->SetFrameAccessToDefault();
704}
705
706
// Prepares the stack for a tail call: grows the frame when the callee
// needs more stack-parameter slots than the caller provided (negative
// slot delta), restores the caller's frame state if a frame exists, and
// switches frame accesses to be sp-relative.
void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ Add(sp, sp, sp_slot_delta * kPointerSize, r0);
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ RestoreFrameStateForTailCall();
  }
  frame_access_state()->SetFrameAccessToSP();
}
718
// If the caller frame is an arguments adaptor, pops it before a tail
// call so the callee sees the actual (adapted) argument count.
// |args_reg| holds the callee's formal argument count; the scratch
// registers must be distinct from it and each other.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ LoadP(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ CmpSmiLiteral(scratch1, Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR), r0);
  __ bne(&done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ LoadP(caller_args_count_reg,
           MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000743
744// Assembles an instruction after register allocation, producing machine code.
Ben Murdochc5610432016-08-08 18:44:38 +0100745CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
746 Instruction* instr) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000747 PPCOperandConverter i(this, instr);
748 ArchOpcode opcode = ArchOpcodeField::decode(instr->opcode());
749
750 switch (opcode) {
751 case kArchCallCodeObject: {
752 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
753 masm());
754 EnsureSpaceForLazyDeopt();
755 if (HasRegisterInput(instr, 0)) {
756 __ addi(ip, i.InputRegister(0),
757 Operand(Code::kHeaderSize - kHeapObjectTag));
758 __ Call(ip);
759 } else {
760 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
761 RelocInfo::CODE_TARGET);
762 }
763 RecordCallPosition(instr);
764 DCHECK_EQ(LeaveRC, i.OutputRCBit());
765 frame_access_state()->ClearSPDelta();
766 break;
767 }
Ben Murdochda12d292016-06-02 14:46:10 +0100768 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000769 case kArchTailCallCodeObject: {
770 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
771 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100772 if (opcode == kArchTailCallCodeObjectFromJSFunction) {
773 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
774 i.TempRegister(0), i.TempRegister(1),
775 i.TempRegister(2));
776 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000777 if (HasRegisterInput(instr, 0)) {
778 __ addi(ip, i.InputRegister(0),
779 Operand(Code::kHeaderSize - kHeapObjectTag));
780 __ Jump(ip);
781 } else {
782 // We cannot use the constant pool to load the target since
783 // we've already restored the caller's frame.
784 ConstantPoolUnavailableScope constant_pool_unavailable(masm());
785 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
786 RelocInfo::CODE_TARGET);
787 }
788 DCHECK_EQ(LeaveRC, i.OutputRCBit());
789 frame_access_state()->ClearSPDelta();
790 break;
791 }
Ben Murdochc5610432016-08-08 18:44:38 +0100792 case kArchTailCallAddress: {
793 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
794 AssembleDeconstructActivationRecord(stack_param_delta);
795 CHECK(!instr->InputAt(0)->IsImmediate());
796 __ Jump(i.InputRegister(0));
797 frame_access_state()->ClearSPDelta();
798 break;
799 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000800 case kArchCallJSFunction: {
801 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
802 masm());
803 EnsureSpaceForLazyDeopt();
804 Register func = i.InputRegister(0);
805 if (FLAG_debug_code) {
806 // Check the function's context matches the context argument.
807 __ LoadP(kScratchReg,
808 FieldMemOperand(func, JSFunction::kContextOffset));
809 __ cmp(cp, kScratchReg);
810 __ Assert(eq, kWrongFunctionContext);
811 }
812 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
813 __ Call(ip);
814 RecordCallPosition(instr);
815 DCHECK_EQ(LeaveRC, i.OutputRCBit());
816 frame_access_state()->ClearSPDelta();
817 break;
818 }
Ben Murdochda12d292016-06-02 14:46:10 +0100819 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000820 case kArchTailCallJSFunction: {
821 Register func = i.InputRegister(0);
822 if (FLAG_debug_code) {
823 // Check the function's context matches the context argument.
824 __ LoadP(kScratchReg,
825 FieldMemOperand(func, JSFunction::kContextOffset));
826 __ cmp(cp, kScratchReg);
827 __ Assert(eq, kWrongFunctionContext);
828 }
829 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
830 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100831 if (opcode == kArchTailCallJSFunctionFromJSFunction) {
832 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
833 i.TempRegister(0), i.TempRegister(1),
834 i.TempRegister(2));
835 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000836 __ LoadP(ip, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
837 __ Jump(ip);
838 DCHECK_EQ(LeaveRC, i.OutputRCBit());
839 frame_access_state()->ClearSPDelta();
840 break;
841 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000842 case kArchPrepareCallCFunction: {
843 int const num_parameters = MiscField::decode(instr->opcode());
844 __ PrepareCallCFunction(num_parameters, kScratchReg);
845 // Frame alignment requires using FP-relative frame addressing.
846 frame_access_state()->SetFrameAccessToFP();
847 break;
848 }
849 case kArchPrepareTailCall:
850 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
851 break;
852 case kArchCallCFunction: {
853 int const num_parameters = MiscField::decode(instr->opcode());
854 if (instr->InputAt(0)->IsImmediate()) {
855 ExternalReference ref = i.InputExternalReference(0);
856 __ CallCFunction(ref, num_parameters);
857 } else {
858 Register func = i.InputRegister(0);
859 __ CallCFunction(func, num_parameters);
860 }
861 frame_access_state()->SetFrameAccessToDefault();
862 frame_access_state()->ClearSPDelta();
863 break;
864 }
865 case kArchJmp:
866 AssembleArchJump(i.InputRpo(0));
867 DCHECK_EQ(LeaveRC, i.OutputRCBit());
868 break;
869 case kArchLookupSwitch:
870 AssembleArchLookupSwitch(instr);
871 DCHECK_EQ(LeaveRC, i.OutputRCBit());
872 break;
873 case kArchTableSwitch:
874 AssembleArchTableSwitch(instr);
875 DCHECK_EQ(LeaveRC, i.OutputRCBit());
876 break;
877 case kArchNop:
878 case kArchThrowTerminator:
879 // don't emit code for nops.
880 DCHECK_EQ(LeaveRC, i.OutputRCBit());
881 break;
882 case kArchDeoptimize: {
883 int deopt_state_id =
884 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
885 Deoptimizer::BailoutType bailout_type =
886 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
Ben Murdochc5610432016-08-08 18:44:38 +0100887 CodeGenResult result =
888 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
889 if (result != kSuccess) return result;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000890 break;
891 }
892 case kArchRet:
893 AssembleReturn();
894 DCHECK_EQ(LeaveRC, i.OutputRCBit());
895 break;
896 case kArchStackPointer:
897 __ mr(i.OutputRegister(), sp);
898 DCHECK_EQ(LeaveRC, i.OutputRCBit());
899 break;
900 case kArchFramePointer:
901 __ mr(i.OutputRegister(), fp);
902 DCHECK_EQ(LeaveRC, i.OutputRCBit());
903 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100904 case kArchParentFramePointer:
Ben Murdochda12d292016-06-02 14:46:10 +0100905 if (frame_access_state()->has_frame()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100906 __ LoadP(i.OutputRegister(), MemOperand(fp, 0));
907 } else {
908 __ mr(i.OutputRegister(), fp);
909 }
910 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000911 case kArchTruncateDoubleToI:
912 // TODO(mbrandy): move slow call to stub out of line.
913 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
914 DCHECK_EQ(LeaveRC, i.OutputRCBit());
915 break;
916 case kArchStoreWithWriteBarrier: {
917 RecordWriteMode mode =
918 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
919 Register object = i.InputRegister(0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000920 Register value = i.InputRegister(2);
921 Register scratch0 = i.TempRegister(0);
922 Register scratch1 = i.TempRegister(1);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100923 OutOfLineRecordWrite* ool;
924
925 AddressingMode addressing_mode =
926 AddressingModeField::decode(instr->opcode());
927 if (addressing_mode == kMode_MRI) {
928 int32_t offset = i.InputInt32(1);
929 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
930 scratch0, scratch1, mode);
931 __ StoreP(value, MemOperand(object, offset));
932 } else {
933 DCHECK_EQ(kMode_MRR, addressing_mode);
934 Register offset(i.InputRegister(1));
935 ool = new (zone()) OutOfLineRecordWrite(this, object, offset, value,
936 scratch0, scratch1, mode);
937 __ StorePX(value, MemOperand(object, offset));
938 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000939 __ CheckPageFlag(object, scratch0,
940 MemoryChunk::kPointersFromHereAreInterestingMask, ne,
941 ool->entry());
942 __ bind(ool->exit());
943 break;
944 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100945 case kArchStackSlot: {
946 FrameOffset offset =
947 frame_access_state()->GetFrameOffset(i.InputInt32(0));
948 __ addi(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
949 Operand(offset.offset()));
950 break;
951 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000952 case kPPC_And:
953 if (HasRegisterInput(instr, 1)) {
954 __ and_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
955 i.OutputRCBit());
956 } else {
957 __ andi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
958 }
959 break;
960 case kPPC_AndComplement:
961 __ andc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
962 i.OutputRCBit());
963 break;
964 case kPPC_Or:
965 if (HasRegisterInput(instr, 1)) {
966 __ orx(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
967 i.OutputRCBit());
968 } else {
969 __ ori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
970 DCHECK_EQ(LeaveRC, i.OutputRCBit());
971 }
972 break;
973 case kPPC_OrComplement:
974 __ orc(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
975 i.OutputRCBit());
976 break;
977 case kPPC_Xor:
978 if (HasRegisterInput(instr, 1)) {
979 __ xor_(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
980 i.OutputRCBit());
981 } else {
982 __ xori(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
983 DCHECK_EQ(LeaveRC, i.OutputRCBit());
984 }
985 break;
986 case kPPC_ShiftLeft32:
987 ASSEMBLE_BINOP_RC(slw, slwi);
988 break;
989#if V8_TARGET_ARCH_PPC64
990 case kPPC_ShiftLeft64:
991 ASSEMBLE_BINOP_RC(sld, sldi);
992 break;
993#endif
994 case kPPC_ShiftRight32:
995 ASSEMBLE_BINOP_RC(srw, srwi);
996 break;
997#if V8_TARGET_ARCH_PPC64
998 case kPPC_ShiftRight64:
999 ASSEMBLE_BINOP_RC(srd, srdi);
1000 break;
1001#endif
1002 case kPPC_ShiftRightAlg32:
1003 ASSEMBLE_BINOP_INT_RC(sraw, srawi);
1004 break;
1005#if V8_TARGET_ARCH_PPC64
1006 case kPPC_ShiftRightAlg64:
1007 ASSEMBLE_BINOP_INT_RC(srad, sradi);
1008 break;
1009#endif
Ben Murdochda12d292016-06-02 14:46:10 +01001010#if !V8_TARGET_ARCH_PPC64
1011 case kPPC_AddPair:
1012 // i.InputRegister(0) ... left low word.
1013 // i.InputRegister(1) ... left high word.
1014 // i.InputRegister(2) ... right low word.
1015 // i.InputRegister(3) ... right high word.
1016 __ addc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
1017 __ adde(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
1018 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1019 break;
1020 case kPPC_SubPair:
1021 // i.InputRegister(0) ... left low word.
1022 // i.InputRegister(1) ... left high word.
1023 // i.InputRegister(2) ... right low word.
1024 // i.InputRegister(3) ... right high word.
1025 __ subc(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
1026 __ sube(i.OutputRegister(1), i.InputRegister(1), i.InputRegister(3));
1027 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1028 break;
1029 case kPPC_MulPair:
1030 // i.InputRegister(0) ... left low word.
1031 // i.InputRegister(1) ... left high word.
1032 // i.InputRegister(2) ... right low word.
1033 // i.InputRegister(3) ... right high word.
1034 __ mullw(i.TempRegister(0), i.InputRegister(0), i.InputRegister(3));
1035 __ mullw(i.TempRegister(1), i.InputRegister(2), i.InputRegister(1));
1036 __ add(i.TempRegister(0), i.TempRegister(0), i.TempRegister(1));
1037 __ mullw(i.OutputRegister(0), i.InputRegister(0), i.InputRegister(2));
1038 __ mulhwu(i.OutputRegister(1), i.InputRegister(0), i.InputRegister(2));
1039 __ add(i.OutputRegister(1), i.OutputRegister(1), i.TempRegister(0));
1040 break;
1041 case kPPC_ShiftLeftPair:
1042 if (instr->InputAt(2)->IsImmediate()) {
1043 __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
1044 i.InputRegister(0), i.InputRegister(1),
1045 i.InputInt32(2));
1046 } else {
1047 __ ShiftLeftPair(i.OutputRegister(0), i.OutputRegister(1),
1048 i.InputRegister(0), i.InputRegister(1), kScratchReg,
1049 i.InputRegister(2));
1050 }
1051 break;
1052 case kPPC_ShiftRightPair:
1053 if (instr->InputAt(2)->IsImmediate()) {
1054 __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
1055 i.InputRegister(0), i.InputRegister(1),
1056 i.InputInt32(2));
1057 } else {
1058 __ ShiftRightPair(i.OutputRegister(0), i.OutputRegister(1),
1059 i.InputRegister(0), i.InputRegister(1), kScratchReg,
1060 i.InputRegister(2));
1061 }
1062 break;
1063 case kPPC_ShiftRightAlgPair:
1064 if (instr->InputAt(2)->IsImmediate()) {
1065 __ ShiftRightAlgPair(i.OutputRegister(0), i.OutputRegister(1),
1066 i.InputRegister(0), i.InputRegister(1),
1067 i.InputInt32(2));
1068 } else {
1069 __ ShiftRightAlgPair(i.OutputRegister(0), i.OutputRegister(1),
1070 i.InputRegister(0), i.InputRegister(1),
1071 kScratchReg, i.InputRegister(2));
1072 }
1073 break;
1074#endif
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001075 case kPPC_RotRight32:
1076 if (HasRegisterInput(instr, 1)) {
1077 __ subfic(kScratchReg, i.InputRegister(1), Operand(32));
1078 __ rotlw(i.OutputRegister(), i.InputRegister(0), kScratchReg,
1079 i.OutputRCBit());
1080 } else {
1081 int sh = i.InputInt32(1);
1082 __ rotrwi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
1083 }
1084 break;
1085#if V8_TARGET_ARCH_PPC64
1086 case kPPC_RotRight64:
1087 if (HasRegisterInput(instr, 1)) {
1088 __ subfic(kScratchReg, i.InputRegister(1), Operand(64));
1089 __ rotld(i.OutputRegister(), i.InputRegister(0), kScratchReg,
1090 i.OutputRCBit());
1091 } else {
1092 int sh = i.InputInt32(1);
1093 __ rotrdi(i.OutputRegister(), i.InputRegister(0), sh, i.OutputRCBit());
1094 }
1095 break;
1096#endif
1097 case kPPC_Not:
1098 __ notx(i.OutputRegister(), i.InputRegister(0), i.OutputRCBit());
1099 break;
1100 case kPPC_RotLeftAndMask32:
1101 __ rlwinm(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1102 31 - i.InputInt32(2), 31 - i.InputInt32(3), i.OutputRCBit());
1103 break;
1104#if V8_TARGET_ARCH_PPC64
1105 case kPPC_RotLeftAndClear64:
1106 __ rldic(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1107 63 - i.InputInt32(2), i.OutputRCBit());
1108 break;
1109 case kPPC_RotLeftAndClearLeft64:
1110 __ rldicl(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1111 63 - i.InputInt32(2), i.OutputRCBit());
1112 break;
1113 case kPPC_RotLeftAndClearRight64:
1114 __ rldicr(i.OutputRegister(), i.InputRegister(0), i.InputInt32(1),
1115 63 - i.InputInt32(2), i.OutputRCBit());
1116 break;
1117#endif
1118 case kPPC_Add:
1119#if V8_TARGET_ARCH_PPC64
1120 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
1121 ASSEMBLE_ADD_WITH_OVERFLOW();
1122 } else {
1123#endif
1124 if (HasRegisterInput(instr, 1)) {
1125 __ add(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1126 LeaveOE, i.OutputRCBit());
1127 } else {
1128 __ addi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
1129 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1130 }
1131#if V8_TARGET_ARCH_PPC64
1132 }
1133#endif
1134 break;
1135 case kPPC_AddWithOverflow32:
1136 ASSEMBLE_ADD_WITH_OVERFLOW32();
1137 break;
1138 case kPPC_AddDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001139 ASSEMBLE_FLOAT_BINOP_RC(fadd, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001140 break;
1141 case kPPC_Sub:
1142#if V8_TARGET_ARCH_PPC64
1143 if (FlagsModeField::decode(instr->opcode()) != kFlags_none) {
1144 ASSEMBLE_SUB_WITH_OVERFLOW();
1145 } else {
1146#endif
1147 if (HasRegisterInput(instr, 1)) {
1148 __ sub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1149 LeaveOE, i.OutputRCBit());
1150 } else {
1151 __ subi(i.OutputRegister(), i.InputRegister(0), i.InputImmediate(1));
1152 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1153 }
1154#if V8_TARGET_ARCH_PPC64
1155 }
1156#endif
1157 break;
1158 case kPPC_SubWithOverflow32:
1159 ASSEMBLE_SUB_WITH_OVERFLOW32();
1160 break;
1161 case kPPC_SubDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001162 ASSEMBLE_FLOAT_BINOP_RC(fsub, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001163 break;
1164 case kPPC_Mul32:
1165 __ mullw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1166 LeaveOE, i.OutputRCBit());
1167 break;
1168#if V8_TARGET_ARCH_PPC64
1169 case kPPC_Mul64:
1170 __ mulld(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1171 LeaveOE, i.OutputRCBit());
1172 break;
1173#endif
1174 case kPPC_MulHigh32:
1175 __ mulhw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1176 i.OutputRCBit());
1177 break;
1178 case kPPC_MulHighU32:
1179 __ mulhwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
1180 i.OutputRCBit());
1181 break;
1182 case kPPC_MulDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001183 ASSEMBLE_FLOAT_BINOP_RC(fmul, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001184 break;
1185 case kPPC_Div32:
1186 __ divw(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1187 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1188 break;
1189#if V8_TARGET_ARCH_PPC64
1190 case kPPC_Div64:
1191 __ divd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1192 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1193 break;
1194#endif
1195 case kPPC_DivU32:
1196 __ divwu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1197 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1198 break;
1199#if V8_TARGET_ARCH_PPC64
1200 case kPPC_DivU64:
1201 __ divdu(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
1202 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1203 break;
1204#endif
1205 case kPPC_DivDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001206 ASSEMBLE_FLOAT_BINOP_RC(fdiv, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001207 break;
1208 case kPPC_Mod32:
1209 ASSEMBLE_MODULO(divw, mullw);
1210 break;
1211#if V8_TARGET_ARCH_PPC64
1212 case kPPC_Mod64:
1213 ASSEMBLE_MODULO(divd, mulld);
1214 break;
1215#endif
1216 case kPPC_ModU32:
1217 ASSEMBLE_MODULO(divwu, mullw);
1218 break;
1219#if V8_TARGET_ARCH_PPC64
1220 case kPPC_ModU64:
1221 ASSEMBLE_MODULO(divdu, mulld);
1222 break;
1223#endif
1224 case kPPC_ModDouble:
1225 // TODO(bmeurer): We should really get rid of this special instruction,
1226 // and generate a CallAddress instruction instead.
1227 ASSEMBLE_FLOAT_MODULO();
1228 break;
1229 case kPPC_Neg:
1230 __ neg(i.OutputRegister(), i.InputRegister(0), LeaveOE, i.OutputRCBit());
1231 break;
1232 case kPPC_MaxDouble:
1233 ASSEMBLE_FLOAT_MAX(kScratchDoubleReg);
1234 break;
1235 case kPPC_MinDouble:
1236 ASSEMBLE_FLOAT_MIN(kScratchDoubleReg);
1237 break;
1238 case kPPC_AbsDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001239 ASSEMBLE_FLOAT_UNOP_RC(fabs, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001240 break;
1241 case kPPC_SqrtDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001242 ASSEMBLE_FLOAT_UNOP_RC(fsqrt, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001243 break;
1244 case kPPC_FloorDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001245 ASSEMBLE_FLOAT_UNOP_RC(frim, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001246 break;
1247 case kPPC_CeilDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001248 ASSEMBLE_FLOAT_UNOP_RC(frip, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001249 break;
1250 case kPPC_TruncateDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001251 ASSEMBLE_FLOAT_UNOP_RC(friz, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001252 break;
1253 case kPPC_RoundDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001254 ASSEMBLE_FLOAT_UNOP_RC(frin, MiscField::decode(instr->opcode()));
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001255 break;
1256 case kPPC_NegDouble:
Ben Murdochda12d292016-06-02 14:46:10 +01001257 ASSEMBLE_FLOAT_UNOP_RC(fneg, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001258 break;
1259 case kPPC_Cntlz32:
1260 __ cntlzw_(i.OutputRegister(), i.InputRegister(0));
1261 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1262 break;
1263#if V8_TARGET_ARCH_PPC64
1264 case kPPC_Cntlz64:
1265 __ cntlzd_(i.OutputRegister(), i.InputRegister(0));
1266 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1267 break;
1268#endif
1269 case kPPC_Popcnt32:
1270 __ popcntw(i.OutputRegister(), i.InputRegister(0));
1271 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1272 break;
1273#if V8_TARGET_ARCH_PPC64
1274 case kPPC_Popcnt64:
1275 __ popcntd(i.OutputRegister(), i.InputRegister(0));
1276 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1277 break;
1278#endif
1279 case kPPC_Cmp32:
1280 ASSEMBLE_COMPARE(cmpw, cmplw);
1281 break;
1282#if V8_TARGET_ARCH_PPC64
1283 case kPPC_Cmp64:
1284 ASSEMBLE_COMPARE(cmp, cmpl);
1285 break;
1286#endif
1287 case kPPC_CmpDouble:
1288 ASSEMBLE_FLOAT_COMPARE(fcmpu);
1289 break;
1290 case kPPC_Tst32:
1291 if (HasRegisterInput(instr, 1)) {
1292 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
1293 } else {
1294 __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
1295 }
1296#if V8_TARGET_ARCH_PPC64
1297 __ extsw(r0, r0, i.OutputRCBit());
1298#endif
1299 DCHECK_EQ(SetRC, i.OutputRCBit());
1300 break;
1301#if V8_TARGET_ARCH_PPC64
1302 case kPPC_Tst64:
1303 if (HasRegisterInput(instr, 1)) {
1304 __ and_(r0, i.InputRegister(0), i.InputRegister(1), i.OutputRCBit());
1305 } else {
1306 __ andi(r0, i.InputRegister(0), i.InputImmediate(1));
1307 }
1308 DCHECK_EQ(SetRC, i.OutputRCBit());
1309 break;
1310#endif
1311 case kPPC_Push:
Ben Murdochc5610432016-08-08 18:44:38 +01001312 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001313 __ stfdu(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
1314 frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
1315 } else {
1316 __ Push(i.InputRegister(0));
1317 frame_access_state()->IncreaseSPDelta(1);
1318 }
1319 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1320 break;
1321 case kPPC_PushFrame: {
1322 int num_slots = i.InputInt32(1);
Ben Murdochc5610432016-08-08 18:44:38 +01001323 if (instr->InputAt(0)->IsFPRegister()) {
1324 __ StoreDoubleU(i.InputDoubleRegister(0),
1325 MemOperand(sp, -num_slots * kPointerSize), r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001326 } else {
1327 __ StorePU(i.InputRegister(0),
Ben Murdochc5610432016-08-08 18:44:38 +01001328 MemOperand(sp, -num_slots * kPointerSize), r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001329 }
1330 break;
1331 }
1332 case kPPC_StoreToStackSlot: {
1333 int slot = i.InputInt32(1);
Ben Murdochc5610432016-08-08 18:44:38 +01001334 if (instr->InputAt(0)->IsFPRegister()) {
1335 __ StoreDouble(i.InputDoubleRegister(0),
1336 MemOperand(sp, slot * kPointerSize), r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001337 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01001338 __ StoreP(i.InputRegister(0), MemOperand(sp, slot * kPointerSize), r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001339 }
1340 break;
1341 }
1342 case kPPC_ExtendSignWord8:
1343 __ extsb(i.OutputRegister(), i.InputRegister(0));
1344 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1345 break;
1346 case kPPC_ExtendSignWord16:
1347 __ extsh(i.OutputRegister(), i.InputRegister(0));
1348 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1349 break;
1350#if V8_TARGET_ARCH_PPC64
1351 case kPPC_ExtendSignWord32:
1352 __ extsw(i.OutputRegister(), i.InputRegister(0));
1353 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1354 break;
1355 case kPPC_Uint32ToUint64:
1356 // Zero extend
1357 __ clrldi(i.OutputRegister(), i.InputRegister(0), Operand(32));
1358 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1359 break;
1360 case kPPC_Int64ToInt32:
1361 __ extsw(i.OutputRegister(), i.InputRegister(0));
1362 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1363 break;
1364 case kPPC_Int64ToFloat32:
1365 __ ConvertInt64ToFloat(i.InputRegister(0), i.OutputDoubleRegister());
1366 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1367 break;
1368 case kPPC_Int64ToDouble:
1369 __ ConvertInt64ToDouble(i.InputRegister(0), i.OutputDoubleRegister());
1370 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1371 break;
1372 case kPPC_Uint64ToFloat32:
1373 __ ConvertUnsignedInt64ToFloat(i.InputRegister(0),
1374 i.OutputDoubleRegister());
1375 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1376 break;
1377 case kPPC_Uint64ToDouble:
1378 __ ConvertUnsignedInt64ToDouble(i.InputRegister(0),
1379 i.OutputDoubleRegister());
1380 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1381 break;
1382#endif
Ben Murdoch097c5b22016-05-18 11:27:45 +01001383 case kPPC_Int32ToFloat32:
1384 __ ConvertIntToFloat(i.InputRegister(0), i.OutputDoubleRegister());
1385 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1386 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001387 case kPPC_Int32ToDouble:
1388 __ ConvertIntToDouble(i.InputRegister(0), i.OutputDoubleRegister());
1389 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1390 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001391 case kPPC_Uint32ToFloat32:
1392 __ ConvertUnsignedIntToFloat(i.InputRegister(0),
1393 i.OutputDoubleRegister());
1394 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1395 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001396 case kPPC_Uint32ToDouble:
1397 __ ConvertUnsignedIntToDouble(i.InputRegister(0),
1398 i.OutputDoubleRegister());
1399 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1400 break;
1401 case kPPC_DoubleToInt32:
1402 case kPPC_DoubleToUint32:
1403 case kPPC_DoubleToInt64: {
1404#if V8_TARGET_ARCH_PPC64
1405 bool check_conversion =
1406 (opcode == kPPC_DoubleToInt64 && i.OutputCount() > 1);
1407 if (check_conversion) {
1408 __ mtfsb0(VXCVI); // clear FPSCR:VXCVI bit
1409 }
1410#endif
1411 __ ConvertDoubleToInt64(i.InputDoubleRegister(0),
1412#if !V8_TARGET_ARCH_PPC64
1413 kScratchReg,
1414#endif
1415 i.OutputRegister(0), kScratchDoubleReg);
1416#if V8_TARGET_ARCH_PPC64
1417 if (check_conversion) {
1418 // Set 2nd output to zero if conversion fails.
1419 CRegister cr = cr7;
1420 int crbit = v8::internal::Assembler::encode_crbit(
1421 cr, static_cast<CRBit>(VXCVI % CRWIDTH));
1422 __ mcrfs(cr, VXCVI); // extract FPSCR field containing VXCVI into cr7
1423 if (CpuFeatures::IsSupported(ISELECT)) {
1424 __ li(i.OutputRegister(1), Operand(1));
1425 __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
1426 } else {
1427 __ li(i.OutputRegister(1), Operand::Zero());
1428 __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
1429 __ li(i.OutputRegister(1), Operand(1));
1430 }
1431 }
1432#endif
1433 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1434 break;
1435 }
1436#if V8_TARGET_ARCH_PPC64
1437 case kPPC_DoubleToUint64: {
1438 bool check_conversion = (i.OutputCount() > 1);
1439 if (check_conversion) {
1440 __ mtfsb0(VXCVI); // clear FPSCR:VXCVI bit
1441 }
1442 __ ConvertDoubleToUnsignedInt64(i.InputDoubleRegister(0),
1443 i.OutputRegister(0), kScratchDoubleReg);
1444 if (check_conversion) {
1445 // Set 2nd output to zero if conversion fails.
1446 CRegister cr = cr7;
1447 int crbit = v8::internal::Assembler::encode_crbit(
1448 cr, static_cast<CRBit>(VXCVI % CRWIDTH));
1449 __ mcrfs(cr, VXCVI); // extract FPSCR field containing VXCVI into cr7
1450 if (CpuFeatures::IsSupported(ISELECT)) {
1451 __ li(i.OutputRegister(1), Operand(1));
1452 __ isel(i.OutputRegister(1), r0, i.OutputRegister(1), crbit);
1453 } else {
1454 __ li(i.OutputRegister(1), Operand::Zero());
1455 __ bc(v8::internal::Assembler::kInstrSize * 2, BT, crbit);
1456 __ li(i.OutputRegister(1), Operand(1));
1457 }
1458 }
1459 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1460 break;
1461 }
1462#endif
1463 case kPPC_DoubleToFloat32:
Ben Murdochda12d292016-06-02 14:46:10 +01001464 ASSEMBLE_FLOAT_UNOP_RC(frsp, 0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001465 break;
1466 case kPPC_Float32ToDouble:
1467 // Nothing to do.
1468 __ Move(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1469 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1470 break;
1471 case kPPC_DoubleExtractLowWord32:
1472 __ MovDoubleLowToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1473 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1474 break;
1475 case kPPC_DoubleExtractHighWord32:
1476 __ MovDoubleHighToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1477 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1478 break;
1479 case kPPC_DoubleInsertLowWord32:
1480 __ InsertDoubleLow(i.OutputDoubleRegister(), i.InputRegister(1), r0);
1481 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1482 break;
1483 case kPPC_DoubleInsertHighWord32:
1484 __ InsertDoubleHigh(i.OutputDoubleRegister(), i.InputRegister(1), r0);
1485 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1486 break;
1487 case kPPC_DoubleConstruct:
1488#if V8_TARGET_ARCH_PPC64
1489 __ MovInt64ComponentsToDouble(i.OutputDoubleRegister(),
1490 i.InputRegister(0), i.InputRegister(1), r0);
1491#else
1492 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0),
1493 i.InputRegister(1));
1494#endif
1495 DCHECK_EQ(LeaveRC, i.OutputRCBit());
1496 break;
1497 case kPPC_BitcastFloat32ToInt32:
1498 __ MovFloatToInt(i.OutputRegister(), i.InputDoubleRegister(0));
1499 break;
1500 case kPPC_BitcastInt32ToFloat32:
1501 __ MovIntToFloat(i.OutputDoubleRegister(), i.InputRegister(0));
1502 break;
1503#if V8_TARGET_ARCH_PPC64
1504 case kPPC_BitcastDoubleToInt64:
1505 __ MovDoubleToInt64(i.OutputRegister(), i.InputDoubleRegister(0));
1506 break;
1507 case kPPC_BitcastInt64ToDouble:
1508 __ MovInt64ToDouble(i.OutputDoubleRegister(), i.InputRegister(0));
1509 break;
1510#endif
1511 case kPPC_LoadWordU8:
1512 ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
1513 break;
1514 case kPPC_LoadWordS8:
1515 ASSEMBLE_LOAD_INTEGER(lbz, lbzx);
1516 __ extsb(i.OutputRegister(), i.OutputRegister());
1517 break;
1518 case kPPC_LoadWordU16:
1519 ASSEMBLE_LOAD_INTEGER(lhz, lhzx);
1520 break;
1521 case kPPC_LoadWordS16:
1522 ASSEMBLE_LOAD_INTEGER(lha, lhax);
1523 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001524 case kPPC_LoadWordU32:
1525 ASSEMBLE_LOAD_INTEGER(lwz, lwzx);
1526 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001527 case kPPC_LoadWordS32:
1528 ASSEMBLE_LOAD_INTEGER(lwa, lwax);
1529 break;
1530#if V8_TARGET_ARCH_PPC64
1531 case kPPC_LoadWord64:
1532 ASSEMBLE_LOAD_INTEGER(ld, ldx);
1533 break;
1534#endif
1535 case kPPC_LoadFloat32:
1536 ASSEMBLE_LOAD_FLOAT(lfs, lfsx);
1537 break;
1538 case kPPC_LoadDouble:
1539 ASSEMBLE_LOAD_FLOAT(lfd, lfdx);
1540 break;
1541 case kPPC_StoreWord8:
1542 ASSEMBLE_STORE_INTEGER(stb, stbx);
1543 break;
1544 case kPPC_StoreWord16:
1545 ASSEMBLE_STORE_INTEGER(sth, sthx);
1546 break;
1547 case kPPC_StoreWord32:
1548 ASSEMBLE_STORE_INTEGER(stw, stwx);
1549 break;
1550#if V8_TARGET_ARCH_PPC64
1551 case kPPC_StoreWord64:
1552 ASSEMBLE_STORE_INTEGER(std, stdx);
1553 break;
1554#endif
1555 case kPPC_StoreFloat32:
1556 ASSEMBLE_STORE_FLOAT32();
1557 break;
1558 case kPPC_StoreDouble:
1559 ASSEMBLE_STORE_DOUBLE();
1560 break;
1561 case kCheckedLoadInt8:
1562 ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
1563 __ extsb(i.OutputRegister(), i.OutputRegister());
1564 break;
1565 case kCheckedLoadUint8:
1566 ASSEMBLE_CHECKED_LOAD_INTEGER(lbz, lbzx);
1567 break;
1568 case kCheckedLoadInt16:
1569 ASSEMBLE_CHECKED_LOAD_INTEGER(lha, lhax);
1570 break;
1571 case kCheckedLoadUint16:
1572 ASSEMBLE_CHECKED_LOAD_INTEGER(lhz, lhzx);
1573 break;
1574 case kCheckedLoadWord32:
Ben Murdochc5610432016-08-08 18:44:38 +01001575 ASSEMBLE_CHECKED_LOAD_INTEGER(lwz, lwzx);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001576 break;
1577 case kCheckedLoadWord64:
1578#if V8_TARGET_ARCH_PPC64
1579 ASSEMBLE_CHECKED_LOAD_INTEGER(ld, ldx);
1580#else
1581 UNREACHABLE();
1582#endif
1583 break;
1584 case kCheckedLoadFloat32:
1585 ASSEMBLE_CHECKED_LOAD_FLOAT(lfs, lfsx, 32);
1586 break;
1587 case kCheckedLoadFloat64:
1588 ASSEMBLE_CHECKED_LOAD_FLOAT(lfd, lfdx, 64);
1589 break;
1590 case kCheckedStoreWord8:
1591 ASSEMBLE_CHECKED_STORE_INTEGER(stb, stbx);
1592 break;
1593 case kCheckedStoreWord16:
1594 ASSEMBLE_CHECKED_STORE_INTEGER(sth, sthx);
1595 break;
1596 case kCheckedStoreWord32:
1597 ASSEMBLE_CHECKED_STORE_INTEGER(stw, stwx);
1598 break;
1599 case kCheckedStoreWord64:
1600#if V8_TARGET_ARCH_PPC64
1601 ASSEMBLE_CHECKED_STORE_INTEGER(std, stdx);
1602#else
1603 UNREACHABLE();
1604#endif
1605 break;
1606 case kCheckedStoreFloat32:
1607 ASSEMBLE_CHECKED_STORE_FLOAT32();
1608 break;
1609 case kCheckedStoreFloat64:
1610 ASSEMBLE_CHECKED_STORE_DOUBLE();
1611 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001612
1613 case kAtomicLoadInt8:
1614 ASSEMBLE_ATOMIC_LOAD_INTEGER(lbz, lbzx);
1615 __ extsb(i.OutputRegister(), i.OutputRegister());
1616 break;
1617 case kAtomicLoadUint8:
1618 ASSEMBLE_ATOMIC_LOAD_INTEGER(lbz, lbzx);
1619 break;
1620 case kAtomicLoadInt16:
1621 ASSEMBLE_ATOMIC_LOAD_INTEGER(lha, lhax);
1622 break;
1623 case kAtomicLoadUint16:
1624 ASSEMBLE_ATOMIC_LOAD_INTEGER(lhz, lhzx);
1625 break;
1626 case kAtomicLoadWord32:
1627 ASSEMBLE_ATOMIC_LOAD_INTEGER(lwz, lwzx);
1628 break;
1629
1630 case kAtomicStoreWord8:
1631 ASSEMBLE_ATOMIC_STORE_INTEGER(stb, stbx);
1632 break;
1633 case kAtomicStoreWord16:
1634 ASSEMBLE_ATOMIC_STORE_INTEGER(sth, sthx);
1635 break;
1636 case kAtomicStoreWord32:
1637 ASSEMBLE_ATOMIC_STORE_INTEGER(stw, stwx);
1638 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001639 default:
1640 UNREACHABLE();
1641 break;
1642 }
Ben Murdochc5610432016-08-08 18:44:38 +01001643 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001644} // NOLINT(readability/fn_size)
1645
1646
1647// Assembles branches after an instruction.
1648void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
1649 PPCOperandConverter i(this, instr);
1650 Label* tlabel = branch->true_label;
1651 Label* flabel = branch->false_label;
1652 ArchOpcode op = instr->arch_opcode();
1653 FlagsCondition condition = branch->condition;
1654 CRegister cr = cr0;
1655
1656 Condition cond = FlagsConditionToCondition(condition, op);
1657 if (op == kPPC_CmpDouble) {
1658 // check for unordered if necessary
1659 if (cond == le) {
1660 __ bunordered(flabel, cr);
1661 // Unnecessary for eq/lt since only FU bit will be set.
1662 } else if (cond == gt) {
1663 __ bunordered(tlabel, cr);
1664 // Unnecessary for ne/ge since only FU bit will be set.
1665 }
1666 }
1667 __ b(cond, tlabel, cr);
1668 if (!branch->fallthru) __ b(flabel); // no fallthru to flabel.
1669}
1670
1671
1672void CodeGenerator::AssembleArchJump(RpoNumber target) {
1673 if (!IsNextInAssemblyOrder(target)) __ b(GetLabel(target));
1674}
1675
1676
// Assembles boolean materializations after an instruction.
// Produces a full 32-bit 1 (condition true) or 0 (condition false) in the
// instruction's last output register, based on the condition-register state
// left by the preceding compare.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  PPCOperandConverter i(this, instr);
  Label done;
  ArchOpcode op = instr->arch_opcode();
  CRegister cr = cr0;
  // Tracks a value already materialized into |reg| on some path
  // (-1 = nothing materialized yet), so redundant `li`s can be skipped.
  int reg_value = -1;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  DCHECK_NE(0u, instr->OutputCount());
  Register reg = i.OutputRegister(instr->OutputCount() - 1);

  Condition cond = FlagsConditionToCondition(condition, op);
  if (op == kPPC_CmpDouble) {
    // check for unordered if necessary
    // For le the unordered (NaN) outcome must yield 0; for gt it must
    // yield 1. Materialize that value up front and skip the generic
    // sequence below via |done|.
    if (cond == le) {
      reg_value = 0;
      __ li(reg, Operand::Zero());
      __ bunordered(&done, cr);
    } else if (cond == gt) {
      reg_value = 1;
      __ li(reg, Operand(1));
      __ bunordered(&done, cr);
    }
    // Unnecessary for eq/lt & ne/ge since only FU bit will be set.
  }

  if (CpuFeatures::IsSupported(ISELECT)) {
    // Branchless materialization via the isel instruction.
    switch (cond) {
      case eq:
      case lt:
      case gt:
        // isel selects |reg| (holding 1) when the CR bit is set,
        // kScratchReg (holding 0) otherwise.
        if (reg_value != 1) __ li(reg, Operand(1));
        __ li(kScratchReg, Operand::Zero());
        __ isel(cond, reg, reg, kScratchReg, cr);
        break;
      case ne:
      case ge:
      case le:
        // These conditions have no direct CR bit; select on the negated
        // condition instead, swapping the 1/0 operands.
        if (reg_value != 1) __ li(reg, Operand(1));
        // r0 implies logical zero in this form
        __ isel(NegateCondition(cond), reg, r0, reg, cr);
        break;
      default:
        UNREACHABLE();
        break;
    }
  } else {
    // Fallback: load 0, conditionally branch over the load of 1.
    if (reg_value != 0) __ li(reg, Operand::Zero());
    __ b(NegateCondition(cond), &done, cr);
    __ li(reg, Operand(1));
  }
  __ bind(&done);
}
1733
1734
1735void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1736 PPCOperandConverter i(this, instr);
1737 Register input = i.InputRegister(0);
1738 for (size_t index = 2; index < instr->InputCount(); index += 2) {
Ben Murdochc5610432016-08-08 18:44:38 +01001739 __ Cmpwi(input, Operand(i.InputInt32(index + 0)), r0);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001740 __ beq(GetLabel(i.InputRpo(index + 1)));
1741 }
1742 AssembleArchJump(i.InputRpo(1));
1743}
1744
1745
1746void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1747 PPCOperandConverter i(this, instr);
1748 Register input = i.InputRegister(0);
1749 int32_t const case_count = static_cast<int32_t>(instr->InputCount() - 2);
1750 Label** cases = zone()->NewArray<Label*>(case_count);
1751 for (int32_t index = 0; index < case_count; ++index) {
1752 cases[index] = GetLabel(i.InputRpo(index + 2));
1753 }
1754 Label* const table = AddJumpTable(cases, case_count);
1755 __ Cmpli(input, Operand(case_count), r0);
1756 __ bge(GetLabel(i.InputRpo(1)));
1757 __ mov_label_addr(kScratchReg, table);
1758 __ ShiftLeftImm(r0, input, Operand(kPointerSizeLog2));
1759 __ LoadPX(kScratchReg, MemOperand(kScratchReg, r0));
1760 __ Jump(kScratchReg);
1761}
1762
Ben Murdochc5610432016-08-08 18:44:38 +01001763CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001764 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
1765 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
1766 isolate(), deoptimization_id, bailout_type);
Ben Murdochda12d292016-06-02 14:46:10 +01001767 // TODO(turbofan): We should be able to generate better code by sharing the
1768 // actual final call site and just bl'ing to it here, similar to what we do
1769 // in the lithium backend.
Ben Murdochc5610432016-08-08 18:44:38 +01001770 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001771 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochc5610432016-08-08 18:44:38 +01001772 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001773}
1774
Ben Murdochc5610432016-08-08 18:44:38 +01001775void CodeGenerator::FinishFrame(Frame* frame) {
1776 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1777 const RegList double_saves = descriptor->CalleeSavedFPRegisters();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001778
Ben Murdochc5610432016-08-08 18:44:38 +01001779 // Save callee-saved Double registers.
1780 if (double_saves != 0) {
1781 frame->AlignSavedCalleeRegisterSlots();
1782 DCHECK(kNumCalleeSavedDoubles ==
1783 base::bits::CountPopulation32(double_saves));
1784 frame->AllocateSavedCalleeRegisterSlots(kNumCalleeSavedDoubles *
1785 (kDoubleSize / kPointerSize));
1786 }
1787 // Save callee-saved registers.
1788 const RegList saves =
1789 FLAG_enable_embedded_constant_pool
1790 ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
1791 : descriptor->CalleeSavedRegisters();
1792 if (saves != 0) {
1793 // register save area does not include the fp or constant pool pointer.
1794 const int num_saves =
1795 kNumCalleeSaved - 1 - (FLAG_enable_embedded_constant_pool ? 1 : 0);
1796 DCHECK(num_saves == base::bits::CountPopulation32(saves));
1797 frame->AllocateSavedCalleeRegisterSlots(num_saves);
1798 }
1799}
1800
// Emits the frame-construction prologue: builds the appropriate frame kind
// (C function / JS function / stub), allocates spill slots, and pushes the
// callee-saved double and general registers whose slots were reserved by
// FinishFrame. Push order here must mirror the pop order in AssembleReturn.
void CodeGenerator::AssembleConstructFrame() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsCFunctionCall()) {
      __ function_descriptor();
      __ mflr(r0);  // Save the return address (link register).
      if (FLAG_enable_embedded_constant_pool) {
        __ Push(r0, fp, kConstantPoolRegister);
        // Adjust FP to point to saved FP.
        __ subi(fp, sp, Operand(StandardFrameConstants::kConstantPoolOffset));
      } else {
        __ Push(r0, fp);
        __ mr(fp, sp);
      }
    } else if (descriptor->IsJSFunctionCall()) {
      __ Prologue(this->info()->GeneratePreagedPrologue(), ip);
    } else {
      StackFrame::Type type = info()->GetOutputStackFrameType();
      // TODO(mbrandy): Detect cases where ip is the entrypoint (for
      // efficient initialization of the constant pool pointer register).
      __ StubPrologue(type);
    }
  }

  int shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList double_saves = descriptor->CalleeSavedFPRegisters();
  if (shrink_slots > 0) {
    // Grow the frame downward by the remaining spill-slot count.
    __ Add(sp, sp, -shrink_slots * kPointerSize, r0);
  }

  // Save callee-saved Double registers.
  if (double_saves != 0) {
    __ MultiPushDoubles(double_saves);
    DCHECK(kNumCalleeSavedDoubles ==
           base::bits::CountPopulation32(double_saves));
  }

  // Save callee-saved registers.
  const RegList saves =
      FLAG_enable_embedded_constant_pool
          ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
          : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPush(saves);
    // register save area does not include the fp or constant pool pointer.
  }
}
1861
1862
// Emits the return sequence: restores callee-saved general and then double
// registers (reverse of the prologue push order), deconstructs the frame
// when one was built, and returns, popping the stack parameters.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore registers.
  const RegList saves =
      FLAG_enable_embedded_constant_pool
          ? descriptor->CalleeSavedRegisters() & ~kConstantPoolRegister.bit()
          : descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore double registers.
  const RegList double_saves = descriptor->CalleeSavedFPRegisters();
  if (double_saves != 0) {
    __ MultiPopDoubles(double_saves);
  }

  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now: every return branches
    // to one shared epilogue at return_label_ instead of duplicating it.
    if (return_label_.is_bound()) {
      __ b(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      AssembleDeconstructFrame();
    }
  }
  __ Ret(pop_count);
}
1896
1897
// Emits code for one component of a parallel move: copies |source| into
// |destination|. Sources may be general/FP registers, stack slots, or
// constants; slot-to-slot copies go through the scratch registers.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Move(g.ToRegister(destination), src);
    } else {
      __ StoreP(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ LoadP(g.ToRegister(destination), src, r0);
    } else {
      // Slot-to-slot: bounce through the scratch register.
      Register temp = kScratchReg;
      __ LoadP(temp, src, r0);
      __ StoreP(temp, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize into the destination register, or into the scratch
      // register followed by a store when the destination is a slot.
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
#if V8_TARGET_ARCH_PPC64
          if (src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
#else
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
              src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
#endif
            // WASM memory constants keep their reloc mode so the embedded
            // value can be patched when the memory is relocated/resized.
            __ mov(dst, Operand(src.ToInt32(), src.rmode()));
          } else {
            __ mov(dst, Operand(src.ToInt32()));
          }
          break;
        case Constant::kInt64:
#if V8_TARGET_ARCH_PPC64
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE) {
            __ mov(dst, Operand(src.ToInt64(), src.rmode()));
          } else {
            DCHECK(src.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
#endif
            __ mov(dst, Operand(src.ToInt64()));
#if V8_TARGET_ARCH_PPC64
          }
#endif
          break;
        case Constant::kFloat32:
          // Float constants destined for GP registers/slots are boxed as
          // heap numbers.
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kFloat64:
          __ Move(dst,
                  isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ mov(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int slot;
          // Prefer rematerializing from a frame slot or the root list over
          // embedding the heap object in the code.
          if (IsMaterializableFromFrame(src_object, &slot)) {
            __ LoadP(dst, g.SlotToMemOperand(slot));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ Move(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(dcarney): loading RPO constants on PPC.
          break;
      }
      if (destination->IsStackSlot()) {
        __ StoreP(dst, g.ToMemOperand(destination), r0);
      }
    } else {
      // Float constant into an FP register or FP stack slot.
      DoubleRegister dst = destination->IsFPRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      double value = (src.type() == Constant::kFloat32) ? src.ToFloat32()
                                                        : src.ToFloat64();
      __ LoadDoubleLiteral(dst, value, kScratchReg);
      if (destination->IsFPStackSlot()) {
        __ StoreDouble(dst, g.ToMemOperand(destination), r0);
      }
    }
  } else if (source->IsFPRegister()) {
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      __ StoreDouble(src, g.ToMemOperand(destination), r0);
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsFPRegister()) {
      __ LoadDouble(g.ToDoubleRegister(destination), src, r0);
    } else {
      // FP slot-to-slot: bounce through the scratch double register.
      DoubleRegister temp = kScratchDoubleReg;
      __ LoadDouble(temp, src, r0);
      __ StoreDouble(temp, g.ToMemOperand(destination), r0);
    }
  } else {
    UNREACHABLE();
  }
}
2016
2017
// Emits code to exchange the contents of |source| and |destination| in
// place, using the scratch general/double registers (plus r0 / d0 as the
// second temporary for slot-slot swaps).
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  PPCOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ mr(temp, src);
      __ mr(src, dst);
      __ mr(dst, temp);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mr(temp, src);
      __ LoadP(src, dst);
      __ StoreP(temp, dst);
    }
#if V8_TARGET_ARCH_PPC64
  // On PPC64 a double slot fits in a 64-bit GP register, so FP stack
  // slots can share the GP slot-slot swap path below.
  } else if (source->IsStackSlot() || source->IsFPStackSlot()) {
#else
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
#endif
    Register temp_0 = kScratchReg;
    Register temp_1 = r0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ LoadP(temp_0, src);
    __ LoadP(temp_1, dst);
    __ StoreP(temp_0, dst);
    __ StoreP(temp_1, src);
  } else if (source->IsFPRegister()) {
    DoubleRegister temp = kScratchDoubleReg;
    DoubleRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      DoubleRegister dst = g.ToDoubleRegister(destination);
      __ fmr(temp, src);
      __ fmr(src, dst);
      __ fmr(dst, temp);
    } else {
      DCHECK(destination->IsFPStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ fmr(temp, src);
      __ lfd(src, dst);
      __ stfd(temp, dst);
    }
#if !V8_TARGET_ARCH_PPC64
  // On 32-bit PPC, FP slot-slot swaps need the FP temporaries.
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPStackSlot());
    DoubleRegister temp_0 = kScratchDoubleReg;
    DoubleRegister temp_1 = d0;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ lfd(temp_0, src);
    __ lfd(temp_1, dst);
    __ stfd(temp_0, dst);
    __ stfd(temp_1, src);
#endif
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
2085
2086
2087void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2088 for (size_t index = 0; index < target_count; ++index) {
2089 __ emit_label_addr(targets[index]);
2090 }
2091}
2092
2093
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00002094void CodeGenerator::EnsureSpaceForLazyDeopt() {
2095 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2096 return;
2097 }
2098
2099 int space_needed = Deoptimizer::patch_size();
2100 // Ensure that we have enough space after the previous lazy-bailout
2101 // instruction for patching the code here.
2102 int current_pc = masm()->pc_offset();
2103 if (current_pc < last_lazy_deopt_pc_ + space_needed) {
2104 // Block tramoline pool emission for duration of padding.
2105 v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
2106 masm());
2107 int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
2108 DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
2109 while (padding_size > 0) {
2110 __ nop();
2111 padding_size -= v8::internal::Assembler::kInstrSize;
2112 }
2113 }
2114}
2115
2116#undef __
2117
2118} // namespace compiler
2119} // namespace internal
2120} // namespace v8