// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ast/scopes.h"
#include "src/compiler/code-generator.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {
namespace compiler {

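// '__' is the usual V8 shorthand for emitting instructions through the macro
// assembler returned by masm().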
#define __ masm()->


// TODO(plind): Possibly avoid using these lithium names.
#define kScratchReg kLithiumScratchReg
#define kCompareReg kLithiumScratchReg2
#define kScratchReg2 kLithiumScratchReg2
#define kScratchDoubleReg kLithiumScratchDouble


// TODO(plind): consider renaming these macros.
#define TRACE_MSG(msg)                                                      \
  PrintF("code_gen: \'%s\' in function %s at line %d\n", msg, __FUNCTION__, \
         __LINE__)

#define TRACE_UNIMPL()                                                        \
  PrintF("UNIMPLEMENTED code_generator_mips: %s at line %d\n", __FUNCTION__, \
         __LINE__)


// Adds Mips-specific methods to convert InstructionOperands.
class MipsOperandConverter final : public InstructionOperandConverter {
 public:
  MipsOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  FloatRegister OutputSingleRegister(size_t index = 0) {
    return ToSingleRegister(instr_->OutputAt(index));
  }

  FloatRegister InputSingleRegister(size_t index) {
    return ToSingleRegister(instr_->InputAt(index));
  }

  FloatRegister ToSingleRegister(InstructionOperand* op) {
    // The Single (Float) and Double register namespace is the same on MIPS;
    // both are typedefs of FPURegister.
    return ToDoubleRegister(op);
  }

  DoubleRegister InputOrZeroDoubleRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) return kDoubleRegZero;

    return InputDoubleRegister(index);
  }

  DoubleRegister InputOrZeroSingleRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) return kDoubleRegZero;

    return InputSingleRegister(index);
  }

  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
      case Constant::kExternalReference:
      case Constant::kHeapObject:
        // TODO(plind): Maybe we should handle ExtRef & HeapObj here?
        //    maybe not done on arm due to const pool ??
        break;
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(titzer): RPO immediates on mips?
        break;
    }
    UNREACHABLE();
    return Operand(zero_reg);
  }

  Operand InputOperand(size_t index) {
    InstructionOperand* op = instr_->InputAt(index);
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return InputImmediate(index);
  }

  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        break;
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        // TODO(plind): r6 address mode, to be implemented ...
        UNREACHABLE();
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); }

  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        AllocatedOperand::cast(op)->index());
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};


static inline bool HasRegisterInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsRegister();
}


namespace {

class OutOfLineLoadSingle final : public OutOfLineCode {
 public:
  OutOfLineLoadSingle(CodeGenerator* gen, FloatRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Move(result_, std::numeric_limits<float>::quiet_NaN());
  }

 private:
  FloatRegister const result_;
};


class OutOfLineLoadDouble final : public OutOfLineCode {
 public:
  OutOfLineLoadDouble(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Move(result_, std::numeric_limits<double>::quiet_NaN());
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineLoadInteger final : public OutOfLineCode {
 public:
  OutOfLineLoadInteger(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ mov(result_, zero_reg); }

 private:
  Register const result_;
};


class OutOfLineRound : public OutOfLineCode {
 public:
  OutOfLineRound(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    // Handle rounding to zero case where sign has to be preserved.
    // High bits of double input already in kScratchReg.
    __ srl(at, kScratchReg, 31);
    __ sll(at, at, 31);
    __ Mthc1(at, result_);
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineRound32 : public OutOfLineCode {
 public:
  OutOfLineRound32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    // Handle rounding to zero case where sign has to be preserved.
    // High bits of float input already in kScratchReg.
    __ srl(at, kScratchReg, 31);
    __ sll(at, at, 31);
    __ mtc1(at, result_);
  }

 private:
  DoubleRegister const result_;
};

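// Slow path of the write barrier: it skips Smi values, checks whether the
// stored value points into a page that needs remembering, and then calls the
// RecordWriteStub, saving and restoring ra around the call when the frame was
// elided.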
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (!frame()->needs_frame()) {
      // We need to save and restore ra if the frame was elided.
      __ Push(ra);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    __ Addu(scratch1_, object_, index_);
    __ CallStub(&stub);
    if (!frame()->needs_frame()) {
      __ Pop(ra);
    }
  }

 private:
  Register const object_;
  Register const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};


Condition FlagsConditionToConditionCmp(FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kUnsignedLessThan:
      return lo;
    case kUnsignedGreaterThanOrEqual:
      return hs;
    case kUnsignedLessThanOrEqual:
      return ls;
    case kUnsignedGreaterThan:
      return hi;
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      break;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}


Condition FlagsConditionToConditionTst(FlagsCondition condition) {
  switch (condition) {
    case kNotEqual:
      return ne;
    case kEqual:
      return eq;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}

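// Maps a FlagsCondition to the FPU compare condition to evaluate. |predicate|
// is set to true when a true FPU condition means the original condition
// holds, and to false when the sense has to be inverted (e.g. kNotEqual is
// expressed as "not EQ").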
FPUCondition FlagsConditionToConditionCmpFPU(bool& predicate,
                                             FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      predicate = true;
      return EQ;
    case kNotEqual:
      predicate = false;
      return EQ;
    case kUnsignedLessThan:
      predicate = true;
      return OLT;
    case kUnsignedGreaterThanOrEqual:
      predicate = false;
      return ULT;
    case kUnsignedLessThanOrEqual:
      predicate = true;
      return OLE;
    case kUnsignedGreaterThan:
      predicate = false;
      return ULE;
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      predicate = true;
      break;
    default:
      predicate = true;
      break;
  }
  UNREACHABLE();
  return kNoFPUCondition;
}

}  // namespace

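// The checked load/store macros below bounds-check the offset against the
// length operand and, on failure, either branch to an out-of-line handler
// (NaN for float loads, zero for integer loads) or skip the store entirely.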
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width, asm_instr)                         \
  do {                                                                        \
    auto result = i.Output##width##Register();                                \
    auto ool = new (zone()) OutOfLineLoad##width(this, result);               \
    if (instr->InputAt(0)->IsRegister()) {                                    \
      auto offset = i.InputRegister(0);                                       \
      __ Branch(USE_DELAY_SLOT, ool->entry(), hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(2), offset);                       \
      __ asm_instr(result, MemOperand(kScratchReg, 0));                       \
    } else {                                                                  \
      auto offset = i.InputOperand(0).immediate();                            \
      __ Branch(ool->entry(), ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(result, MemOperand(i.InputRegister(2), offset));           \
    }                                                                         \
    __ bind(ool->exit());                                                     \
  } while (0)


#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                              \
  do {                                                                        \
    auto result = i.OutputRegister();                                         \
    auto ool = new (zone()) OutOfLineLoadInteger(this, result);               \
    if (instr->InputAt(0)->IsRegister()) {                                    \
      auto offset = i.InputRegister(0);                                       \
      __ Branch(USE_DELAY_SLOT, ool->entry(), hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(2), offset);                       \
      __ asm_instr(result, MemOperand(kScratchReg, 0));                       \
    } else {                                                                  \
      auto offset = i.InputOperand(0).immediate();                            \
      __ Branch(ool->entry(), ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(result, MemOperand(i.InputRegister(2), offset));           \
    }                                                                         \
    __ bind(ool->exit());                                                     \
  } while (0)


#define ASSEMBLE_CHECKED_STORE_FLOAT(width, asm_instr)                 \
  do {                                                                 \
    Label done;                                                        \
    if (instr->InputAt(0)->IsRegister()) {                             \
      auto offset = i.InputRegister(0);                                \
      auto value = i.Input##width##Register(2);                        \
      __ Branch(USE_DELAY_SLOT, &done, hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(3), offset);                \
      __ asm_instr(value, MemOperand(kScratchReg, 0));                 \
    } else {                                                           \
      auto offset = i.InputOperand(0).immediate();                     \
      auto value = i.Input##width##Register(2);                        \
      __ Branch(&done, ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(value, MemOperand(i.InputRegister(3), offset));     \
    }                                                                  \
    __ bind(&done);                                                    \
  } while (0)


#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)                      \
  do {                                                                 \
    Label done;                                                        \
    if (instr->InputAt(0)->IsRegister()) {                             \
      auto offset = i.InputRegister(0);                                \
      auto value = i.InputRegister(2);                                 \
      __ Branch(USE_DELAY_SLOT, &done, hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(3), offset);                \
      __ asm_instr(value, MemOperand(kScratchReg, 0));                 \
    } else {                                                           \
      auto offset = i.InputOperand(0).immediate();                     \
      auto value = i.InputRegister(2);                                 \
      __ Branch(&done, ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(value, MemOperand(i.InputRegister(3), offset));     \
    }                                                                  \
    __ bind(&done);                                                    \
  } while (0)

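// The rounding macros use the native rint instructions on MIPS32r6. On older
// variants they first check the exponent: values that are already integral
// (or NaN/Infinity) are passed through unchanged; otherwise the value is
// rounded via an integer conversion and converted back, with the out-of-line
// code restoring the sign when the rounded result is zero. The float variant
// follows the same pattern with single-precision constants.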
#define ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(mode)                                  \
  if (IsMipsArchVariant(kMips32r6)) {                                          \
    __ cfc1(kScratchReg, FCSR);                                                \
    __ li(at, Operand(mode_##mode));                                           \
    __ ctc1(at, FCSR);                                                         \
    __ rint_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));             \
    __ ctc1(kScratchReg, FCSR);                                                \
  } else {                                                                     \
    auto ool = new (zone()) OutOfLineRound(this, i.OutputDoubleRegister());    \
    Label done;                                                                \
    __ Mfhc1(kScratchReg, i.InputDoubleRegister(0));                           \
    __ Ext(at, kScratchReg, HeapNumber::kExponentShift,                        \
           HeapNumber::kExponentBits);                                         \
    __ Branch(USE_DELAY_SLOT, &done, hs, at,                                   \
              Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits)); \
    __ mov_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));              \
    __ mode##_l_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));         \
    __ Move(at, kScratchReg2, i.OutputDoubleRegister());                       \
    __ or_(at, at, kScratchReg2);                                              \
    __ Branch(USE_DELAY_SLOT, ool->entry(), eq, at, Operand(zero_reg));        \
    __ cvt_d_l(i.OutputDoubleRegister(), i.OutputDoubleRegister());            \
    __ bind(ool->exit());                                                      \
    __ bind(&done);                                                            \
  }


#define ASSEMBLE_ROUND_FLOAT_TO_FLOAT(mode)                                   \
  if (IsMipsArchVariant(kMips32r6)) {                                         \
    __ cfc1(kScratchReg, FCSR);                                               \
    __ li(at, Operand(mode_##mode));                                          \
    __ ctc1(at, FCSR);                                                        \
    __ rint_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));            \
    __ ctc1(kScratchReg, FCSR);                                               \
  } else {                                                                    \
    int32_t kFloat32ExponentBias = 127;                                       \
    int32_t kFloat32MantissaBits = 23;                                        \
    int32_t kFloat32ExponentBits = 8;                                         \
    auto ool = new (zone()) OutOfLineRound32(this, i.OutputDoubleRegister()); \
    Label done;                                                               \
    __ mfc1(kScratchReg, i.InputDoubleRegister(0));                           \
    __ Ext(at, kScratchReg, kFloat32MantissaBits, kFloat32ExponentBits);      \
    __ Branch(USE_DELAY_SLOT, &done, hs, at,                                  \
              Operand(kFloat32ExponentBias + kFloat32MantissaBits));          \
    __ mov_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));             \
    __ mode##_w_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));        \
    __ mfc1(at, i.OutputDoubleRegister());                                    \
    __ Branch(USE_DELAY_SLOT, ool->entry(), eq, at, Operand(zero_reg));       \
    __ cvt_s_w(i.OutputDoubleRegister(), i.OutputDoubleRegister());           \
    __ bind(ool->exit());                                                     \
    __ bind(&done);                                                           \
  }

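// The two helpers below adjust the stack for tail calls: the first pops any
// extra stack parameters of the frame being deconstructed, the second grows
// the stack if needed and restores ra/fp from the frame about to be replaced.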
void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ addiu(sp, sp, sp_slot_delta * kPointerSize);
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ Subu(sp, sp, Operand(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame()->needs_frame()) {
    __ lw(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  }
  frame_access_state()->SetFrameAccessToSP();
}


// Assembles an instruction after register allocation, producing machine code.
void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
  MipsOperandConverter i(this, instr);
  InstructionCode opcode = instr->opcode();

  switch (ArchOpcodeField::decode(opcode)) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (instr->InputAt(0)->IsImmediate()) {
        __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
        __ Call(at);
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (instr->InputAt(0)->IsImmediate()) {
        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
        __ Jump(at);
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
        __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
      }

      __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Call(at);
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
        __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
      }

      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Jump(at);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, kScratchReg);
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchNop:
    case kArchThrowTerminator:
      // Don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      break;
    }
    case kArchRet:
      AssembleReturn();
      break;
    case kArchStackPointer:
      __ mov(i.OutputRegister(), sp);
      break;
    case kArchFramePointer:
      __ mov(i.OutputRegister(), fp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->frame()->needs_frame()) {
        __ lw(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ mov(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      Register index = i.InputRegister(1);
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
                                                   scratch0, scratch1, mode);
      __ Addu(at, object, index);
      __ sw(value, MemOperand(at));
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                       ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      __ Addu(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
              Operand(offset.offset()));
      break;
    }
    case kMipsAdd:
      __ Addu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsAddOvf:
      // Pseudo-instruction used for overflow/branch. No opcode emitted here.
      break;
    case kMipsSub:
      __ Subu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsSubOvf:
      // Pseudo-instruction used for overflow/branch. No opcode emitted here.
      break;
    case kMipsMul:
      __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsMulHigh:
      __ Mulh(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsMulHighU:
      __ Mulhu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsDiv:
      __ Div(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      if (IsMipsArchVariant(kMips32r6)) {
        __ selnez(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        __ Movz(i.OutputRegister(), i.InputRegister(1), i.InputRegister(1));
      }
      break;
    case kMipsDivU:
      __ Divu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      if (IsMipsArchVariant(kMips32r6)) {
        __ selnez(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        __ Movz(i.OutputRegister(), i.InputRegister(1), i.InputRegister(1));
      }
      break;
    case kMipsMod:
      __ Mod(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsModU:
      __ Modu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsAnd:
      __ And(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsOr:
      __ Or(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsNor:
      if (instr->InputAt(1)->IsRegister()) {
        __ Nor(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      } else {
        DCHECK(i.InputOperand(1).immediate() == 0);
        __ Nor(i.OutputRegister(), i.InputRegister(0), zero_reg);
      }
      break;
    case kMipsXor:
      __ Xor(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsClz:
      __ Clz(i.OutputRegister(), i.InputRegister(0));
      break;
    case kMipsCtz: {
      Register reg1 = kScratchReg;
      Register reg2 = kScratchReg2;
      Label skip_for_zero;
      Label end;
      // Branch if the operand is zero.
      __ Branch(&skip_for_zero, eq, i.InputRegister(0), Operand(zero_reg));
      // Find the number of bits before the last bit set to 1.
      __ Subu(reg2, zero_reg, i.InputRegister(0));
      __ And(reg2, reg2, i.InputRegister(0));
      __ clz(reg2, reg2);
      // Get the number of bits after the last bit set to 1.
      __ li(reg1, 0x1F);
      __ Subu(i.OutputRegister(), reg1, reg2);
      __ Branch(&end);
      __ bind(&skip_for_zero);
      // If the operand is zero, return word length as the result.
      __ li(i.OutputRegister(), 0x20);
      __ bind(&end);
    } break;
    case kMipsPopcnt: {
      Register reg1 = kScratchReg;
      Register reg2 = kScratchReg2;
      uint32_t m1 = 0x55555555;
      uint32_t m2 = 0x33333333;
      uint32_t m4 = 0x0f0f0f0f;
      uint32_t m8 = 0x00ff00ff;
      uint32_t m16 = 0x0000ffff;

      // Put count of ones in every 2 bits into those 2 bits.
      __ li(at, m1);
      __ srl(reg1, i.InputRegister(0), 1);
      __ And(reg2, i.InputRegister(0), at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Put count of ones in every 4 bits into those 4 bits.
      __ li(at, m2);
      __ srl(reg2, reg1, 2);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Put count of ones in every 8 bits into those 8 bits.
      __ li(at, m4);
      __ srl(reg2, reg1, 4);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Put count of ones in every 16 bits into those 16 bits.
      __ li(at, m8);
      __ srl(reg2, reg1, 8);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Calculate total number of ones.
      __ li(at, m16);
      __ srl(reg2, reg1, 16);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(i.OutputRegister(), reg1, reg2);
    } break;
    case kMipsShl:
      if (instr->InputAt(1)->IsRegister()) {
        __ sllv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        int32_t imm = i.InputOperand(1).immediate();
        __ sll(i.OutputRegister(), i.InputRegister(0), imm);
      }
      break;
    case kMipsShr:
      if (instr->InputAt(1)->IsRegister()) {
        __ srlv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        int32_t imm = i.InputOperand(1).immediate();
        __ srl(i.OutputRegister(), i.InputRegister(0), imm);
      }
      break;
    case kMipsSar:
      if (instr->InputAt(1)->IsRegister()) {
        __ srav(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        int32_t imm = i.InputOperand(1).immediate();
        __ sra(i.OutputRegister(), i.InputRegister(0), imm);
      }
      break;
    case kMipsExt:
      __ Ext(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
             i.InputInt8(2));
      break;
    case kMipsIns:
      if (instr->InputAt(1)->IsImmediate() && i.InputInt8(1) == 0) {
        __ Ins(i.OutputRegister(), zero_reg, i.InputInt8(1), i.InputInt8(2));
      } else {
        __ Ins(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
               i.InputInt8(2));
      }
      break;
    case kMipsRor:
      __ Ror(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsTst:
      // Pseudo-instruction used for tst/branch. No opcode emitted here.
      break;
    case kMipsCmp:
      // Pseudo-instruction used for cmp/branch. No opcode emitted here.
      break;
    case kMipsMov:
      // TODO(plind): Should we combine mov/li like this, or use separate instr?
      //   - Also see x64 ASSEMBLE_BINOP & RegisterOrOperandType
      if (HasRegisterInput(instr, 0)) {
        __ mov(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ li(i.OutputRegister(), i.InputOperand(0));
      }
      break;

    case kMipsCmpS:
      // Pseudo-instruction used for FP cmp/branch. No opcode emitted here.
      break;
    case kMipsAddS:
      // TODO(plind): add special case: combine mult & add.
      __ add_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsSubS:
      __ sub_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsMulS:
      // TODO(plind): add special case: right op is -1.0, see arm port.
      __ mul_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsDivS:
      __ div_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsModS: {
      // TODO(bmeurer): We should really get rid of this special instruction,
      // and generate a CallAddress instruction instead.
      FrameScope scope(masm(), StackFrame::MANUAL);
      __ PrepareCallCFunction(0, 2, kScratchReg);
      __ MovToFloatParameters(i.InputDoubleRegister(0),
                              i.InputDoubleRegister(1));
      // TODO(balazs.kilvady): implement mod_two_floats_operation(isolate())
      __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
                       0, 2);
      // Move the result into the double result register.
      __ MovFromFloatResult(i.OutputSingleRegister());
      break;
    }
    case kMipsAbsS:
      __ abs_s(i.OutputSingleRegister(), i.InputSingleRegister(0));
      break;
    case kMipsSqrtS: {
      __ sqrt_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    }
    case kMipsMaxS:
      __ max_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsMinS:
      __ min_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsCmpD:
      // Pseudo-instruction used for FP cmp/branch. No opcode emitted here.
      break;
    case kMipsAddD:
      // TODO(plind): add special case: combine mult & add.
      __ add_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsSubD:
      __ sub_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsMulD:
      // TODO(plind): add special case: right op is -1.0, see arm port.
      __ mul_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsDivD:
      __ div_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsModD: {
      // TODO(bmeurer): We should really get rid of this special instruction,
      // and generate a CallAddress instruction instead.
      FrameScope scope(masm(), StackFrame::MANUAL);
      __ PrepareCallCFunction(0, 2, kScratchReg);
      __ MovToFloatParameters(i.InputDoubleRegister(0),
                              i.InputDoubleRegister(1));
      __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
                       0, 2);
      // Move the result into the double result register.
      __ MovFromFloatResult(i.OutputDoubleRegister());
      break;
    }
    case kMipsAbsD:
      __ abs_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kMipsSqrtD: {
      __ sqrt_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    }
    case kMipsMaxD:
      __ max_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsMinD:
      __ min_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsFloat64RoundDown: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(floor);
      break;
    }
    case kMipsFloat32RoundDown: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(floor);
      break;
    }
    case kMipsFloat64RoundTruncate: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(trunc);
      break;
    }
    case kMipsFloat32RoundTruncate: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(trunc);
      break;
    }
    case kMipsFloat64RoundUp: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(ceil);
      break;
    }
    case kMipsFloat32RoundUp: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(ceil);
      break;
    }
    case kMipsFloat64RoundTiesEven: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(round);
      break;
    }
    case kMipsFloat32RoundTiesEven: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(round);
      break;
    }
    case kMipsFloat64Max: {
      // (b < a) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_d(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
        __ sel_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_d(OLT, i.InputDoubleRegister(0), i.InputDoubleRegister(1));
        // Left operand is result, passthrough if false.
        __ movt_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsFloat64Min: {
      // (a < b) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_d(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                 i.InputDoubleRegister(1));
        __ sel_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_d(OLT, i.InputDoubleRegister(1), i.InputDoubleRegister(0));
        // Right operand is result, passthrough if false.
        __ movt_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsFloat32Max: {
      // (b < a) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_s(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
        __ sel_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_s(OLT, i.InputDoubleRegister(0), i.InputDoubleRegister(1));
        // Left operand is result, passthrough if false.
        __ movt_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsFloat32Min: {
      // (a < b) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_s(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                 i.InputDoubleRegister(1));
        __ sel_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_s(OLT, i.InputDoubleRegister(1), i.InputDoubleRegister(0));
        // Right operand is result, passthrough if false.
        __ movt_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsCvtSD: {
      __ cvt_s_d(i.OutputSingleRegister(), i.InputDoubleRegister(0));
      break;
    }
    case kMipsCvtDS: {
      __ cvt_d_s(i.OutputDoubleRegister(), i.InputSingleRegister(0));
      break;
    }
    case kMipsCvtDW: {
      FPURegister scratch = kScratchDoubleReg;
      __ mtc1(i.InputRegister(0), scratch);
      __ cvt_d_w(i.OutputDoubleRegister(), scratch);
      break;
    }
    case kMipsCvtSW: {
      FPURegister scratch = kScratchDoubleReg;
      __ mtc1(i.InputRegister(0), scratch);
      __ cvt_s_w(i.OutputDoubleRegister(), scratch);
      break;
    }
    case kMipsCvtSUw: {
      FPURegister scratch = kScratchDoubleReg;
      __ Cvt_d_uw(i.OutputDoubleRegister(), i.InputRegister(0), scratch);
      __ cvt_s_d(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    }
    case kMipsCvtDUw: {
      FPURegister scratch = kScratchDoubleReg;
      __ Cvt_d_uw(i.OutputDoubleRegister(), i.InputRegister(0), scratch);
      break;
    }
    case kMipsFloorWD: {
      FPURegister scratch = kScratchDoubleReg;
      __ floor_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsCeilWD: {
      FPURegister scratch = kScratchDoubleReg;
      __ ceil_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsRoundWD: {
      FPURegister scratch = kScratchDoubleReg;
      __ round_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncWD: {
      FPURegister scratch = kScratchDoubleReg;
      // Other arches use round to zero here, so we follow.
      __ trunc_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsFloorWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ floor_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsCeilWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ ceil_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsRoundWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ round_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ trunc_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncUwD: {
      FPURegister scratch = kScratchDoubleReg;
      // TODO(plind): Fix wrong param order of Trunc_uw_d() macro-asm function.
      __ Trunc_uw_d(i.InputDoubleRegister(0), i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncUwS: {
      FPURegister scratch = kScratchDoubleReg;
      // TODO(plind): Fix wrong param order of Trunc_uw_s() macro-asm function.
      __ Trunc_uw_s(i.InputDoubleRegister(0), i.OutputRegister(), scratch);
      break;
    }
    case kMipsFloat64ExtractLowWord32:
      __ FmoveLow(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kMipsFloat64ExtractHighWord32:
      __ FmoveHigh(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kMipsFloat64InsertLowWord32:
      __ FmoveLow(i.OutputDoubleRegister(), i.InputRegister(1));
      break;
    case kMipsFloat64InsertHighWord32:
      __ FmoveHigh(i.OutputDoubleRegister(), i.InputRegister(1));
      break;
    // ... more basic instructions ...

    case kMipsLbu:
      __ lbu(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsLb:
      __ lb(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsSb:
      __ sb(i.InputRegister(2), i.MemoryOperand());
      break;
    case kMipsLhu:
      __ lhu(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsLh:
      __ lh(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsSh:
      __ sh(i.InputRegister(2), i.MemoryOperand());
      break;
    case kMipsLw:
      __ lw(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsSw:
      __ sw(i.InputRegister(2), i.MemoryOperand());
      break;
    case kMipsLwc1: {
      __ lwc1(i.OutputSingleRegister(), i.MemoryOperand());
      break;
    }
    case kMipsSwc1: {
      size_t index = 0;
      MemOperand operand = i.MemoryOperand(&index);
      __ swc1(i.InputSingleRegister(index), operand);
      break;
    }
    case kMipsLdc1:
      __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand());
      break;
    case kMipsSdc1:
      __ sdc1(i.InputDoubleRegister(2), i.MemoryOperand());
      break;
    case kMipsPush:
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
        __ Subu(sp, sp, Operand(kDoubleSize));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else {
        __ Push(i.InputRegister(0));
        frame_access_state()->IncreaseSPDelta(1);
      }
      break;
    case kMipsStackClaim: {
      __ Subu(sp, sp, Operand(i.InputInt32(0)));
      frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize);
      break;
    }
    case kMipsStoreToStackSlot: {
      if (instr->InputAt(0)->IsDoubleRegister()) {
        __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1)));
      } else {
        __ sw(i.InputRegister(0), MemOperand(sp, i.InputInt32(1)));
      }
      break;
    }
    case kCheckedLoadInt8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lb);
      break;
    case kCheckedLoadUint8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lbu);
      break;
    case kCheckedLoadInt16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lh);
      break;
    case kCheckedLoadUint16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lhu);
      break;
    case kCheckedLoadWord32:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lw);
      break;
    case kCheckedLoadFloat32:
      ASSEMBLE_CHECKED_LOAD_FLOAT(Single, lwc1);
      break;
    case kCheckedLoadFloat64:
      ASSEMBLE_CHECKED_LOAD_FLOAT(Double, ldc1);
      break;
    case kCheckedStoreWord8:
      ASSEMBLE_CHECKED_STORE_INTEGER(sb);
      break;
    case kCheckedStoreWord16:
      ASSEMBLE_CHECKED_STORE_INTEGER(sh);
      break;
    case kCheckedStoreWord32:
      ASSEMBLE_CHECKED_STORE_INTEGER(sw);
      break;
    case kCheckedStoreFloat32:
      ASSEMBLE_CHECKED_STORE_FLOAT(Single, swc1);
      break;
    case kCheckedStoreFloat64:
      ASSEMBLE_CHECKED_STORE_FLOAT(Double, sdc1);
      break;
    case kCheckedLoadWord64:
    case kCheckedStoreWord64:
      UNREACHABLE();  // currently unsupported checked int64 load/store.
      break;
  }
}  // NOLINT(readability/fn_size)


#define UNSUPPORTED_COND(opcode, condition)                                  \
  OFStream out(stdout);                                                      \
  out << "Unsupported " << #opcode << " condition: \"" << condition << "\""; \
  UNIMPLEMENTED();

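// Converts the FlagsConditions used by floating-point compares into assembler
// conditions; returns false for conditions with no direct FP equivalent.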
static bool convertCondition(FlagsCondition condition, Condition& cc) {
  switch (condition) {
    case kEqual:
      cc = eq;
      return true;
    case kNotEqual:
      cc = ne;
      return true;
    case kUnsignedLessThan:
      cc = lt;
      return true;
    case kUnsignedGreaterThanOrEqual:
      cc = uge;
      return true;
    case kUnsignedLessThanOrEqual:
      cc = le;
      return true;
    case kUnsignedGreaterThan:
      cc = ugt;
      return true;
    default:
      break;
  }
  return false;
}


// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  MipsOperandConverter i(this, instr);
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  Condition cc = kNoCondition;
  // MIPS does not have condition code flags, so compare and branch are
  // implemented differently than on other architectures. The compare
  // operations emit MIPS pseudo-instructions, which are handled here by
  // branch instructions that do the actual comparison. It is essential that
  // the input registers to the compare pseudo-op are not modified before this
  // branch op, as they are tested here.

  if (instr->arch_opcode() == kMipsTst) {
    cc = FlagsConditionToConditionTst(branch->condition);
    __ And(at, i.InputRegister(0), i.InputOperand(1));
    __ Branch(tlabel, cc, at, Operand(zero_reg));
  } else if (instr->arch_opcode() == kMipsAddOvf) {
    switch (branch->condition) {
      case kOverflow:
        __ AddBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), tlabel, flabel);
        break;
      case kNotOverflow:
        __ AddBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), flabel, tlabel);
        break;
      default:
        UNSUPPORTED_COND(kMipsAddOvf, branch->condition);
        break;
    }
  } else if (instr->arch_opcode() == kMipsSubOvf) {
    switch (branch->condition) {
      case kOverflow:
        __ SubBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), tlabel, flabel);
        break;
      case kNotOverflow:
        __ SubBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), flabel, tlabel);
        break;
      default:
        UNSUPPORTED_COND(kMipsSubOvf, branch->condition);
        break;
    }
  } else if (instr->arch_opcode() == kMipsCmp) {
    cc = FlagsConditionToConditionCmp(branch->condition);
    __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
  } else if (instr->arch_opcode() == kMipsCmpS) {
    if (!convertCondition(branch->condition, cc)) {
      UNSUPPORTED_COND(kMipsCmpS, branch->condition);
    }
    FPURegister left = i.InputOrZeroSingleRegister(0);
    FPURegister right = i.InputOrZeroSingleRegister(1);
    if ((left.is(kDoubleRegZero) || right.is(kDoubleRegZero)) &&
        !__ IsDoubleZeroRegSet()) {
      __ Move(kDoubleRegZero, 0.0);
    }
    __ BranchF32(tlabel, nullptr, cc, left, right);
  } else if (instr->arch_opcode() == kMipsCmpD) {
    if (!convertCondition(branch->condition, cc)) {
      UNSUPPORTED_COND(kMipsCmpD, branch->condition);
    }
    FPURegister left = i.InputOrZeroDoubleRegister(0);
    FPURegister right = i.InputOrZeroDoubleRegister(1);
    if ((left.is(kDoubleRegZero) || right.is(kDoubleRegZero)) &&
        !__ IsDoubleZeroRegSet()) {
      __ Move(kDoubleRegZero, 0.0);
    }
    __ BranchF64(tlabel, nullptr, cc, left, right);
  } else {
    PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n",
           instr->arch_opcode());
    UNIMPLEMENTED();
  }
  if (!branch->fallthru) __ Branch(flabel);  // no fallthru to flabel.
}


void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
}


// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  MipsOperandConverter i(this, instr);
  Label done;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label false_value;
  DCHECK_NE(0u, instr->OutputCount());
  Register result = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = kNoCondition;
  // MIPS does not have condition code flags, so compare and branch are
  // implemented differently than on other architectures. The compare
  // operations emit MIPS pseudo-instructions, which are checked and handled
  // here.

  if (instr->arch_opcode() == kMipsTst) {
    cc = FlagsConditionToConditionTst(condition);
    __ And(kScratchReg, i.InputRegister(0), i.InputOperand(1));
    __ Sltu(result, zero_reg, kScratchReg);
    if (cc == eq) {
      // Sltu produces 0 for equality, invert the result.
      __ xori(result, result, 1);
    }
    return;
  } else if (instr->arch_opcode() == kMipsAddOvf ||
             instr->arch_opcode() == kMipsSubOvf) {
    Label flabel, tlabel;
    switch (instr->arch_opcode()) {
      case kMipsAddOvf:
        __ AddBranchNoOvf(i.OutputRegister(), i.InputRegister(0),
                          i.InputOperand(1), &flabel);
        break;
      case kMipsSubOvf:
        __ SubBranchNoOvf(i.OutputRegister(), i.InputRegister(0),
                          i.InputOperand(1), &flabel);
        break;
      default:
        UNREACHABLE();
        break;
    }
    __ li(result, 1);
    __ Branch(&tlabel);
    __ bind(&flabel);
    __ li(result, 0);
    __ bind(&tlabel);
  } else if (instr->arch_opcode() == kMipsCmp) {
    cc = FlagsConditionToConditionCmp(condition);
    switch (cc) {
      case eq:
      case ne: {
        Register left = i.InputRegister(0);
        Operand right = i.InputOperand(1);
        Register select;
        if (instr->InputAt(1)->IsImmediate() && right.immediate() == 0) {
          // Pass left operand if right is zero.
          select = left;
        } else {
          __ Subu(kScratchReg, left, right);
          select = kScratchReg;
        }
        __ Sltu(result, zero_reg, select);
        if (cc == eq) {
          // Sltu produces 0 for equality, invert the result.
          __ xori(result, result, 1);
        }
      } break;
      case lt:
      case ge: {
        Register left = i.InputRegister(0);
        Operand right = i.InputOperand(1);
        __ Slt(result, left, right);
        if (cc == ge) {
          __ xori(result, result, 1);
        }
      } break;
      case gt:
      case le: {
        Register left = i.InputRegister(1);
        Operand right = i.InputOperand(0);
        __ Slt(result, left, right);
        if (cc == le) {
          __ xori(result, result, 1);
        }
      } break;
      case lo:
      case hs: {
        Register left = i.InputRegister(0);
        Operand right = i.InputOperand(1);
        __ Sltu(result, left, right);
        if (cc == hs) {
          __ xori(result, result, 1);
        }
      } break;
      case hi:
      case ls: {
        Register left = i.InputRegister(1);
        Operand right = i.InputOperand(0);
        __ Sltu(result, left, right);
        if (cc == ls) {
          __ xori(result, result, 1);
        }
      } break;
      default:
        UNREACHABLE();
    }
    return;
  } else if (instr->arch_opcode() == kMipsCmpD ||
             instr->arch_opcode() == kMipsCmpS) {
    FPURegister left = i.InputOrZeroDoubleRegister(0);
    FPURegister right = i.InputOrZeroDoubleRegister(1);
    if ((left.is(kDoubleRegZero) || right.is(kDoubleRegZero)) &&
        !__ IsDoubleZeroRegSet()) {
      __ Move(kDoubleRegZero, 0.0);
    }
    bool predicate;
    FPUCondition cc = FlagsConditionToConditionCmpFPU(predicate, condition);
    if (!IsMipsArchVariant(kMips32r6)) {
      __ li(result, Operand(1));
      if (instr->arch_opcode() == kMipsCmpD) {
        __ c(cc, D, left, right);
      } else {
        DCHECK(instr->arch_opcode() == kMipsCmpS);
        __ c(cc, S, left, right);
      }
      if (predicate) {
        __ Movf(result, zero_reg);
      } else {
        __ Movt(result, zero_reg);
      }
    } else {
      if (instr->arch_opcode() == kMipsCmpD) {
        __ cmp(cc, L, kDoubleCompareReg, left, right);
      } else {
        DCHECK(instr->arch_opcode() == kMipsCmpS);
        __ cmp(cc, W, kDoubleCompareReg, left, right);
      }
      __ mfc1(result, kDoubleCompareReg);
      __ andi(result, result, 1);  // Cmp returns all 1's/0's, use only LSB.
      if (!predicate)  // Toggle result for not equal.
        __ xori(result, result, 1);
    }
    return;
  } else {
    PrintF("AssembleArchBoolean Unimplemented arch_opcode is: %d\n",
           instr->arch_opcode());
    TRACE_UNIMPL();
    UNIMPLEMENTED();
  }
}

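// A lookup switch emits a compare-and-branch per case value; the table switch
// below bounds-checks the input and dispatches through a jump table instead.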
void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  MipsOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ li(at, Operand(i.InputInt32(index + 0)));
    __ beq(input, at, GetLabel(i.InputRpo(index + 1)));
  }
  __ nop();  // Branch delay slot of the last beq.
  AssembleArchJump(i.InputRpo(1));
}


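// Table switch: branches to the default block (input 1) when the key is out
// of range, otherwise dispatches through an inline jump table indexed by the
// key register.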
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  MipsOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  size_t const case_count = instr->InputCount() - 2;
  __ Branch(GetLabel(i.InputRpo(1)), hs, input, Operand(case_count));
  __ GenerateSwitchTable(input, case_count, [&i, this](size_t index) {
    return GetLabel(i.InputRpo(index + 2));
  });
}


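// Emits a call to the pre-generated deoptimizer entry for the given bailout
// type.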
void CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
}


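// Builds the appropriate frame (C call, JS call, stub, or elided frame),
// reserves the spill slot area, and saves callee-saved GP and FPU registers.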
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (descriptor->IsCFunctionCall()) {
    __ Push(ra, fp);
    __ mov(fp, sp);
  } else if (descriptor->IsJSFunctionCall()) {
    __ Prologue(this->info()->GeneratePreagedPrologue());
  } else if (frame()->needs_frame()) {
    __ StubPrologue();
  } else {
    frame()->SetElidedFrameSizeInSlots(0);
  }
  frame_access_state()->SetFrameAccessToDefault();

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList saves_fpu = descriptor->CalleeSavedFPRegisters();
  if (saves_fpu != 0) {
    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  if (stack_shrink_slots > 0) {
    __ Subu(sp, sp, Operand(stack_shrink_slots * kPointerSize));
  }

  // Save callee-saved FPU registers.
  if (saves_fpu != 0) {
    __ MultiPushFPU(saves_fpu);
    int count = base::bits::CountPopulation32(saves_fpu);
    DCHECK(kNumCalleeSavedFPU == count);
    frame()->AllocateSavedCalleeRegisterSlots(count *
                                              (kDoubleSize / kPointerSize));
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // Save callee-saved registers.
    __ MultiPush(saves);
    // kNumCalleeSaved includes the fp register, but the fp register
    // is saved separately in TF.
    int count = base::bits::CountPopulation32(saves);
    DCHECK(kNumCalleeSaved == count + 1);
    frame()->AllocateSavedCalleeRegisterSlots(count);
  }
}


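// Restores callee-saved registers, tears down the frame if one was built, and
// drops any stack-passed parameters before returning.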
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore GP registers.
  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore FPU registers.
  const RegList saves_fpu = descriptor->CalleeSavedFPRegisters();
  if (saves_fpu != 0) {
    __ MultiPopFPU(saves_fpu);
  }

  if (descriptor->IsCFunctionCall()) {
    __ mov(sp, fp);
    __ Pop(ra, fp);
  } else if (frame()->needs_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ Branch(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      __ mov(sp, fp);
      __ Pop(ra, fp);
    }
  }
  if (pop_count != 0) {
    __ DropAndRet(pop_count);
  } else {
    __ Ret();
  }
}


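// Emits a single move between registers, stack slots, and constants, using
// kScratchReg/kScratchDoubleReg as a temporary when the move is
// memory-to-memory or materializes a constant into a stack slot.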
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  MipsOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(g.ToRegister(destination), src);
    } else {
      __ sw(src, g.ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ lw(g.ToRegister(destination), src);
    } else {
      Register temp = kScratchReg;
      __ lw(temp, src);
      __ sw(temp, g.ToMemOperand(destination));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          __ li(dst, Operand(src.ToInt32()));
          break;
        case Constant::kFloat32:
          __ li(dst, isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kInt64:
          UNREACHABLE();
          break;
        case Constant::kFloat64:
          __ li(dst, isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ li(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int offset;
          if (IsMaterializableFromFrame(src_object, &offset)) {
            __ lw(dst, MemOperand(fp, offset));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ li(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(titzer): loading RPO numbers on mips.
          break;
      }
      if (destination->IsStackSlot()) __ sw(dst, g.ToMemOperand(destination));
    } else if (src.type() == Constant::kFloat32) {
      if (destination->IsDoubleStackSlot()) {
        MemOperand dst = g.ToMemOperand(destination);
        __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32())));
        __ sw(at, dst);
      } else {
        FloatRegister dst = g.ToSingleRegister(destination);
        __ Move(dst, src.ToFloat32());
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      DoubleRegister dst = destination->IsDoubleRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      __ Move(dst, src.ToFloat64());
      if (destination->IsDoubleStackSlot()) {
        __ sdc1(dst, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsDoubleRegister()) {
    FPURegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      FPURegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ sdc1(src, g.ToMemOperand(destination));
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsDoubleRegister()) {
      __ ldc1(g.ToDoubleRegister(destination), src);
    } else {
      FPURegister temp = kScratchDoubleReg;
      __ ldc1(temp, src);
      __ sdc1(temp, g.ToMemOperand(destination));
    }
  } else {
    UNREACHABLE();
  }
}


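// Swaps two operands in place; stack-to-stack swaps go through the scratch
// registers so no extra stack space is needed.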
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  MipsOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mov(temp, src);
      __ lw(src, dst);
      __ sw(temp, dst);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
    Register temp_0 = kScratchReg;
    Register temp_1 = kCompareReg;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ lw(temp_0, src);
    __ lw(temp_1, dst);
    __ sw(temp_0, dst);
    __ sw(temp_1, src);
  } else if (source->IsDoubleRegister()) {
    FPURegister temp = kScratchDoubleReg;
    FPURegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      FPURegister dst = g.ToDoubleRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ Move(temp, src);
      __ ldc1(src, dst);
      __ sdc1(temp, dst);
    }
  } else if (source->IsDoubleStackSlot()) {
    DCHECK(destination->IsDoubleStackSlot());
    Register temp_0 = kScratchReg;
    FPURegister temp_1 = kScratchDoubleReg;
    MemOperand src0 = g.ToMemOperand(source);
    MemOperand src1(src0.rm(), src0.offset() + kIntSize);
    MemOperand dst0 = g.ToMemOperand(destination);
    MemOperand dst1(dst0.rm(), dst0.offset() + kIntSize);
    __ ldc1(temp_1, dst0);  // Save destination in temp_1.
    __ lw(temp_0, src0);    // Then use temp_0 to copy source to destination.
    __ sw(temp_0, dst0);
    __ lw(temp_0, src1);
    __ sw(temp_0, dst1);
    __ sdc1(temp_1, src0);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 32-bit MIPS we emit the jump tables inline.
  UNREACHABLE();
}


void CodeGenerator::AddNopForSmiCodeInlining() {
  // Unused on 32-bit MIPS; the hook is kept for parity with other ports.
  // TODO(plind): Unclear when this is called now. Understand, fix if needed.
  __ nop();  // Maybe PROPERTY_ACCESS_INLINED?
}


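// Pads the code with nops so that there is room to patch a lazy-deopt call
// after the most recent lazy bailout point.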
void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    // Block trampoline pool emission for the duration of the padding.
    v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
        masm());
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= v8::internal::Assembler::kInstrSize;
    }
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8