// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/ast/scopes.h"
#include "src/compiler/code-generator.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"
#include "src/mips/macro-assembler-mips.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->


// TODO(plind): Possibly avoid using these lithium names.
#define kScratchReg kLithiumScratchReg
#define kCompareReg kLithiumScratchReg2
#define kScratchReg2 kLithiumScratchReg2
#define kScratchDoubleReg kLithiumScratchDouble


// TODO(plind): consider renaming these macros.
#define TRACE_MSG(msg)                                                      \
  PrintF("code_gen: \'%s\' in function %s at line %d\n", msg, __FUNCTION__, \
         __LINE__)

#define TRACE_UNIMPL()                                                       \
  PrintF("UNIMPLEMENTED code_generator_mips: %s at line %d\n", __FUNCTION__, \
         __LINE__)


// Adds Mips-specific methods to convert InstructionOperands.
class MipsOperandConverter final : public InstructionOperandConverter {
 public:
  MipsOperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  FloatRegister OutputSingleRegister(size_t index = 0) {
    return ToSingleRegister(instr_->OutputAt(index));
  }

  FloatRegister InputSingleRegister(size_t index) {
    return ToSingleRegister(instr_->InputAt(index));
  }

  FloatRegister ToSingleRegister(InstructionOperand* op) {
    // The Single (Float) and Double register namespaces are the same on MIPS;
    // both are typedefs of FPURegister.
    return ToDoubleRegister(op);
  }

  DoubleRegister InputOrZeroDoubleRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) return kDoubleRegZero;

    return InputDoubleRegister(index);
  }

  DoubleRegister InputOrZeroSingleRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) return kDoubleRegZero;

    return InputSingleRegister(index);
  }

  Operand InputImmediate(size_t index) {
    Constant constant = ToConstant(instr_->InputAt(index));
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kInt64:
      case Constant::kExternalReference:
      case Constant::kHeapObject:
        // TODO(plind): Maybe we should handle ExtRef & HeapObj here?
        //    maybe not done on arm due to const pool ??
        break;
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(titzer): RPO immediates on mips?
        break;
    }
    UNREACHABLE();
    return Operand(zero_reg);
  }

  Operand InputOperand(size_t index) {
    InstructionOperand* op = instr_->InputAt(index);
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return InputImmediate(index);
  }

  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        break;
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        // TODO(plind): r6 address mode, to be implemented ...
        UNREACHABLE();
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  MemOperand MemoryOperand(size_t index = 0) { return MemoryOperand(&index); }

  MemOperand ToMemOperand(InstructionOperand* op) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index());
  }

  MemOperand SlotToMemOperand(int slot) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    return MemOperand(offset.from_stack_pointer() ? sp : fp, offset.offset());
  }
};


static inline bool HasRegisterInput(Instruction* instr, size_t index) {
  return instr->InputAt(index)->IsRegister();
}


namespace {

class OutOfLineLoadSingle final : public OutOfLineCode {
 public:
  OutOfLineLoadSingle(CodeGenerator* gen, FloatRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Move(result_, std::numeric_limits<float>::quiet_NaN());
  }

 private:
  FloatRegister const result_;
};


class OutOfLineLoadDouble final : public OutOfLineCode {
 public:
  OutOfLineLoadDouble(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Move(result_, std::numeric_limits<double>::quiet_NaN());
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineLoadInteger final : public OutOfLineCode {
 public:
  OutOfLineLoadInteger(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ mov(result_, zero_reg); }

 private:
  Register const result_;
};


class OutOfLineRound : public OutOfLineCode {
 public:
  OutOfLineRound(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    // Handle rounding to zero case where sign has to be preserved.
    // High bits of double input already in kScratchReg.
    __ srl(at, kScratchReg, 31);
    __ sll(at, at, 31);
    __ Mthc1(at, result_);
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineRound32 : public OutOfLineCode {
 public:
  OutOfLineRound32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    // Handle rounding to zero case where sign has to be preserved.
    // High bits of float input already in kScratchReg.
    __ srl(at, kScratchReg, 31);
    __ sll(at, at, 31);
    __ mtc1(at, result_);
  }

 private:
  DoubleRegister const result_;
};

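// Out-of-line part of the record-write barrier: reached only when the page
// flags indicate the stored value may be interesting. It skips Smi values,
// re-checks the value's page flags, and calls RecordWriteStub, pushing and
// popping ra when no frame was set up for this code object.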
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Register index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlag(value_, scratch0_,
                     MemoryChunk::kPointersToHereAreInterestingMask, eq,
                     exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore ra if the frame was elided.
      __ Push(ra);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    __ Addu(scratch1_, object_, index_);
    __ CallStub(&stub);
    if (must_save_lr_) {
      __ Pop(ra);
    }
  }

 private:
  Register const object_;
  Register const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  bool must_save_lr_;
};


Condition FlagsConditionToConditionCmp(FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kUnsignedLessThan:
      return lo;
    case kUnsignedGreaterThanOrEqual:
      return hs;
    case kUnsignedLessThanOrEqual:
      return ls;
    case kUnsignedGreaterThan:
      return hi;
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      break;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}


Condition FlagsConditionToConditionTst(FlagsCondition condition) {
  switch (condition) {
    case kNotEqual:
      return ne;
    case kEqual:
      return eq;
    default:
      break;
  }
  UNREACHABLE();
  return kNoCondition;
}

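// Maps a FlagsCondition to an FPU compare condition plus a predicate flag:
// 'predicate' is set to true when the branch should be taken if the FPU
// comparison holds, and to false when the condition must be expressed as the
// negation of the returned FPUCondition (e.g. kNotEqual is "not EQ").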
FPUCondition FlagsConditionToConditionCmpFPU(bool& predicate,
                                             FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      predicate = true;
      return EQ;
    case kNotEqual:
      predicate = false;
      return EQ;
    case kUnsignedLessThan:
      predicate = true;
      return OLT;
    case kUnsignedGreaterThanOrEqual:
      predicate = false;
      return ULT;
    case kUnsignedLessThanOrEqual:
      predicate = true;
      return OLE;
    case kUnsignedGreaterThan:
      predicate = false;
      return ULE;
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      predicate = true;
      break;
    default:
      predicate = true;
      break;
  }
  UNREACHABLE();
  return kNoFPUCondition;
}

}  // namespace

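// The checked load/store macros below emit an unsigned bounds check of the
// offset (inputs 0 and 1) and either perform the access or branch to the
// out-of-line code above, which yields NaN for float loads and zero for
// integer loads; checked stores simply skip the store when out of bounds.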
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width, asm_instr)                         \
  do {                                                                        \
    auto result = i.Output##width##Register();                                \
    auto ool = new (zone()) OutOfLineLoad##width(this, result);               \
    if (instr->InputAt(0)->IsRegister()) {                                    \
      auto offset = i.InputRegister(0);                                       \
      __ Branch(USE_DELAY_SLOT, ool->entry(), hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(2), offset);                       \
      __ asm_instr(result, MemOperand(kScratchReg, 0));                       \
    } else {                                                                  \
      auto offset = i.InputOperand(0).immediate();                            \
      __ Branch(ool->entry(), ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(result, MemOperand(i.InputRegister(2), offset));           \
    }                                                                         \
    __ bind(ool->exit());                                                     \
  } while (0)


#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)                              \
  do {                                                                        \
    auto result = i.OutputRegister();                                         \
    auto ool = new (zone()) OutOfLineLoadInteger(this, result);               \
    if (instr->InputAt(0)->IsRegister()) {                                    \
      auto offset = i.InputRegister(0);                                       \
      __ Branch(USE_DELAY_SLOT, ool->entry(), hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(2), offset);                       \
      __ asm_instr(result, MemOperand(kScratchReg, 0));                       \
    } else {                                                                  \
      auto offset = i.InputOperand(0).immediate();                            \
      __ Branch(ool->entry(), ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(result, MemOperand(i.InputRegister(2), offset));           \
    }                                                                         \
    __ bind(ool->exit());                                                     \
  } while (0)


#define ASSEMBLE_CHECKED_STORE_FLOAT(width, asm_instr)                 \
  do {                                                                 \
    Label done;                                                        \
    if (instr->InputAt(0)->IsRegister()) {                             \
      auto offset = i.InputRegister(0);                                \
      auto value = i.Input##width##Register(2);                        \
      __ Branch(USE_DELAY_SLOT, &done, hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(3), offset);                \
      __ asm_instr(value, MemOperand(kScratchReg, 0));                 \
    } else {                                                           \
      auto offset = i.InputOperand(0).immediate();                     \
      auto value = i.Input##width##Register(2);                        \
      __ Branch(&done, ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(value, MemOperand(i.InputRegister(3), offset));     \
    }                                                                  \
    __ bind(&done);                                                    \
  } while (0)


#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)                      \
  do {                                                                 \
    Label done;                                                        \
    if (instr->InputAt(0)->IsRegister()) {                             \
      auto offset = i.InputRegister(0);                                \
      auto value = i.InputRegister(2);                                 \
      __ Branch(USE_DELAY_SLOT, &done, hs, offset, i.InputOperand(1)); \
      __ addu(kScratchReg, i.InputRegister(3), offset);                \
      __ asm_instr(value, MemOperand(kScratchReg, 0));                 \
    } else {                                                           \
      auto offset = i.InputOperand(0).immediate();                     \
      auto value = i.InputRegister(2);                                 \
      __ Branch(&done, ls, i.InputRegister(1), Operand(offset));       \
      __ asm_instr(value, MemOperand(i.InputRegister(3), offset));     \
    }                                                                  \
    __ bind(&done);                                                    \
  } while (0)

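// Rounding helpers: on MIPS32r6 the rounding mode is set in FCSR and a single
// rint instruction is used; on older variants the input's exponent is checked
// first so that values too large to carry a fractional part are passed
// through unchanged, and a zero result is fixed up out of line to preserve
// its sign (see OutOfLineRound/OutOfLineRound32 above).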
#define ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(mode)                                  \
  if (IsMipsArchVariant(kMips32r6)) {                                          \
    __ cfc1(kScratchReg, FCSR);                                                \
    __ li(at, Operand(mode_##mode));                                           \
    __ ctc1(at, FCSR);                                                         \
    __ rint_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));             \
    __ ctc1(kScratchReg, FCSR);                                                \
  } else {                                                                     \
    auto ool = new (zone()) OutOfLineRound(this, i.OutputDoubleRegister());    \
    Label done;                                                                \
    __ Mfhc1(kScratchReg, i.InputDoubleRegister(0));                           \
    __ Ext(at, kScratchReg, HeapNumber::kExponentShift,                        \
           HeapNumber::kExponentBits);                                         \
    __ Branch(USE_DELAY_SLOT, &done, hs, at,                                   \
              Operand(HeapNumber::kExponentBias + HeapNumber::kMantissaBits)); \
    __ mov_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));              \
    __ mode##_l_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));         \
    __ Move(at, kScratchReg2, i.OutputDoubleRegister());                       \
    __ or_(at, at, kScratchReg2);                                              \
    __ Branch(USE_DELAY_SLOT, ool->entry(), eq, at, Operand(zero_reg));        \
    __ cvt_d_l(i.OutputDoubleRegister(), i.OutputDoubleRegister());            \
    __ bind(ool->exit());                                                      \
    __ bind(&done);                                                            \
  }


#define ASSEMBLE_ROUND_FLOAT_TO_FLOAT(mode)                                   \
  if (IsMipsArchVariant(kMips32r6)) {                                         \
    __ cfc1(kScratchReg, FCSR);                                               \
    __ li(at, Operand(mode_##mode));                                          \
    __ ctc1(at, FCSR);                                                        \
    __ rint_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));            \
    __ ctc1(kScratchReg, FCSR);                                               \
  } else {                                                                    \
    int32_t kFloat32ExponentBias = 127;                                       \
    int32_t kFloat32MantissaBits = 23;                                        \
    int32_t kFloat32ExponentBits = 8;                                         \
    auto ool = new (zone()) OutOfLineRound32(this, i.OutputDoubleRegister()); \
    Label done;                                                               \
    __ mfc1(kScratchReg, i.InputDoubleRegister(0));                           \
    __ Ext(at, kScratchReg, kFloat32MantissaBits, kFloat32ExponentBits);      \
    __ Branch(USE_DELAY_SLOT, &done, hs, at,                                  \
              Operand(kFloat32ExponentBias + kFloat32MantissaBits));          \
    __ mov_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));             \
    __ mode##_w_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));        \
    __ mfc1(at, i.OutputDoubleRegister());                                    \
    __ Branch(USE_DELAY_SLOT, ool->entry(), eq, at, Operand(zero_reg));       \
    __ cvt_s_w(i.OutputDoubleRegister(), i.OutputDoubleRegister());           \
    __ bind(ool->exit());                                                     \
    __ bind(&done);                                                           \
  }

#define ASSEMBLE_ATOMIC_LOAD_INTEGER(asm_instr)          \
  do {                                                   \
    __ asm_instr(i.OutputRegister(), i.MemoryOperand()); \
    __ sync();                                           \
  } while (0)

#define ASSEMBLE_ATOMIC_STORE_INTEGER(asm_instr)         \
  do {                                                   \
    __ sync();                                           \
    __ asm_instr(i.InputRegister(2), i.MemoryOperand()); \
    __ sync();                                           \
  } while (0)

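// The IEEE754 macros route Float64 math operations to the C implementations
// behind ExternalReference::ieee754_*_function via a regular C call, passing
// the operands in FP parameter registers and reading the FP result register.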
#define ASSEMBLE_IEEE754_BINOP(name)                                          \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ PrepareCallCFunction(0, 2, kScratchReg);                               \
    __ MovToFloatParameters(i.InputDoubleRegister(0),                         \
                            i.InputDoubleRegister(1));                        \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     0, 2);                                                   \
    /* Move the result in the double result register. */                     \
    __ MovFromFloatResult(i.OutputDoubleRegister());                          \
  } while (0)

#define ASSEMBLE_IEEE754_UNOP(name)                                           \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ PrepareCallCFunction(0, 1, kScratchReg);                               \
    __ MovToFloatParameter(i.InputDoubleRegister(0));                         \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     0, 1);                                                   \
    /* Move the result in the double result register. */                     \
    __ MovFromFloatResult(i.OutputDoubleRegister());                          \
  } while (0)

void CodeGenerator::AssembleDeconstructFrame() {
  __ mov(sp, fp);
  __ Pop(ra, fp);
}

void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta > 0) {
    __ addiu(sp, sp, sp_slot_delta * kPointerSize);
  }
  frame_access_state()->SetFrameAccessToDefault();
}


void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
  int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
  if (sp_slot_delta < 0) {
    __ Subu(sp, sp, Operand(-sp_slot_delta * kPointerSize));
    frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
  }
  if (frame_access_state()->has_frame()) {
    __ lw(ra, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ lw(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  }
  frame_access_state()->SetFrameAccessToSP();
}

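// Before a tail call from a JSFunction, drop an arguments adaptor frame if
// one is on top of the stack, so the callee sees the real argument count.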
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ lw(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ Branch(&done, ne, scratch1,
            Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ lw(caller_args_count_reg,
        MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}

// Assembles an instruction after register allocation, producing machine code.
CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
    Instruction* instr) {
  MipsOperandConverter i(this, instr);
  InstructionCode opcode = instr->opcode();
  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
  switch (arch_opcode) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (instr->InputAt(0)->IsImmediate()) {
        __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
        __ Call(at);
      }
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObjectFromJSFunction:
    case kArchTailCallCodeObject: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      if (instr->InputAt(0)->IsImmediate()) {
        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        __ addiu(at, i.InputRegister(0), Code::kHeaderSize - kHeapObjectTag);
        __ Jump(at);
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallAddress: {
      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      CHECK(!instr->InputAt(0)->IsImmediate());
      __ Jump(i.InputRegister(0));
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
        __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
      }

      __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Call(at);
      RecordCallPosition(instr);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallJSFunctionFromJSFunction:
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        __ lw(kScratchReg, FieldMemOperand(func, JSFunction::kContextOffset));
        __ Assert(eq, kWrongFunctionContext, cp, Operand(kScratchReg));
      }

      int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
      AssembleDeconstructActivationRecord(stack_param_delta);
      if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      __ lw(at, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Jump(at);
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchPrepareCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      __ PrepareCallCFunction(num_parameters, kScratchReg);
      // Frame alignment requires using FP-relative frame addressing.
      frame_access_state()->SetFrameAccessToFP();
      break;
    }
    case kArchPrepareTailCall:
      AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters);
      }
      frame_access_state()->SetFrameAccessToDefault();
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchDebugBreak:
      __ stop("kArchDebugBreak");
      break;
    case kArchComment: {
      Address comment_string = i.InputExternalReference(0).address();
      __ RecordComment(reinterpret_cast<const char*>(comment_string));
      break;
    }
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      CodeGenResult result =
          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      if (result != kSuccess) return result;
      break;
    }
    case kArchRet:
      AssembleReturn();
      break;
    case kArchStackPointer:
      __ mov(i.OutputRegister(), sp);
      break;
    case kArchFramePointer:
      __ mov(i.OutputRegister(), fp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->has_frame()) {
        __ lw(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ mov(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      Register object = i.InputRegister(0);
      Register index = i.InputRegister(1);
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
                                                   scratch0, scratch1, mode);
      __ Addu(at, object, index);
      __ sw(value, MemOperand(at));
      __ CheckPageFlag(object, scratch0,
                       MemoryChunk::kPointersFromHereAreInterestingMask, ne,
                       ool->entry());
      __ bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      __ Addu(i.OutputRegister(), offset.from_stack_pointer() ? sp : fp,
              Operand(offset.offset()));
      break;
    }
    case kIeee754Float64Atan:
      ASSEMBLE_IEEE754_UNOP(atan);
      break;
    case kIeee754Float64Atan2:
      ASSEMBLE_IEEE754_BINOP(atan2);
      break;
    case kIeee754Float64Cos:
      ASSEMBLE_IEEE754_UNOP(cos);
      break;
    case kIeee754Float64Cbrt:
      ASSEMBLE_IEEE754_UNOP(cbrt);
      break;
    case kIeee754Float64Exp:
      ASSEMBLE_IEEE754_UNOP(exp);
      break;
    case kIeee754Float64Expm1:
      ASSEMBLE_IEEE754_UNOP(expm1);
      break;
    case kIeee754Float64Atanh:
      ASSEMBLE_IEEE754_UNOP(atanh);
      break;
    case kIeee754Float64Log:
      ASSEMBLE_IEEE754_UNOP(log);
      break;
    case kIeee754Float64Log1p:
      ASSEMBLE_IEEE754_UNOP(log1p);
      break;
    case kIeee754Float64Log10:
      ASSEMBLE_IEEE754_UNOP(log10);
      break;
    case kIeee754Float64Log2:
      ASSEMBLE_IEEE754_UNOP(log2);
      break;
    case kIeee754Float64Sin:
      ASSEMBLE_IEEE754_UNOP(sin);
      break;
    case kIeee754Float64Tan:
      ASSEMBLE_IEEE754_UNOP(tan);
      break;
    case kMipsAdd:
      __ Addu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsAddOvf:
      // Pseudo-instruction used for overflow/branch. No opcode emitted here.
      break;
    case kMipsSub:
      __ Subu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsSubOvf:
      // Pseudo-instruction used for overflow/branch. No opcode emitted here.
      break;
    case kMipsMul:
      __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsMulHigh:
      __ Mulh(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsMulHighU:
      __ Mulhu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsDiv:
      __ Div(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      if (IsMipsArchVariant(kMips32r6)) {
        __ selnez(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        __ Movz(i.OutputRegister(), i.InputRegister(1), i.InputRegister(1));
      }
      break;
    case kMipsDivU:
      __ Divu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      if (IsMipsArchVariant(kMips32r6)) {
        __ selnez(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        __ Movz(i.OutputRegister(), i.InputRegister(1), i.InputRegister(1));
      }
      break;
    case kMipsMod:
      __ Mod(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsModU:
      __ Modu(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsAnd:
      __ And(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsOr:
      __ Or(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsNor:
      if (instr->InputAt(1)->IsRegister()) {
        __ Nor(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      } else {
        DCHECK(i.InputOperand(1).immediate() == 0);
        __ Nor(i.OutputRegister(), i.InputRegister(0), zero_reg);
      }
      break;
    case kMipsXor:
      __ Xor(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsClz:
      __ Clz(i.OutputRegister(), i.InputRegister(0));
      break;
    case kMipsCtz: {
      Register reg1 = kScratchReg;
      Register reg2 = kScratchReg2;
      Label skip_for_zero;
      Label end;
      // Branch if the operand is zero.
      __ Branch(&skip_for_zero, eq, i.InputRegister(0), Operand(zero_reg));
      // Find the number of bits before the last bit set to 1.
      __ Subu(reg2, zero_reg, i.InputRegister(0));
      __ And(reg2, reg2, i.InputRegister(0));
      __ clz(reg2, reg2);
      // Get the number of bits after the last bit set to 1.
      __ li(reg1, 0x1F);
      __ Subu(i.OutputRegister(), reg1, reg2);
      __ Branch(&end);
      __ bind(&skip_for_zero);
      // If the operand is zero, return word length as the result.
      __ li(i.OutputRegister(), 0x20);
      __ bind(&end);
    } break;
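    // kMipsPopcnt below uses the classic parallel bit count: each step sums
    // adjacent bit fields of twice the previous width, using the masks
    // m1..m16, until the total population count ends up in the low bits.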
    case kMipsPopcnt: {
      Register reg1 = kScratchReg;
      Register reg2 = kScratchReg2;
      uint32_t m1 = 0x55555555;
      uint32_t m2 = 0x33333333;
      uint32_t m4 = 0x0f0f0f0f;
      uint32_t m8 = 0x00ff00ff;
      uint32_t m16 = 0x0000ffff;

      // Put count of ones in every 2 bits into those 2 bits.
      __ li(at, m1);
      __ srl(reg1, i.InputRegister(0), 1);
      __ And(reg2, i.InputRegister(0), at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Put count of ones in every 4 bits into those 4 bits.
      __ li(at, m2);
      __ srl(reg2, reg1, 2);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Put count of ones in every 8 bits into those 8 bits.
      __ li(at, m4);
      __ srl(reg2, reg1, 4);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Put count of ones in every 16 bits into those 16 bits.
      __ li(at, m8);
      __ srl(reg2, reg1, 8);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(reg1, reg1, reg2);

      // Calculate total number of ones.
      __ li(at, m16);
      __ srl(reg2, reg1, 16);
      __ And(reg2, reg2, at);
      __ And(reg1, reg1, at);
      __ addu(i.OutputRegister(), reg1, reg2);
    } break;
    case kMipsShl:
      if (instr->InputAt(1)->IsRegister()) {
        __ sllv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        int32_t imm = i.InputOperand(1).immediate();
        __ sll(i.OutputRegister(), i.InputRegister(0), imm);
      }
      break;
    case kMipsShr:
      if (instr->InputAt(1)->IsRegister()) {
        __ srlv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        int32_t imm = i.InputOperand(1).immediate();
        __ srl(i.OutputRegister(), i.InputRegister(0), imm);
      }
      break;
    case kMipsSar:
      if (instr->InputAt(1)->IsRegister()) {
        __ srav(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      } else {
        int32_t imm = i.InputOperand(1).immediate();
        __ sra(i.OutputRegister(), i.InputRegister(0), imm);
      }
      break;
    case kMipsShlPair: {
      if (instr->InputAt(2)->IsRegister()) {
        __ ShlPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                   i.InputRegister(1), i.InputRegister(2));
      } else {
        uint32_t imm = i.InputOperand(2).immediate();
        __ ShlPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                   i.InputRegister(1), imm);
      }
    } break;
    case kMipsShrPair: {
      if (instr->InputAt(2)->IsRegister()) {
        __ ShrPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                   i.InputRegister(1), i.InputRegister(2));
      } else {
        uint32_t imm = i.InputOperand(2).immediate();
        __ ShrPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                   i.InputRegister(1), imm);
      }
    } break;
    case kMipsSarPair: {
      if (instr->InputAt(2)->IsRegister()) {
        __ SarPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                   i.InputRegister(1), i.InputRegister(2));
      } else {
        uint32_t imm = i.InputOperand(2).immediate();
        __ SarPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                   i.InputRegister(1), imm);
      }
    } break;
    case kMipsExt:
      __ Ext(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
             i.InputInt8(2));
      break;
    case kMipsIns:
      if (instr->InputAt(1)->IsImmediate() && i.InputInt8(1) == 0) {
        __ Ins(i.OutputRegister(), zero_reg, i.InputInt8(1), i.InputInt8(2));
      } else {
        __ Ins(i.OutputRegister(), i.InputRegister(0), i.InputInt8(1),
               i.InputInt8(2));
      }
      break;
    case kMipsRor:
      __ Ror(i.OutputRegister(), i.InputRegister(0), i.InputOperand(1));
      break;
    case kMipsTst:
      // Pseudo-instruction used for tst/branch. No opcode emitted here.
      break;
    case kMipsCmp:
      // Pseudo-instruction used for cmp/branch. No opcode emitted here.
      break;
    case kMipsMov:
      // TODO(plind): Should we combine mov/li like this, or use separate instr?
      //    - Also see x64 ASSEMBLE_BINOP & RegisterOrOperandType
      if (HasRegisterInput(instr, 0)) {
        __ mov(i.OutputRegister(), i.InputRegister(0));
      } else {
        __ li(i.OutputRegister(), i.InputOperand(0));
      }
      break;
    case kMipsLsa:
      DCHECK(instr->InputAt(2)->IsImmediate());
      __ Lsa(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
             i.InputInt8(2));
      break;
    case kMipsCmpS:
      // Pseudo-instruction used for FP cmp/branch. No opcode emitted here.
      break;
    case kMipsAddS:
      // TODO(plind): add special case: combine mult & add.
      __ add_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsSubS:
      __ sub_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsSubPreserveNanS:
      __ SubNanPreservePayloadAndSign_s(i.OutputDoubleRegister(),
                                        i.InputDoubleRegister(0),
                                        i.InputDoubleRegister(1));
      break;
    case kMipsMulS:
      // TODO(plind): add special case: right op is -1.0, see arm port.
      __ mul_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsDivS:
      __ div_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsModS: {
      // TODO(bmeurer): We should really get rid of this special instruction,
      // and generate a CallAddress instruction instead.
      FrameScope scope(masm(), StackFrame::MANUAL);
      __ PrepareCallCFunction(0, 2, kScratchReg);
      __ MovToFloatParameters(i.InputDoubleRegister(0),
                              i.InputDoubleRegister(1));
      // TODO(balazs.kilvady): implement mod_two_floats_operation(isolate())
      __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
                       0, 2);
      // Move the result in the double result register.
      __ MovFromFloatResult(i.OutputSingleRegister());
      break;
    }
    case kMipsAbsS:
      __ abs_s(i.OutputSingleRegister(), i.InputSingleRegister(0));
      break;
    case kMipsSqrtS: {
      __ sqrt_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    }
    case kMipsMaxS:
      __ max_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsMinS:
      __ min_s(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsCmpD:
      // Pseudo-instruction used for FP cmp/branch. No opcode emitted here.
      break;
    case kMipsAddPair:
      __ AddPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                 i.InputRegister(1), i.InputRegister(2), i.InputRegister(3));
      break;
    case kMipsSubPair:
      __ SubPair(i.OutputRegister(0), i.OutputRegister(1), i.InputRegister(0),
                 i.InputRegister(1), i.InputRegister(2), i.InputRegister(3));
      break;
    case kMipsMulPair: {
      __ Mulu(i.OutputRegister(1), i.OutputRegister(0), i.InputRegister(0),
              i.InputRegister(2));
      __ mul(kScratchReg, i.InputRegister(0), i.InputRegister(3));
      __ mul(kScratchReg2, i.InputRegister(1), i.InputRegister(2));
      __ Addu(i.OutputRegister(1), i.OutputRegister(1), kScratchReg);
      __ Addu(i.OutputRegister(1), i.OutputRegister(1), kScratchReg2);
    } break;
    case kMipsAddD:
      // TODO(plind): add special case: combine mult & add.
      __ add_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsSubD:
      __ sub_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsSubPreserveNanD:
      __ SubNanPreservePayloadAndSign_d(i.OutputDoubleRegister(),
                                        i.InputDoubleRegister(0),
                                        i.InputDoubleRegister(1));
      break;
    case kMipsMulD:
      // TODO(plind): add special case: right op is -1.0, see arm port.
      __ mul_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsDivD:
      __ div_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsModD: {
      // TODO(bmeurer): We should really get rid of this special instruction,
      // and generate a CallAddress instruction instead.
      FrameScope scope(masm(), StackFrame::MANUAL);
      __ PrepareCallCFunction(0, 2, kScratchReg);
      __ MovToFloatParameters(i.InputDoubleRegister(0),
                              i.InputDoubleRegister(1));
      __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
                       0, 2);
      // Move the result in the double result register.
      __ MovFromFloatResult(i.OutputDoubleRegister());
      break;
    }
    case kMipsAbsD:
      __ abs_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kMipsSqrtD: {
      __ sqrt_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    }
    case kMipsMaxD:
      __ max_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsMinD:
      __ min_d(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
               i.InputDoubleRegister(1));
      break;
    case kMipsFloat64RoundDown: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(floor);
      break;
    }
    case kMipsFloat32RoundDown: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(floor);
      break;
    }
    case kMipsFloat64RoundTruncate: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(trunc);
      break;
    }
    case kMipsFloat32RoundTruncate: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(trunc);
      break;
    }
    case kMipsFloat64RoundUp: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(ceil);
      break;
    }
    case kMipsFloat32RoundUp: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(ceil);
      break;
    }
    case kMipsFloat64RoundTiesEven: {
      ASSEMBLE_ROUND_DOUBLE_TO_DOUBLE(round);
      break;
    }
    case kMipsFloat32RoundTiesEven: {
      ASSEMBLE_ROUND_FLOAT_TO_FLOAT(round);
      break;
    }
    case kMipsFloat64Max: {
      // (b < a) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_d(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
        __ sel_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_d(OLT, i.InputDoubleRegister(0), i.InputDoubleRegister(1));
        // Left operand is result, passthrough if false.
        __ movt_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsFloat64Min: {
      // (a < b) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_d(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                 i.InputDoubleRegister(1));
        __ sel_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_d(OLT, i.InputDoubleRegister(1), i.InputDoubleRegister(0));
        // Right operand is result, passthrough if false.
        __ movt_d(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsFloat32Max: {
      // (b < a) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_s(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
        __ sel_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_s(OLT, i.InputDoubleRegister(0), i.InputDoubleRegister(1));
        // Left operand is result, passthrough if false.
        __ movt_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsFloat32Min: {
      // (a < b) ? a : b
      if (IsMipsArchVariant(kMips32r6)) {
        __ cmp_s(OLT, i.OutputDoubleRegister(), i.InputDoubleRegister(0),
                 i.InputDoubleRegister(1));
        __ sel_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1),
                 i.InputDoubleRegister(0));
      } else {
        __ c_s(OLT, i.InputDoubleRegister(1), i.InputDoubleRegister(0));
        // Right operand is result, passthrough if false.
        __ movt_s(i.OutputDoubleRegister(), i.InputDoubleRegister(1));
      }
      break;
    }
    case kMipsCvtSD: {
      __ cvt_s_d(i.OutputSingleRegister(), i.InputDoubleRegister(0));
      break;
    }
    case kMipsCvtDS: {
      __ cvt_d_s(i.OutputDoubleRegister(), i.InputSingleRegister(0));
      break;
    }
    case kMipsCvtDW: {
      FPURegister scratch = kScratchDoubleReg;
      __ mtc1(i.InputRegister(0), scratch);
      __ cvt_d_w(i.OutputDoubleRegister(), scratch);
      break;
    }
    case kMipsCvtSW: {
      FPURegister scratch = kScratchDoubleReg;
      __ mtc1(i.InputRegister(0), scratch);
      __ cvt_s_w(i.OutputDoubleRegister(), scratch);
      break;
    }
    case kMipsCvtSUw: {
      FPURegister scratch = kScratchDoubleReg;
      __ Cvt_d_uw(i.OutputDoubleRegister(), i.InputRegister(0), scratch);
      __ cvt_s_d(i.OutputDoubleRegister(), i.OutputDoubleRegister());
      break;
    }
    case kMipsCvtDUw: {
      FPURegister scratch = kScratchDoubleReg;
      __ Cvt_d_uw(i.OutputDoubleRegister(), i.InputRegister(0), scratch);
      break;
    }
    case kMipsFloorWD: {
      FPURegister scratch = kScratchDoubleReg;
      __ floor_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsCeilWD: {
      FPURegister scratch = kScratchDoubleReg;
      __ ceil_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsRoundWD: {
      FPURegister scratch = kScratchDoubleReg;
      __ round_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncWD: {
      FPURegister scratch = kScratchDoubleReg;
      // Other arches use round to zero here, so we follow.
      __ trunc_w_d(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsFloorWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ floor_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsCeilWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ ceil_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsRoundWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ round_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncWS: {
      FPURegister scratch = kScratchDoubleReg;
      __ trunc_w_s(scratch, i.InputDoubleRegister(0));
      __ mfc1(i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncUwD: {
      FPURegister scratch = kScratchDoubleReg;
      // TODO(plind): Fix wrong param order of Trunc_uw_d() macro-asm function.
      __ Trunc_uw_d(i.InputDoubleRegister(0), i.OutputRegister(), scratch);
      break;
    }
    case kMipsTruncUwS: {
      FPURegister scratch = kScratchDoubleReg;
      // TODO(plind): Fix wrong param order of Trunc_uw_s() macro-asm function.
      __ Trunc_uw_s(i.InputDoubleRegister(0), i.OutputRegister(), scratch);
      break;
    }
    case kMipsFloat64ExtractLowWord32:
      __ FmoveLow(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kMipsFloat64ExtractHighWord32:
      __ FmoveHigh(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
    case kMipsFloat64InsertLowWord32:
      __ FmoveLow(i.OutputDoubleRegister(), i.InputRegister(1));
      break;
    case kMipsFloat64InsertHighWord32:
      __ FmoveHigh(i.OutputDoubleRegister(), i.InputRegister(1));
      break;
    case kMipsFloat64SilenceNaN: {
      FPURegister value = i.InputDoubleRegister(0);
      FPURegister result = i.OutputDoubleRegister();
      Register scratch0 = i.TempRegister(0);
      Label is_nan, not_nan;
      __ BranchF(NULL, &is_nan, eq, value, value);
      __ Branch(&not_nan);
      __ bind(&is_nan);
      __ LoadRoot(scratch0, Heap::kNanValueRootIndex);
      __ ldc1(result, FieldMemOperand(scratch0, HeapNumber::kValueOffset));
      __ bind(&not_nan);
      break;
    }

    // ... more basic instructions ...

    case kMipsLbu:
      __ lbu(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsLb:
      __ lb(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsSb:
      __ sb(i.InputRegister(2), i.MemoryOperand());
      break;
    case kMipsLhu:
      __ lhu(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsLh:
      __ lh(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsSh:
      __ sh(i.InputRegister(2), i.MemoryOperand());
      break;
    case kMipsLw:
      __ lw(i.OutputRegister(), i.MemoryOperand());
      break;
    case kMipsSw:
      __ sw(i.InputRegister(2), i.MemoryOperand());
      break;
    case kMipsLwc1: {
      __ lwc1(i.OutputSingleRegister(), i.MemoryOperand());
      break;
    }
    case kMipsSwc1: {
      size_t index = 0;
      MemOperand operand = i.MemoryOperand(&index);
      __ swc1(i.InputSingleRegister(index), operand);
      break;
    }
    case kMipsLdc1:
      __ ldc1(i.OutputDoubleRegister(), i.MemoryOperand());
      break;
    case kMipsSdc1:
      __ sdc1(i.InputDoubleRegister(2), i.MemoryOperand());
      break;
    case kMipsPush:
      if (instr->InputAt(0)->IsFPRegister()) {
        __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, -kDoubleSize));
        __ Subu(sp, sp, Operand(kDoubleSize));
        frame_access_state()->IncreaseSPDelta(kDoubleSize / kPointerSize);
      } else {
        __ Push(i.InputRegister(0));
        frame_access_state()->IncreaseSPDelta(1);
      }
      break;
    case kMipsStackClaim: {
      __ Subu(sp, sp, Operand(i.InputInt32(0)));
      frame_access_state()->IncreaseSPDelta(i.InputInt32(0) / kPointerSize);
      break;
    }
    case kMipsStoreToStackSlot: {
      if (instr->InputAt(0)->IsFPRegister()) {
        LocationOperand* op = LocationOperand::cast(instr->InputAt(0));
        if (op->representation() == MachineRepresentation::kFloat64) {
          __ sdc1(i.InputDoubleRegister(0), MemOperand(sp, i.InputInt32(1)));
        } else {
          DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
          __ swc1(i.InputSingleRegister(0), MemOperand(sp, i.InputInt32(1)));
        }
      } else {
        __ sw(i.InputRegister(0), MemOperand(sp, i.InputInt32(1)));
      }
      break;
    }
    case kCheckedLoadInt8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lb);
      break;
    case kCheckedLoadUint8:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lbu);
      break;
    case kCheckedLoadInt16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lh);
      break;
    case kCheckedLoadUint16:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lhu);
      break;
    case kCheckedLoadWord32:
      ASSEMBLE_CHECKED_LOAD_INTEGER(lw);
      break;
    case kCheckedLoadFloat32:
      ASSEMBLE_CHECKED_LOAD_FLOAT(Single, lwc1);
      break;
    case kCheckedLoadFloat64:
      ASSEMBLE_CHECKED_LOAD_FLOAT(Double, ldc1);
      break;
    case kCheckedStoreWord8:
      ASSEMBLE_CHECKED_STORE_INTEGER(sb);
      break;
    case kCheckedStoreWord16:
      ASSEMBLE_CHECKED_STORE_INTEGER(sh);
      break;
    case kCheckedStoreWord32:
      ASSEMBLE_CHECKED_STORE_INTEGER(sw);
      break;
    case kCheckedStoreFloat32:
      ASSEMBLE_CHECKED_STORE_FLOAT(Single, swc1);
      break;
    case kCheckedStoreFloat64:
      ASSEMBLE_CHECKED_STORE_FLOAT(Double, sdc1);
      break;
    case kCheckedLoadWord64:
    case kCheckedStoreWord64:
      UNREACHABLE();  // currently unsupported checked int64 load/store.
      break;
    case kAtomicLoadInt8:
      ASSEMBLE_ATOMIC_LOAD_INTEGER(lb);
      break;
    case kAtomicLoadUint8:
      ASSEMBLE_ATOMIC_LOAD_INTEGER(lbu);
      break;
    case kAtomicLoadInt16:
      ASSEMBLE_ATOMIC_LOAD_INTEGER(lh);
      break;
    case kAtomicLoadUint16:
      ASSEMBLE_ATOMIC_LOAD_INTEGER(lhu);
      break;
    case kAtomicLoadWord32:
      ASSEMBLE_ATOMIC_LOAD_INTEGER(lw);
      break;
    case kAtomicStoreWord8:
      ASSEMBLE_ATOMIC_STORE_INTEGER(sb);
      break;
    case kAtomicStoreWord16:
      ASSEMBLE_ATOMIC_STORE_INTEGER(sh);
      break;
    case kAtomicStoreWord32:
      ASSEMBLE_ATOMIC_STORE_INTEGER(sw);
      break;
  }
  return kSuccess;
}  // NOLINT(readability/fn_size)


#define UNSUPPORTED_COND(opcode, condition)                                  \
  OFStream out(stdout);                                                      \
  out << "Unsupported " << #opcode << " condition: \"" << condition << "\""; \
  UNIMPLEMENTED();

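// Converts a FlagsCondition to the condition code expected by the FP branch
// macros (e.g. BranchF32); returns false for conditions that the FP
// compare/branch sequence does not support.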
static bool convertCondition(FlagsCondition condition, Condition& cc) {
  switch (condition) {
    case kEqual:
      cc = eq;
      return true;
    case kNotEqual:
      cc = ne;
      return true;
    case kUnsignedLessThan:
      cc = lt;
      return true;
    case kUnsignedGreaterThanOrEqual:
      cc = uge;
      return true;
    case kUnsignedLessThanOrEqual:
      cc = le;
      return true;
    case kUnsignedGreaterThan:
      cc = ugt;
      return true;
    default:
      break;
  }
  return false;
}


// Assembles branches after an instruction.
void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
  MipsOperandConverter i(this, instr);
  Label* tlabel = branch->true_label;
  Label* flabel = branch->false_label;
  Condition cc = kNoCondition;
  // MIPS does not have condition code flags, so compare and branch are
  // implemented differently from the other architectures. The compare
  // operations emit MIPS pseudo-instructions, which are handled here by branch
  // instructions that do the actual comparison. It is essential that the input
  // registers of the compare pseudo-op are not modified before this branch op,
  // as they are tested here.

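  // For example, kMipsCmp with an equality condition is typically assembled
  // as a single compare-and-branch (illustrative sketch; the exact sequence
  // is chosen by the Branch macro):
  //   beq  lhs, rhs, tlabel
  //   nop                     // branch delay slot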
  if (instr->arch_opcode() == kMipsTst) {
    cc = FlagsConditionToConditionTst(branch->condition);
    __ And(at, i.InputRegister(0), i.InputOperand(1));
    __ Branch(tlabel, cc, at, Operand(zero_reg));
  } else if (instr->arch_opcode() == kMipsAddOvf) {
    switch (branch->condition) {
      case kOverflow:
        __ AddBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), tlabel, flabel);
        break;
      case kNotOverflow:
        __ AddBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), flabel, tlabel);
        break;
      default:
        UNSUPPORTED_COND(kMipsAddOvf, branch->condition);
        break;
    }
  } else if (instr->arch_opcode() == kMipsSubOvf) {
    switch (branch->condition) {
      case kOverflow:
        __ SubBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), tlabel, flabel);
        break;
      case kNotOverflow:
        __ SubBranchOvf(i.OutputRegister(), i.InputRegister(0),
                        i.InputOperand(1), flabel, tlabel);
        break;
      default:
        UNSUPPORTED_COND(kMipsSubOvf, branch->condition);
        break;
    }
  } else if (instr->arch_opcode() == kMipsCmp) {
    cc = FlagsConditionToConditionCmp(branch->condition);
    __ Branch(tlabel, cc, i.InputRegister(0), i.InputOperand(1));
  } else if (instr->arch_opcode() == kMipsCmpS) {
    if (!convertCondition(branch->condition, cc)) {
      UNSUPPORTED_COND(kMipsCmpS, branch->condition);
    }
    FPURegister left = i.InputOrZeroSingleRegister(0);
    FPURegister right = i.InputOrZeroSingleRegister(1);
    if ((left.is(kDoubleRegZero) || right.is(kDoubleRegZero)) &&
        !__ IsDoubleZeroRegSet()) {
      __ Move(kDoubleRegZero, 0.0);
    }
    __ BranchF32(tlabel, nullptr, cc, left, right);
  } else if (instr->arch_opcode() == kMipsCmpD) {
    if (!convertCondition(branch->condition, cc)) {
      UNSUPPORTED_COND(kMipsCmpD, branch->condition);
    }
    FPURegister left = i.InputOrZeroDoubleRegister(0);
    FPURegister right = i.InputOrZeroDoubleRegister(1);
    if ((left.is(kDoubleRegZero) || right.is(kDoubleRegZero)) &&
        !__ IsDoubleZeroRegSet()) {
      __ Move(kDoubleRegZero, 0.0);
    }
    __ BranchF64(tlabel, nullptr, cc, left, right);
  } else {
    PrintF("AssembleArchBranch Unimplemented arch_opcode: %d\n",
           instr->arch_opcode());
    UNIMPLEMENTED();
  }
  if (!branch->fallthru) __ Branch(flabel);  // no fallthru to flabel.
}


void CodeGenerator::AssembleArchJump(RpoNumber target) {
  if (!IsNextInAssemblyOrder(target)) __ Branch(GetLabel(target));
}


// Assembles boolean materializations after an instruction.
void CodeGenerator::AssembleArchBoolean(Instruction* instr,
                                        FlagsCondition condition) {
  MipsOperandConverter i(this, instr);
  Label done;

  // Materialize a full 32-bit 1 or 0 value. The result register is always the
  // last output of the instruction.
  Label false_value;
  DCHECK_NE(0u, instr->OutputCount());
  Register result = i.OutputRegister(instr->OutputCount() - 1);
  Condition cc = kNoCondition;
  // MIPS does not have condition code flags, so compare and branch are
  // implemented differently from the other architectures. The compare
  // operations emit MIPS pseudo-instructions, which are checked and handled
  // here.

  if (instr->arch_opcode() == kMipsTst) {
    cc = FlagsConditionToConditionTst(condition);
    __ And(kScratchReg, i.InputRegister(0), i.InputOperand(1));
    __ Sltu(result, zero_reg, kScratchReg);
    if (cc == eq) {
      // Sltu produces 0 for equality, invert the result.
      __ xori(result, result, 1);
    }
    return;
  } else if (instr->arch_opcode() == kMipsAddOvf ||
             instr->arch_opcode() == kMipsSubOvf) {
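    // Materialize the overflow flag: AddBranchNoOvf/SubBranchNoOvf branch to
    // flabel when no overflow occurs, so result ends up 1 on overflow and 0
    // otherwise.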
    Label flabel, tlabel;
    switch (instr->arch_opcode()) {
      case kMipsAddOvf:
        __ AddBranchNoOvf(i.OutputRegister(), i.InputRegister(0),
                          i.InputOperand(1), &flabel);
        break;
      case kMipsSubOvf:
        __ SubBranchNoOvf(i.OutputRegister(), i.InputRegister(0),
                          i.InputOperand(1), &flabel);
        break;
      default:
        UNREACHABLE();
        break;
    }
    __ li(result, 1);
    __ Branch(&tlabel);
    __ bind(&flabel);
    __ li(result, 0);
    __ bind(&tlabel);
  } else if (instr->arch_opcode() == kMipsCmp) {
    cc = FlagsConditionToConditionCmp(condition);
    switch (cc) {
      case eq:
      case ne: {
        Register left = i.InputRegister(0);
        Operand right = i.InputOperand(1);
        Register select;
        if (instr->InputAt(1)->IsImmediate() && right.immediate() == 0) {
          // Pass left operand if right is zero.
          select = left;
        } else {
          __ Subu(kScratchReg, left, right);
          select = kScratchReg;
        }
        __ Sltu(result, zero_reg, select);
        if (cc == eq) {
          // Sltu produces 0 for equality, invert the result.
          __ xori(result, result, 1);
        }
      } break;
      case lt:
      case ge: {
        Register left = i.InputRegister(0);
        Operand right = i.InputOperand(1);
        __ Slt(result, left, right);
        if (cc == ge) {
          __ xori(result, result, 1);
        }
      } break;
      case gt:
      case le: {
        Register left = i.InputRegister(1);
        Operand right = i.InputOperand(0);
        __ Slt(result, left, right);
        if (cc == le) {
          __ xori(result, result, 1);
        }
      } break;
      case lo:
      case hs: {
        Register left = i.InputRegister(0);
        Operand right = i.InputOperand(1);
        __ Sltu(result, left, right);
        if (cc == hs) {
          __ xori(result, result, 1);
        }
      } break;
      case hi:
      case ls: {
        Register left = i.InputRegister(1);
        Operand right = i.InputOperand(0);
        __ Sltu(result, left, right);
        if (cc == ls) {
          __ xori(result, result, 1);
        }
      } break;
      default:
        UNREACHABLE();
    }
    return;
  } else if (instr->arch_opcode() == kMipsCmpD ||
             instr->arch_opcode() == kMipsCmpS) {
    FPURegister left = i.InputOrZeroDoubleRegister(0);
    FPURegister right = i.InputOrZeroDoubleRegister(1);
    if ((left.is(kDoubleRegZero) || right.is(kDoubleRegZero)) &&
        !__ IsDoubleZeroRegSet()) {
      __ Move(kDoubleRegZero, 0.0);
    }
    bool predicate;
    FPUCondition cc = FlagsConditionToConditionCmpFPU(predicate, condition);
    if (!IsMipsArchVariant(kMips32r6)) {
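      // Pre-r6: c.cond.fmt sets the FPU condition flag; Movf/Movt then clear
      // the preloaded 1 when the flag does not match the predicate.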
      __ li(result, Operand(1));
      if (instr->arch_opcode() == kMipsCmpD) {
        __ c(cc, D, left, right);
      } else {
        DCHECK(instr->arch_opcode() == kMipsCmpS);
        __ c(cc, S, left, right);
      }
      if (predicate) {
        __ Movf(result, zero_reg);
      } else {
        __ Movt(result, zero_reg);
      }
    } else {
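      // r6: cmp.cond.fmt writes an all-ones/all-zeros mask into an FPU
      // register; move it to a GPR, keep the LSB, and invert it when the
      // predicate is negated.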
      if (instr->arch_opcode() == kMipsCmpD) {
        __ cmp(cc, L, kDoubleCompareReg, left, right);
      } else {
        DCHECK(instr->arch_opcode() == kMipsCmpS);
        __ cmp(cc, W, kDoubleCompareReg, left, right);
      }
      __ mfc1(result, kDoubleCompareReg);
      __ andi(result, result, 1);  // Cmp returns all 1's/0's, use only LSB.
      if (!predicate)  // Toggle result for not equal.
        __ xori(result, result, 1);
    }
    return;
  } else {
    PrintF("AssembleArchBoolean Unimplemented arch_opcode: %d\n",
           instr->arch_opcode());
    TRACE_UNIMPL();
    UNIMPLEMENTED();
  }
}

void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
  MipsOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  for (size_t index = 2; index < instr->InputCount(); index += 2) {
    __ li(at, Operand(i.InputInt32(index + 0)));
    __ beq(input, at, GetLabel(i.InputRpo(index + 1)));
  }
  __ nop();  // Branch delay slot of the last beq.
  AssembleArchJump(i.InputRpo(1));
}

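// Assembles a dense table switch: bounds-check the input against the case
// count, falling back to the default block, then jump through an inline jump
// table emitted by GenerateSwitchTable.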
void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
  MipsOperandConverter i(this, instr);
  Register input = i.InputRegister(0);
  size_t const case_count = instr->InputCount() - 2;
  __ Branch(GetLabel(i.InputRpo(1)), hs, input, Operand(case_count));
  __ GenerateSwitchTable(input, case_count, [&i, this](size_t index) {
    return GetLabel(i.InputRpo(index + 2));
  });
}

CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
  return kSuccess;
}

void CodeGenerator::FinishFrame(Frame* frame) {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  const RegList saves_fpu = descriptor->CalleeSavedFPRegisters();
  if (saves_fpu != 0) {
    frame->AlignSavedCalleeRegisterSlots();
  }

  if (saves_fpu != 0) {
    int count = base::bits::CountPopulation32(saves_fpu);
    DCHECK(kNumCalleeSavedFPU == count);
    frame->AllocateSavedCalleeRegisterSlots(count *
                                            (kDoubleSize / kPointerSize));
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    int count = base::bits::CountPopulation32(saves);
    DCHECK(kNumCalleeSaved == count + 1);
    frame->AllocateSavedCalleeRegisterSlots(count);
  }
}

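// Illustrative stack layout built by AssembleConstructFrame (highest address
// first; assumes a frame is required and all register classes are saved):
//   [ra, fp / prologue frame]
//   [spill slots]
//   [callee-saved FPU registers]
//   [callee-saved GP registers]   <- sp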
void CodeGenerator::AssembleConstructFrame() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (frame_access_state()->has_frame()) {
    if (descriptor->IsCFunctionCall()) {
      __ Push(ra, fp);
      __ mov(fp, sp);
    } else if (descriptor->IsJSFunctionCall()) {
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      __ StubPrologue(info()->GetOutputStackFrameType());
    }
  }

  int shrink_slots = frame()->GetSpillSlotCount();

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly
    // from the unoptimized frame. Thus, all that needs to be done is to
    // allocate the remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  const RegList saves_fpu = descriptor->CalleeSavedFPRegisters();
  if (shrink_slots > 0) {
    __ Subu(sp, sp, Operand(shrink_slots * kPointerSize));
  }

  // Save callee-saved FPU registers.
  if (saves_fpu != 0) {
    __ MultiPushFPU(saves_fpu);
  }

  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    // Save callee-saved registers.
    __ MultiPush(saves);
    DCHECK(kNumCalleeSaved == base::bits::CountPopulation32(saves) + 1);
  }
}


void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  int pop_count = static_cast<int>(descriptor->StackParameterCount());

  // Restore GP registers.
  const RegList saves = descriptor->CalleeSavedRegisters();
  if (saves != 0) {
    __ MultiPop(saves);
  }

  // Restore FPU registers.
  const RegList saves_fpu = descriptor->CalleeSavedFPRegisters();
  if (saves_fpu != 0) {
    __ MultiPopFPU(saves_fpu);
  }

  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now.
    if (return_label_.is_bound()) {
      __ Branch(&return_label_);
      return;
    } else {
      __ bind(&return_label_);
      AssembleDeconstructFrame();
    }
  }
  if (pop_count != 0) {
    __ DropAndRet(pop_count);
  } else {
    __ Ret();
  }
}

void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  MipsOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ mov(g.ToRegister(destination), src);
    } else {
      __ sw(src, g.ToMemOperand(destination));
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsRegister()) {
      __ lw(g.ToRegister(destination), src);
    } else {
      Register temp = kScratchReg;
      __ lw(temp, src);
      __ sw(temp, g.ToMemOperand(destination));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(source);
    if (destination->IsRegister() || destination->IsStackSlot()) {
      Register dst =
          destination->IsRegister() ? g.ToRegister(destination) : kScratchReg;
      switch (src.type()) {
        case Constant::kInt32:
          if (src.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
              src.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE ||
              src.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
            __ li(dst, Operand(src.ToInt32(), src.rmode()));
          } else {
            __ li(dst, Operand(src.ToInt32()));
          }
          break;
        case Constant::kFloat32:
          __ li(dst, isolate()->factory()->NewNumber(src.ToFloat32(), TENURED));
          break;
        case Constant::kInt64:
          UNREACHABLE();
          break;
        case Constant::kFloat64:
          __ li(dst, isolate()->factory()->NewNumber(src.ToFloat64(), TENURED));
          break;
        case Constant::kExternalReference:
          __ li(dst, Operand(src.ToExternalReference()));
          break;
        case Constant::kHeapObject: {
          Handle<HeapObject> src_object = src.ToHeapObject();
          Heap::RootListIndex index;
          int slot;
          if (IsMaterializableFromFrame(src_object, &slot)) {
            __ lw(dst, g.SlotToMemOperand(slot));
          } else if (IsMaterializableFromRoot(src_object, &index)) {
            __ LoadRoot(dst, index);
          } else {
            __ li(dst, src_object);
          }
          break;
        }
        case Constant::kRpoNumber:
          UNREACHABLE();  // TODO(titzer): loading RPO numbers on mips.
          break;
      }
      if (destination->IsStackSlot()) __ sw(dst, g.ToMemOperand(destination));
    } else if (src.type() == Constant::kFloat32) {
      if (destination->IsFPStackSlot()) {
        MemOperand dst = g.ToMemOperand(destination);
        __ li(at, Operand(bit_cast<int32_t>(src.ToFloat32())));
        __ sw(at, dst);
      } else {
        FloatRegister dst = g.ToSingleRegister(destination);
        __ Move(dst, src.ToFloat32());
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      DoubleRegister dst = destination->IsFPRegister()
                               ? g.ToDoubleRegister(destination)
                               : kScratchDoubleReg;
      __ Move(dst, src.ToFloat64());
      if (destination->IsFPStackSlot()) {
        __ sdc1(dst, g.ToMemOperand(destination));
      }
    }
  } else if (source->IsFPRegister()) {
    FPURegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      FPURegister dst = g.ToDoubleRegister(destination);
      __ Move(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      __ sdc1(src, g.ToMemOperand(destination));
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source);
    if (destination->IsFPRegister()) {
      LocationOperand* op = LocationOperand::cast(source);
      if (op->representation() == MachineRepresentation::kFloat64) {
        __ ldc1(g.ToDoubleRegister(destination), src);
      } else {
        DCHECK_EQ(MachineRepresentation::kFloat32, op->representation());
        __ lwc1(g.ToDoubleRegister(destination), src);
      }
    } else {
      FPURegister temp = kScratchDoubleReg;
      __ ldc1(temp, src);
      __ sdc1(temp, g.ToMemOperand(destination));
    }
  } else {
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  MipsOperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    Register temp = kScratchReg;
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ mov(temp, src);
      __ lw(src, dst);
      __ sw(temp, dst);
    }
  } else if (source->IsStackSlot()) {
    DCHECK(destination->IsStackSlot());
    Register temp_0 = kScratchReg;
    Register temp_1 = kCompareReg;
    MemOperand src = g.ToMemOperand(source);
    MemOperand dst = g.ToMemOperand(destination);
    __ lw(temp_0, src);
    __ lw(temp_1, dst);
    __ sw(temp_0, dst);
    __ sw(temp_1, src);
  } else if (source->IsFPRegister()) {
    FPURegister temp = kScratchDoubleReg;
    FPURegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      FPURegister dst = g.ToDoubleRegister(destination);
      __ Move(temp, src);
      __ Move(src, dst);
      __ Move(dst, temp);
    } else {
      DCHECK(destination->IsFPStackSlot());
      MemOperand dst = g.ToMemOperand(destination);
      __ Move(temp, src);
      __ ldc1(src, dst);
      __ sdc1(temp, dst);
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPStackSlot());
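    // Only one FPU scratch register is available, so park the destination
    // double in it while the source slot is copied over word by word through
    // the GPR scratch, then store the saved double back into the source slot.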
    Register temp_0 = kScratchReg;
    FPURegister temp_1 = kScratchDoubleReg;
    MemOperand src0 = g.ToMemOperand(source);
    MemOperand src1(src0.rm(), src0.offset() + kIntSize);
    MemOperand dst0 = g.ToMemOperand(destination);
    MemOperand dst1(dst0.rm(), dst0.offset() + kIntSize);
    __ ldc1(temp_1, dst0);  // Save destination in temp_1.
    __ lw(temp_0, src0);    // Then use temp_0 to copy source to destination.
    __ sw(temp_0, dst0);
    __ lw(temp_0, src1);
    __ sw(temp_0, dst1);
    __ sdc1(temp_1, src0);
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}


void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 32-bit MIPS we emit the jump tables inline.
  UNREACHABLE();
}


void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  int current_pc = masm()->pc_offset();
  if (current_pc < last_lazy_deopt_pc_ + space_needed) {
    // Block trampoline pool emission for the duration of the padding.
    v8::internal::Assembler::BlockTrampolinePoolScope block_trampoline_pool(
        masm());
    int padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    DCHECK_EQ(0, padding_size % v8::internal::Assembler::kInstrSize);
    while (padding_size > 0) {
      __ nop();
      padding_size -= v8::internal::Assembler::kInstrSize;
    }
  }
}

#undef __

}  // namespace compiler
}  // namespace internal
}  // namespace v8