blob: 0f9fb7ce7d0bfd22f1280203841e568e7f9d3af6 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch014dc512016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/arm64/macro-assembler-arm64.h"
Ben Murdoch014dc512016-03-22 12:00:34 +00009#include "src/ast/scopes.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/compiler/code-generator-impl.h"
11#include "src/compiler/gap-resolver.h"
12#include "src/compiler/node-matchers.h"
Ben Murdoch014dc512016-03-22 12:00:34 +000013#include "src/compiler/osr.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014
15namespace v8 {
16namespace internal {
17namespace compiler {
18
19#define __ masm()->
20
21
22// Adds Arm64-specific methods to convert InstructionOperands.
class Arm64OperandConverter final : public InstructionOperandConverter {
 public:
  Arm64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Returns input |index| viewed as a 32-bit (S) floating point register.
  DoubleRegister InputFloat32Register(size_t index) {
    return InputDoubleRegister(index).S();
  }

  // Returns input |index| viewed as a 64-bit (D) floating point register.
  DoubleRegister InputFloat64Register(size_t index) {
    return InputDoubleRegister(index);
  }

  // Like InputFloat32Register(), but an immediate input (whose bit pattern
  // must be zero) is mapped to the 32-bit zero register wzr.
  CPURegister InputFloat32OrZeroRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) {
      DCHECK(bit_cast<int32_t>(InputFloat32(index)) == 0);
      return wzr;
    }
    DCHECK(instr_->InputAt(index)->IsFPRegister());
    return InputDoubleRegister(index).S();
  }

  // Like InputFloat64Register(), but an immediate input (whose bit pattern
  // must be zero) is mapped to the 64-bit zero register xzr.
  // NOTE(review): the Float32 variant above DCHECKs IsFPRegister() while
  // this one DCHECKs IsDoubleRegister() -- confirm the asymmetry is
  // intentional.
  CPURegister InputFloat64OrZeroRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) {
      DCHECK(bit_cast<int64_t>(InputDouble(index)) == 0);
      return xzr;
    }
    DCHECK(instr_->InputAt(index)->IsDoubleRegister());
    return InputDoubleRegister(index);
  }

  size_t OutputCount() { return instr_->OutputCount(); }

  DoubleRegister OutputFloat32Register() { return OutputDoubleRegister().S(); }

  DoubleRegister OutputFloat64Register() { return OutputDoubleRegister(); }

  // Returns input |index| viewed as a 32-bit (W) general purpose register.
  Register InputRegister32(size_t index) {
    return ToRegister(instr_->InputAt(index)).W();
  }

  // Returns input |index| as a W register; an immediate input (which must
  // be the value 0) is mapped to wzr.
  Register InputOrZeroRegister32(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt32(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return wzr;
    }
    return InputRegister32(index);
  }

  Register InputRegister64(size_t index) { return InputRegister(index); }

  // Returns input |index| as an X register; an immediate input (which must
  // be the value 0) is mapped to xzr.
  Register InputOrZeroRegister64(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt64(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return xzr;
    }
    return InputRegister64(index);
  }

  Operand InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index) {
    return ToOperand(instr_->InputAt(index));
  }

  Operand InputOperand64(size_t index) { return InputOperand(index); }

  Operand InputOperand32(size_t index) {
    return ToOperand32(instr_->InputAt(index));
  }

  Register OutputRegister64() { return OutputRegister(); }

  Register OutputRegister32() { return ToRegister(instr_->Output()).W(); }

  // Decodes the instruction's addressing mode into the (possibly shifted or
  // extended) 32-bit second operand. Shift/rotate amounts for W registers
  // are encoded as a 5-bit immediate in the following input. Memory
  // addressing modes are not valid here.
  Operand InputOperand2_32(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand32(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister32(index), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister32(index), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister32(index), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister32(index), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister32(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister32(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister32(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister32(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // 64-bit counterpart of InputOperand2_32(); shift/rotate amounts for X
  // registers use a 6-bit immediate.
  Operand InputOperand2_64(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand64(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister64(index), LSL, InputInt6(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister64(index), LSR, InputInt6(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister64(index), ASR, InputInt6(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister64(index), ROR, InputInt6(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister64(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister64(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister64(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister64(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // Builds a MemOperand from the inputs starting at *first_index and
  // advances *first_index past the inputs consumed (2 for MRI/MRR, 3 for
  // the register+scaled-register mode). Operand2 modes are invalid here.
  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_UXTB:
      case kMode_Operand2_R_UXTH:
      case kMode_Operand2_R_SXTB:
      case kMode_Operand2_R_SXTH:
        break;
      case kMode_Operand2_R_LSL_I:
        *first_index += 3;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1),
                          LSL, InputInt32(index + 2));
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  // Convenience overload starting at input |first_index|.
  MemOperand MemoryOperand(size_t first_index = 0) {
    return MemoryOperand(&first_index);
  }

  Operand ToOperand(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return ToImmediate(op);
  }

  Operand ToOperand32(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op).W());
    }
    return ToImmediate(op);
  }

  // Converts a constant operand into an assembler Operand. WASM memory
  // (size) references keep their relocation mode so the embedded value can
  // be patched later; Float32/Float64 constants are materialized as
  // tenured heap numbers.
  Operand ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    switch (constant.type()) {
      case Constant::kInt32:
        if (constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
          return Operand(constant.ToInt32(), constant.rmode());
        } else {
          return Operand(constant.ToInt32());
        }
      case Constant::kInt64:
        if (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE) {
          return Operand(constant.ToInt64(), constant.rmode());
        } else {
          DCHECK(constant.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
          return Operand(constant.ToInt64());
        }
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Operand(constant.ToHeapObject());
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // Converts a stack-slot operand into a frame-relative MemOperand.
  MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index(), masm);
  }

  // Maps a frame slot to a MemOperand based off either fp or the stack
  // pointer, preferring an SP-relative form when the offset is encodable
  // as a load/store immediate.
  MemOperand SlotToMemOperand(int slot, MacroAssembler* masm) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    if (offset.from_frame_pointer()) {
      int from_sp = offset.offset() + frame_access_state()->GetSPToFPOffset();
      // Convert FP-offsets to SP-offsets if it results in better code.
      if (Assembler::IsImmLSUnscaled(from_sp) ||
          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
        offset = FrameOffset::FromStackPointer(from_sp);
      }
    }
    return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                      offset.offset());
  }
};
257
258
Emily Bernier958fae72015-03-24 16:35:39 -0400259namespace {
260
// Out-of-line slow path that materializes a quiet float32 NaN into
// |result|; used by checked loads when the index is out of bounds.
class OutOfLineLoadNaN32 final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Fmov(result_, std::numeric_limits<float>::quiet_NaN());
  }

 private:
  DoubleRegister const result_;
};
273
274
// Out-of-line slow path that materializes a quiet float64 NaN into
// |result|; used by checked loads when the index is out of bounds.
class OutOfLineLoadNaN64 final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN64(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Fmov(result_, std::numeric_limits<double>::quiet_NaN());
  }

 private:
  DoubleRegister const result_;
};
287
288
// Out-of-line slow path that zeroes |result|; used by checked integer
// loads when the index is out of bounds.
class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ Mov(result_, 0); }

 private:
  Register const result_;
};
299
Ben Murdoch014dc512016-03-22 12:00:34 +0000300
// Out-of-line slow path of the write barrier for a store of |value| into
// |object| at |index|. The fast path (emitted inline) already checked the
// "pointers from here are interesting" page flag on |object|.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        // lr is only preserved by a frame; with no frame we must save it
        // around the stub call ourselves.
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  void Generate() final {
    // Smis are not heap pointers; no barrier needed.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Skip the stub if the stored value's page is not interesting.
    __ CheckPageFlagClear(value_, scratch0_,
                          MemoryChunk::kPointersToHereAreInterestingMask,
                          exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore lr if the frame was elided.
      __ Push(lr);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // The stub takes the slot address in scratch1_.
    __ Add(scratch1_, object_, index_);
    __ CallStub(&stub);
    if (must_save_lr_) {
      __ Pop(lr);
    }
  }

 private:
  Register const object_;
  Operand const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  bool must_save_lr_;
};
349
350
// Maps the machine-independent FlagsCondition onto the ARM64 condition
// code used to branch or select on the flags set by the preceding
// compare/flag-setting instruction.
Condition FlagsConditionToCondition(FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kUnsignedLessThan:
      return lo;
    case kUnsignedGreaterThanOrEqual:
      return hs;
    case kUnsignedLessThanOrEqual:
      return ls;
    case kUnsignedGreaterThan:
      return hi;
    // Floating point conditions: the codes are chosen so that, per the
    // names, an unordered (NaN) comparison result lands on the required
    // side of the branch.
    case kFloatLessThanOrUnordered:
      return lt;
    case kFloatGreaterThanOrEqual:
      return ge;
    case kFloatLessThanOrEqual:
      return ls;
    case kFloatGreaterThanOrUnordered:
      return hi;
    case kFloatLessThan:
      return lo;
    case kFloatGreaterThanOrEqualOrUnordered:
      return hs;
    case kFloatLessThanOrEqualOrUnordered:
      return le;
    case kFloatGreaterThan:
      return gt;
    case kOverflow:
      return vs;
    case kNotOverflow:
      return vc;
    // Unordered equality is not expressible as a single condition code.
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      break;
  }
  UNREACHABLE();
  return nv;
}
400
Emily Bernier958fae72015-03-24 16:35:39 -0400401} // namespace
402
403
// Checked float load: unsigned-compare offset against length and branch
// (hs == unsigned >=) to an out-of-line stub that produces a quiet NaN
// when out of bounds; otherwise load from buffer + zero-extended offset.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                         \
  do {                                                             \
    auto result = i.OutputFloat##width##Register();                \
    auto buffer = i.InputRegister(0);                              \
    auto offset = i.InputRegister32(1);                            \
    auto length = i.InputOperand32(2);                             \
    __ Cmp(offset, length);                                        \
    auto ool = new (zone()) OutOfLineLoadNaN##width(this, result); \
    __ B(hs, ool->entry());                                        \
    __ Ldr(result, MemOperand(buffer, offset, UXTW));              \
    __ Bind(ool->exit());                                          \
  } while (0)


// Checked 32-bit (or narrower) integer load: out-of-bounds produces 0 via
// the OutOfLineLoadZero stub.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)             \
  do {                                                       \
    auto result = i.OutputRegister32();                      \
    auto buffer = i.InputRegister(0);                        \
    auto offset = i.InputRegister32(1);                      \
    auto length = i.InputOperand32(2);                       \
    __ Cmp(offset, length);                                  \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ B(hs, ool->entry());                                  \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));  \
    __ Bind(ool->exit());                                    \
  } while (0)


// Checked 64-bit integer load: same bounds check, full X result register.
#define ASSEMBLE_CHECKED_LOAD_INTEGER_64(asm_instr)          \
  do {                                                       \
    auto result = i.OutputRegister();                        \
    auto buffer = i.InputRegister(0);                        \
    auto offset = i.InputRegister32(1);                      \
    auto length = i.InputOperand32(2);                       \
    __ Cmp(offset, length);                                  \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ B(hs, ool->entry());                                  \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));  \
    __ Bind(ool->exit());                                    \
  } while (0)
444
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100445#define ASSEMBLE_CHECKED_STORE_FLOAT(width) \
446 do { \
447 auto buffer = i.InputRegister(0); \
448 auto offset = i.InputRegister32(1); \
449 auto length = i.InputOperand32(2); \
450 auto value = i.InputFloat##width##OrZeroRegister(3); \
451 __ Cmp(offset, length); \
452 Label done; \
453 __ B(hs, &done); \
454 __ Str(value, MemOperand(buffer, offset, UXTW)); \
455 __ Bind(&done); \
Emily Bernier958fae72015-03-24 16:35:39 -0400456 } while (0)
457
Emily Bernier958fae72015-03-24 16:35:39 -0400458#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr) \
459 do { \
460 auto buffer = i.InputRegister(0); \
461 auto offset = i.InputRegister32(1); \
462 auto length = i.InputOperand32(2); \
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100463 auto value = i.InputOrZeroRegister32(3); \
Emily Bernier958fae72015-03-24 16:35:39 -0400464 __ Cmp(offset, length); \
465 Label done; \
466 __ B(hs, &done); \
467 __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
468 __ Bind(&done); \
469 } while (0)
470
Ben Murdoch014dc512016-03-22 12:00:34 +0000471#define ASSEMBLE_CHECKED_STORE_INTEGER_64(asm_instr) \
472 do { \
473 auto buffer = i.InputRegister(0); \
474 auto offset = i.InputRegister32(1); \
475 auto length = i.InputOperand32(2); \
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100476 auto value = i.InputOrZeroRegister64(3); \
Ben Murdoch014dc512016-03-22 12:00:34 +0000477 __ Cmp(offset, length); \
478 Label done; \
479 __ B(hs, &done); \
480 __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
481 __ Bind(&done); \
Emily Bernier958fae72015-03-24 16:35:39 -0400482 } while (0)
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000483
Ben Murdoch014dc512016-03-22 12:00:34 +0000484#define ASSEMBLE_SHIFT(asm_instr, width) \
485 do { \
486 if (instr->InputAt(1)->IsRegister()) { \
487 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \
488 i.InputRegister##width(1)); \
489 } else { \
490 uint32_t imm = \
491 static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \
492 __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0), \
493 imm % (width)); \
494 } \
495 } while (0)
496
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100497#define ASSEMBLE_ATOMIC_LOAD_INTEGER(asm_instr) \
498 do { \
499 __ asm_instr(i.OutputRegister(), \
500 MemOperand(i.InputRegister(0), i.InputRegister(1))); \
501 __ Dmb(InnerShareable, BarrierAll); \
502 } while (0)
503
504#define ASSEMBLE_ATOMIC_STORE_INTEGER(asm_instr) \
505 do { \
506 __ Dmb(InnerShareable, BarrierAll); \
507 __ asm_instr(i.InputRegister(2), \
508 MemOperand(i.InputRegister(0), i.InputRegister(1))); \
509 __ Dmb(InnerShareable, BarrierAll); \
510 } while (0)
511
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100512void CodeGenerator::AssembleDeconstructFrame() {
513 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
514 if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) {
515 __ Mov(csp, fp);
516 } else {
517 __ Mov(jssp, fp);
518 }
519 __ Pop(fp, lr);
520}
Ben Murdoch014dc512016-03-22 12:00:34 +0000521
522void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
523 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
524 if (sp_slot_delta > 0) {
525 __ Drop(sp_slot_delta);
526 }
527 frame_access_state()->SetFrameAccessToDefault();
528}
529
530
531void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
532 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
533 if (sp_slot_delta < 0) {
534 __ Claim(-sp_slot_delta);
535 frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
536 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100537 if (frame_access_state()->has_frame()) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000538 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
539 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
540 }
541 frame_access_state()->SetFrameAccessToSP();
542}
543
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100544void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
545 Register scratch1,
546 Register scratch2,
547 Register scratch3) {
548 DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
549 Label done;
550
551 // Check if current frame is an arguments adaptor frame.
552 __ Ldr(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
553 __ Cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
554 __ B(ne, &done);
555
556 // Load arguments count from current arguments adaptor frame (note, it
557 // does not include receiver).
558 Register caller_args_count_reg = scratch1;
559 __ Ldr(caller_args_count_reg,
560 MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
561 __ SmiUntag(caller_args_count_reg);
562
563 ParameterCount callee_args_count(args_reg);
564 __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
565 scratch3);
566 __ bind(&done);
567}
Ben Murdoch014dc512016-03-22 12:00:34 +0000568
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000569// Assembles an instruction after register allocation, producing machine code.
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100570CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
571 Instruction* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000572 Arm64OperandConverter i(this, instr);
573 InstructionCode opcode = instr->opcode();
Ben Murdoch109988c2016-05-18 11:27:45 +0100574 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
575 switch (arch_opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000576 case kArchCallCodeObject: {
577 EnsureSpaceForLazyDeopt();
578 if (instr->InputAt(0)->IsImmediate()) {
579 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
580 RelocInfo::CODE_TARGET);
581 } else {
582 Register target = i.InputRegister(0);
583 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
584 __ Call(target);
585 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100586 RecordCallPosition(instr);
Ben Murdoch109988c2016-05-18 11:27:45 +0100587 // TODO(titzer): this is ugly. JSSP should be a caller-save register
588 // in this case, but it is not possible to express in the register
589 // allocator.
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100590 CallDescriptor::Flags flags(MiscField::decode(opcode));
Ben Murdoch109988c2016-05-18 11:27:45 +0100591 if (flags & CallDescriptor::kRestoreJSSP) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100592 __ Ldr(jssp, MemOperand(csp));
593 __ Mov(csp, jssp);
594 }
595 if (flags & CallDescriptor::kRestoreCSP) {
596 __ Mov(csp, jssp);
597 __ AssertCspAligned();
Ben Murdoch109988c2016-05-18 11:27:45 +0100598 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000599 frame_access_state()->ClearSPDelta();
Ben Murdoch014dc512016-03-22 12:00:34 +0000600 break;
601 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100602 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch014dc512016-03-22 12:00:34 +0000603 case kArchTailCallCodeObject: {
604 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
605 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100606 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
607 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
608 i.TempRegister(0), i.TempRegister(1),
609 i.TempRegister(2));
610 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000611 if (instr->InputAt(0)->IsImmediate()) {
612 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
613 RelocInfo::CODE_TARGET);
614 } else {
615 Register target = i.InputRegister(0);
616 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
617 __ Jump(target);
618 }
619 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000620 break;
621 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100622 case kArchTailCallAddress: {
623 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
624 AssembleDeconstructActivationRecord(stack_param_delta);
625 CHECK(!instr->InputAt(0)->IsImmediate());
626 __ Jump(i.InputRegister(0));
627 frame_access_state()->ClearSPDelta();
628 break;
629 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000630 case kArchCallJSFunction: {
631 EnsureSpaceForLazyDeopt();
632 Register func = i.InputRegister(0);
633 if (FLAG_debug_code) {
634 // Check the function's context matches the context argument.
635 UseScratchRegisterScope scope(masm());
636 Register temp = scope.AcquireX();
637 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
638 __ cmp(cp, temp);
639 __ Assert(eq, kWrongFunctionContext);
640 }
641 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
642 __ Call(x10);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100643 RecordCallPosition(instr);
Ben Murdoch109988c2016-05-18 11:27:45 +0100644 // TODO(titzer): this is ugly. JSSP should be a caller-save register
645 // in this case, but it is not possible to express in the register
646 // allocator.
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100647 CallDescriptor::Flags flags(MiscField::decode(opcode));
Ben Murdoch109988c2016-05-18 11:27:45 +0100648 if (flags & CallDescriptor::kRestoreJSSP) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100649 __ Ldr(jssp, MemOperand(csp));
650 __ Mov(csp, jssp);
651 }
652 if (flags & CallDescriptor::kRestoreCSP) {
653 __ Mov(csp, jssp);
654 __ AssertCspAligned();
Ben Murdoch109988c2016-05-18 11:27:45 +0100655 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000656 frame_access_state()->ClearSPDelta();
Ben Murdoch014dc512016-03-22 12:00:34 +0000657 break;
658 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100659 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch014dc512016-03-22 12:00:34 +0000660 case kArchTailCallJSFunction: {
661 Register func = i.InputRegister(0);
662 if (FLAG_debug_code) {
663 // Check the function's context matches the context argument.
664 UseScratchRegisterScope scope(masm());
665 Register temp = scope.AcquireX();
666 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
667 __ cmp(cp, temp);
668 __ Assert(eq, kWrongFunctionContext);
669 }
670 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
671 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100672 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
673 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
674 i.TempRegister(0), i.TempRegister(1),
675 i.TempRegister(2));
676 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000677 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
678 __ Jump(x10);
679 frame_access_state()->ClearSPDelta();
680 break;
681 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000682 case kArchPrepareCallCFunction:
683 // We don't need kArchPrepareCallCFunction on arm64 as the instruction
684 // selector already perform a Claim to reserve space on the stack and
685 // guarantee correct alignment of stack pointer.
686 UNREACHABLE();
687 break;
688 case kArchPrepareTailCall:
689 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
690 break;
691 case kArchCallCFunction: {
692 int const num_parameters = MiscField::decode(instr->opcode());
693 if (instr->InputAt(0)->IsImmediate()) {
694 ExternalReference ref = i.InputExternalReference(0);
695 __ CallCFunction(ref, num_parameters, 0);
696 } else {
697 Register func = i.InputRegister(0);
698 __ CallCFunction(func, num_parameters, 0);
699 }
700 // CallCFunction only supports register arguments so we never need to call
701 // frame()->ClearOutgoingParameterSlots() here.
702 DCHECK(frame_access_state()->sp_delta() == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000703 break;
704 }
705 case kArchJmp:
Emily Bernier958fae72015-03-24 16:35:39 -0400706 AssembleArchJump(i.InputRpo(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000707 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000708 case kArchTableSwitch:
709 AssembleArchTableSwitch(instr);
710 break;
711 case kArchLookupSwitch:
712 AssembleArchLookupSwitch(instr);
713 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000714 case kArchNop:
Ben Murdoch014dc512016-03-22 12:00:34 +0000715 case kArchThrowTerminator:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000716 // don't emit code for nops.
717 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000718 case kArchDeoptimize: {
719 int deopt_state_id =
720 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
721 Deoptimizer::BailoutType bailout_type =
722 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
Ben Murdochbcf72ee2016-08-08 18:44:38 +0100723 CodeGenResult result =
724 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
725 if (result != kSuccess) return result;
Ben Murdoch014dc512016-03-22 12:00:34 +0000726 break;
727 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000728 case kArchRet:
729 AssembleReturn();
730 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400731 case kArchStackPointer:
732 __ mov(i.OutputRegister(), masm()->StackPointer());
733 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000734 case kArchFramePointer:
735 __ mov(i.OutputRegister(), fp);
736 break;
Ben Murdoch109988c2016-05-18 11:27:45 +0100737 case kArchParentFramePointer:
Ben Murdoch3b9bc312016-06-02 14:46:10 +0100738 if (frame_access_state()->has_frame()) {
Ben Murdoch109988c2016-05-18 11:27:45 +0100739 __ ldr(i.OutputRegister(), MemOperand(fp, 0));
740 } else {
741 __ mov(i.OutputRegister(), fp);
742 }
743 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000744 case kArchTruncateDoubleToI:
745 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
746 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000747 case kArchStoreWithWriteBarrier: {
748 RecordWriteMode mode =
749 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
Ben Murdoch109988c2016-05-18 11:27:45 +0100750 AddressingMode addressing_mode =
751 AddressingModeField::decode(instr->opcode());
Ben Murdoch014dc512016-03-22 12:00:34 +0000752 Register object = i.InputRegister(0);
Ben Murdoch109988c2016-05-18 11:27:45 +0100753 Operand index(0);
754 if (addressing_mode == kMode_MRI) {
755 index = Operand(i.InputInt64(1));
756 } else {
757 DCHECK_EQ(addressing_mode, kMode_MRR);
758 index = Operand(i.InputRegister(1));
759 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000760 Register value = i.InputRegister(2);
761 Register scratch0 = i.TempRegister(0);
762 Register scratch1 = i.TempRegister(1);
763 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
764 scratch0, scratch1, mode);
765 __ Str(value, MemOperand(object, index));
766 __ CheckPageFlagSet(object, scratch0,
767 MemoryChunk::kPointersFromHereAreInterestingMask,
768 ool->entry());
769 __ Bind(ool->exit());
Emily Bernier958fae72015-03-24 16:35:39 -0400770 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000771 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100772 case kArchStackSlot: {
773 FrameOffset offset =
774 frame_access_state()->GetFrameOffset(i.InputInt32(0));
775 Register base;
776 if (offset.from_stack_pointer()) {
777 base = __ StackPointer();
778 } else {
779 base = fp;
780 }
781 __ Add(i.OutputRegister(0), base, Operand(offset.offset()));
782 break;
783 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000784 case kArm64Float32RoundDown:
785 __ Frintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
786 break;
787 case kArm64Float64RoundDown:
Emily Bernier958fae72015-03-24 16:35:39 -0400788 __ Frintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
789 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000790 case kArm64Float32RoundUp:
791 __ Frintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
792 break;
793 case kArm64Float64RoundUp:
794 __ Frintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
Emily Bernier958fae72015-03-24 16:35:39 -0400795 break;
796 case kArm64Float64RoundTiesAway:
797 __ Frinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
798 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000799 case kArm64Float32RoundTruncate:
800 __ Frintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
801 break;
802 case kArm64Float64RoundTruncate:
803 __ Frintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
804 break;
805 case kArm64Float32RoundTiesEven:
806 __ Frintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
807 break;
808 case kArm64Float64RoundTiesEven:
809 __ Frintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
810 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000811 case kArm64Add:
Ben Murdoch014dc512016-03-22 12:00:34 +0000812 if (FlagsModeField::decode(opcode) != kFlags_none) {
813 __ Adds(i.OutputRegister(), i.InputOrZeroRegister64(0),
814 i.InputOperand2_64(1));
815 } else {
816 __ Add(i.OutputRegister(), i.InputOrZeroRegister64(0),
817 i.InputOperand2_64(1));
818 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000819 break;
820 case kArm64Add32:
821 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000822 __ Adds(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400823 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000824 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +0000825 __ Add(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400826 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000827 }
828 break;
829 case kArm64And:
Ben Murdoch014dc512016-03-22 12:00:34 +0000830 __ And(i.OutputRegister(), i.InputOrZeroRegister64(0),
831 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000832 break;
833 case kArm64And32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000834 __ And(i.OutputRegister32(), i.InputOrZeroRegister32(0),
835 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400836 break;
837 case kArm64Bic:
Ben Murdoch014dc512016-03-22 12:00:34 +0000838 __ Bic(i.OutputRegister(), i.InputOrZeroRegister64(0),
839 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400840 break;
841 case kArm64Bic32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000842 __ Bic(i.OutputRegister32(), i.InputOrZeroRegister32(0),
843 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000844 break;
845 case kArm64Mul:
846 __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
847 break;
848 case kArm64Mul32:
849 __ Mul(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
850 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400851 case kArm64Smull:
852 __ Smull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
853 break;
854 case kArm64Umull:
855 __ Umull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
856 break;
857 case kArm64Madd:
858 __ Madd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
859 i.InputRegister(2));
860 break;
861 case kArm64Madd32:
862 __ Madd(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
863 i.InputRegister32(2));
864 break;
865 case kArm64Msub:
866 __ Msub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
867 i.InputRegister(2));
868 break;
869 case kArm64Msub32:
870 __ Msub(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
871 i.InputRegister32(2));
872 break;
873 case kArm64Mneg:
874 __ Mneg(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
875 break;
876 case kArm64Mneg32:
877 __ Mneg(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
878 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000879 case kArm64Idiv:
880 __ Sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
881 break;
882 case kArm64Idiv32:
883 __ Sdiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
884 break;
885 case kArm64Udiv:
886 __ Udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
887 break;
888 case kArm64Udiv32:
889 __ Udiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
890 break;
891 case kArm64Imod: {
892 UseScratchRegisterScope scope(masm());
893 Register temp = scope.AcquireX();
894 __ Sdiv(temp, i.InputRegister(0), i.InputRegister(1));
895 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
896 break;
897 }
898 case kArm64Imod32: {
899 UseScratchRegisterScope scope(masm());
900 Register temp = scope.AcquireW();
901 __ Sdiv(temp, i.InputRegister32(0), i.InputRegister32(1));
902 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
903 i.InputRegister32(0));
904 break;
905 }
906 case kArm64Umod: {
907 UseScratchRegisterScope scope(masm());
908 Register temp = scope.AcquireX();
909 __ Udiv(temp, i.InputRegister(0), i.InputRegister(1));
910 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
911 break;
912 }
913 case kArm64Umod32: {
914 UseScratchRegisterScope scope(masm());
915 Register temp = scope.AcquireW();
916 __ Udiv(temp, i.InputRegister32(0), i.InputRegister32(1));
917 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
918 i.InputRegister32(0));
919 break;
920 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000921 case kArm64Not:
Ben Murdoch014dc512016-03-22 12:00:34 +0000922 __ Mvn(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000923 break;
924 case kArm64Not32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000925 __ Mvn(i.OutputRegister32(), i.InputOperand32(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000926 break;
927 case kArm64Or:
Ben Murdoch014dc512016-03-22 12:00:34 +0000928 __ Orr(i.OutputRegister(), i.InputOrZeroRegister64(0),
929 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000930 break;
931 case kArm64Or32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000932 __ Orr(i.OutputRegister32(), i.InputOrZeroRegister32(0),
933 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000934 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400935 case kArm64Orn:
Ben Murdoch014dc512016-03-22 12:00:34 +0000936 __ Orn(i.OutputRegister(), i.InputOrZeroRegister64(0),
937 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000938 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400939 case kArm64Orn32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000940 __ Orn(i.OutputRegister32(), i.InputOrZeroRegister32(0),
941 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400942 break;
943 case kArm64Eor:
Ben Murdoch014dc512016-03-22 12:00:34 +0000944 __ Eor(i.OutputRegister(), i.InputOrZeroRegister64(0),
945 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400946 break;
947 case kArm64Eor32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000948 __ Eor(i.OutputRegister32(), i.InputOrZeroRegister32(0),
949 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400950 break;
951 case kArm64Eon:
Ben Murdoch014dc512016-03-22 12:00:34 +0000952 __ Eon(i.OutputRegister(), i.InputOrZeroRegister64(0),
953 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400954 break;
955 case kArm64Eon32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000956 __ Eon(i.OutputRegister32(), i.InputOrZeroRegister32(0),
957 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000958 break;
959 case kArm64Sub:
Ben Murdoch014dc512016-03-22 12:00:34 +0000960 if (FlagsModeField::decode(opcode) != kFlags_none) {
961 __ Subs(i.OutputRegister(), i.InputOrZeroRegister64(0),
962 i.InputOperand2_64(1));
963 } else {
964 __ Sub(i.OutputRegister(), i.InputOrZeroRegister64(0),
965 i.InputOperand2_64(1));
966 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000967 break;
968 case kArm64Sub32:
969 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000970 __ Subs(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400971 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000972 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +0000973 __ Sub(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400974 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000975 }
976 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400977 case kArm64Lsl:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000978 ASSEMBLE_SHIFT(Lsl, 64);
979 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400980 case kArm64Lsl32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000981 ASSEMBLE_SHIFT(Lsl, 32);
982 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400983 case kArm64Lsr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000984 ASSEMBLE_SHIFT(Lsr, 64);
985 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400986 case kArm64Lsr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000987 ASSEMBLE_SHIFT(Lsr, 32);
988 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400989 case kArm64Asr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000990 ASSEMBLE_SHIFT(Asr, 64);
991 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400992 case kArm64Asr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000993 ASSEMBLE_SHIFT(Asr, 32);
994 break;
995 case kArm64Ror:
996 ASSEMBLE_SHIFT(Ror, 64);
997 break;
998 case kArm64Ror32:
999 ASSEMBLE_SHIFT(Ror, 32);
1000 break;
1001 case kArm64Mov32:
1002 __ Mov(i.OutputRegister32(), i.InputRegister32(0));
1003 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001004 case kArm64Sxtb32:
1005 __ Sxtb(i.OutputRegister32(), i.InputRegister32(0));
1006 break;
1007 case kArm64Sxth32:
1008 __ Sxth(i.OutputRegister32(), i.InputRegister32(0));
1009 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001010 case kArm64Sxtw:
1011 __ Sxtw(i.OutputRegister(), i.InputRegister32(0));
1012 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001013 case kArm64Sbfx32:
1014 __ Sbfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
1015 i.InputInt5(2));
1016 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001017 case kArm64Ubfx:
Ben Murdoch014dc512016-03-22 12:00:34 +00001018 __ Ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt6(1),
1019 i.InputInt6(2));
Emily Bernier958fae72015-03-24 16:35:39 -04001020 break;
1021 case kArm64Ubfx32:
Ben Murdoch014dc512016-03-22 12:00:34 +00001022 __ Ubfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
1023 i.InputInt5(2));
1024 break;
1025 case kArm64Ubfiz32:
1026 __ Ubfiz(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
1027 i.InputInt5(2));
1028 break;
1029 case kArm64Bfi:
1030 __ Bfi(i.OutputRegister(), i.InputRegister(1), i.InputInt6(2),
1031 i.InputInt6(3));
Emily Bernier958fae72015-03-24 16:35:39 -04001032 break;
1033 case kArm64TestAndBranch32:
1034 case kArm64TestAndBranch:
1035 // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
1036 break;
1037 case kArm64CompareAndBranch32:
1038 // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
1039 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001040 case kArm64ClaimCSP: {
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001041 int count = RoundUp(i.InputInt32(0), 2);
Ben Murdoch109988c2016-05-18 11:27:45 +01001042 Register prev = __ StackPointer();
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001043 if (prev.Is(jssp)) {
1044 // TODO(titzer): make this a macro-assembler method.
1045 // Align the CSP and store the previous JSSP on the stack.
1046 UseScratchRegisterScope scope(masm());
1047 Register tmp = scope.AcquireX();
1048
1049 int sp_alignment = __ ActivationFrameAlignment();
1050 __ Sub(tmp, jssp, kPointerSize);
1051 __ And(tmp, tmp, Operand(~static_cast<uint64_t>(sp_alignment - 1)));
1052 __ Mov(csp, tmp);
1053 __ Str(jssp, MemOperand(csp));
1054 if (count > 0) {
1055 __ SetStackPointer(csp);
1056 __ Claim(count);
1057 __ SetStackPointer(prev);
1058 }
1059 } else {
1060 __ AssertCspAligned();
1061 if (count > 0) {
1062 __ Claim(count);
1063 frame_access_state()->IncreaseSPDelta(count);
1064 }
1065 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001066 break;
1067 }
Ben Murdoch109988c2016-05-18 11:27:45 +01001068 case kArm64ClaimJSSP: {
1069 int count = i.InputInt32(0);
1070 if (csp.Is(__ StackPointer())) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001071 // No JSSP is set up. Compute it from the CSP.
1072 __ AssertCspAligned();
1073 if (count > 0) {
1074 int even = RoundUp(count, 2);
1075 __ Sub(jssp, csp, count * kPointerSize);
1076 __ Sub(csp, csp, even * kPointerSize); // Must always be aligned.
1077 frame_access_state()->IncreaseSPDelta(even);
1078 } else {
1079 __ Mov(jssp, csp);
1080 }
Ben Murdoch109988c2016-05-18 11:27:45 +01001081 } else {
1082 // JSSP is the current stack pointer, just use regular Claim().
1083 __ Claim(count);
1084 frame_access_state()->IncreaseSPDelta(count);
1085 }
1086 break;
1087 }
1088 case kArm64PokeCSP: // fall through
1089 case kArm64PokeJSSP: {
1090 Register prev = __ StackPointer();
1091 __ SetStackPointer(arch_opcode == kArm64PokeCSP ? csp : jssp);
Ben Murdoch014dc512016-03-22 12:00:34 +00001092 Operand operand(i.InputInt32(1) * kPointerSize);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001093 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001094 __ Poke(i.InputFloat64Register(0), operand);
1095 } else {
1096 __ Poke(i.InputRegister(0), operand);
1097 }
Ben Murdoch109988c2016-05-18 11:27:45 +01001098 __ SetStackPointer(prev);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001099 break;
1100 }
1101 case kArm64PokePair: {
Ben Murdoch014dc512016-03-22 12:00:34 +00001102 int slot = i.InputInt32(2) - 1;
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001103 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001104 __ PokePair(i.InputFloat64Register(1), i.InputFloat64Register(0),
1105 slot * kPointerSize);
1106 } else {
1107 __ PokePair(i.InputRegister(1), i.InputRegister(0),
1108 slot * kPointerSize);
1109 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001110 break;
1111 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001112 case kArm64Clz:
1113 __ Clz(i.OutputRegister64(), i.InputRegister64(0));
1114 break;
1115 case kArm64Clz32:
1116 __ Clz(i.OutputRegister32(), i.InputRegister32(0));
1117 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001118 case kArm64Rbit:
1119 __ Rbit(i.OutputRegister64(), i.InputRegister64(0));
1120 break;
1121 case kArm64Rbit32:
1122 __ Rbit(i.OutputRegister32(), i.InputRegister32(0));
1123 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001124 case kArm64Cmp:
Ben Murdoch014dc512016-03-22 12:00:34 +00001125 __ Cmp(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001126 break;
1127 case kArm64Cmp32:
Ben Murdoch014dc512016-03-22 12:00:34 +00001128 __ Cmp(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001129 break;
1130 case kArm64Cmn:
Ben Murdoch014dc512016-03-22 12:00:34 +00001131 __ Cmn(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001132 break;
1133 case kArm64Cmn32:
Ben Murdoch014dc512016-03-22 12:00:34 +00001134 __ Cmn(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001135 break;
1136 case kArm64Tst:
1137 __ Tst(i.InputRegister(0), i.InputOperand(1));
1138 break;
1139 case kArm64Tst32:
1140 __ Tst(i.InputRegister32(0), i.InputOperand32(1));
1141 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001142 case kArm64Float32Cmp:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001143 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001144 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1145 } else {
1146 DCHECK(instr->InputAt(1)->IsImmediate());
1147 // 0.0 is the only immediate supported by fcmp instructions.
1148 DCHECK(i.InputFloat32(1) == 0.0f);
1149 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32(1));
1150 }
1151 break;
1152 case kArm64Float32Add:
1153 __ Fadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
1154 i.InputFloat32Register(1));
1155 break;
1156 case kArm64Float32Sub:
1157 __ Fsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
1158 i.InputFloat32Register(1));
1159 break;
1160 case kArm64Float32Mul:
1161 __ Fmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
1162 i.InputFloat32Register(1));
1163 break;
1164 case kArm64Float32Div:
1165 __ Fdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
1166 i.InputFloat32Register(1));
1167 break;
1168 case kArm64Float32Max:
1169 // (b < a) ? a : b
1170 __ Fcmp(i.InputFloat32Register(1), i.InputFloat32Register(0));
1171 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1172 i.InputFloat32Register(1), lo);
1173 break;
1174 case kArm64Float32Min:
1175 // (a < b) ? a : b
1176 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1177 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1178 i.InputFloat32Register(1), lo);
1179 break;
1180 case kArm64Float32Abs:
1181 __ Fabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
1182 break;
1183 case kArm64Float32Sqrt:
1184 __ Fsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
1185 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001186 case kArm64Float64Cmp:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001187 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001188 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1189 } else {
1190 DCHECK(instr->InputAt(1)->IsImmediate());
1191 // 0.0 is the only immediate supported by fcmp instructions.
1192 DCHECK(i.InputDouble(1) == 0.0);
1193 __ Fcmp(i.InputDoubleRegister(0), i.InputDouble(1));
1194 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001195 break;
1196 case kArm64Float64Add:
1197 __ Fadd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1198 i.InputDoubleRegister(1));
1199 break;
1200 case kArm64Float64Sub:
1201 __ Fsub(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1202 i.InputDoubleRegister(1));
1203 break;
1204 case kArm64Float64Mul:
1205 __ Fmul(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1206 i.InputDoubleRegister(1));
1207 break;
1208 case kArm64Float64Div:
1209 __ Fdiv(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1210 i.InputDoubleRegister(1));
1211 break;
1212 case kArm64Float64Mod: {
1213 // TODO(dcarney): implement directly. See note in lithium-codegen-arm64.cc
1214 FrameScope scope(masm(), StackFrame::MANUAL);
1215 DCHECK(d0.is(i.InputDoubleRegister(0)));
1216 DCHECK(d1.is(i.InputDoubleRegister(1)));
1217 DCHECK(d0.is(i.OutputDoubleRegister()));
1218 // TODO(dcarney): make sure this saves all relevant registers.
1219 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
1220 0, 2);
1221 break;
1222 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001223 case kArm64Float64Max:
1224 // (b < a) ? a : b
1225 __ Fcmp(i.InputDoubleRegister(1), i.InputDoubleRegister(0));
1226 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1227 i.InputDoubleRegister(1), lo);
1228 break;
1229 case kArm64Float64Min:
1230 // (a < b) ? a : b
1231 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1232 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1233 i.InputDoubleRegister(1), lo);
1234 break;
1235 case kArm64Float64Abs:
1236 __ Fabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1237 break;
1238 case kArm64Float64Neg:
1239 __ Fneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1240 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001241 case kArm64Float64Sqrt:
1242 __ Fsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1243 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001244 case kArm64Float32ToFloat64:
1245 __ Fcvt(i.OutputDoubleRegister(), i.InputDoubleRegister(0).S());
1246 break;
1247 case kArm64Float64ToFloat32:
1248 __ Fcvt(i.OutputDoubleRegister().S(), i.InputDoubleRegister(0));
1249 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001250 case kArm64Float32ToInt32:
1251 __ Fcvtzs(i.OutputRegister32(), i.InputFloat32Register(0));
1252 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001253 case kArm64Float64ToInt32:
1254 __ Fcvtzs(i.OutputRegister32(), i.InputDoubleRegister(0));
1255 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001256 case kArm64Float32ToUint32:
1257 __ Fcvtzu(i.OutputRegister32(), i.InputFloat32Register(0));
1258 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001259 case kArm64Float64ToUint32:
1260 __ Fcvtzu(i.OutputRegister32(), i.InputDoubleRegister(0));
1261 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001262 case kArm64Float32ToInt64:
1263 __ Fcvtzs(i.OutputRegister64(), i.InputFloat32Register(0));
1264 if (i.OutputCount() > 1) {
1265 __ Mov(i.OutputRegister(1), 1);
1266 Label done;
1267 __ Cmp(i.OutputRegister(0), 1);
1268 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1269 __ Fccmp(i.InputFloat32Register(0), i.InputFloat32Register(0), VFlag,
1270 vc);
1271 __ B(vc, &done);
1272 __ Fcmp(i.InputFloat32Register(0), static_cast<float>(INT64_MIN));
1273 __ Cset(i.OutputRegister(1), eq);
1274 __ Bind(&done);
1275 }
1276 break;
1277 case kArm64Float64ToInt64:
1278 __ Fcvtzs(i.OutputRegister(0), i.InputDoubleRegister(0));
1279 if (i.OutputCount() > 1) {
1280 __ Mov(i.OutputRegister(1), 1);
1281 Label done;
1282 __ Cmp(i.OutputRegister(0), 1);
1283 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1284 __ Fccmp(i.InputDoubleRegister(0), i.InputDoubleRegister(0), VFlag, vc);
1285 __ B(vc, &done);
1286 __ Fcmp(i.InputDoubleRegister(0), static_cast<double>(INT64_MIN));
1287 __ Cset(i.OutputRegister(1), eq);
1288 __ Bind(&done);
1289 }
1290 break;
1291 case kArm64Float32ToUint64:
1292 __ Fcvtzu(i.OutputRegister64(), i.InputFloat32Register(0));
1293 if (i.OutputCount() > 1) {
1294 __ Fcmp(i.InputFloat32Register(0), -1.0);
1295 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1296 __ Cset(i.OutputRegister(1), ne);
1297 }
1298 break;
1299 case kArm64Float64ToUint64:
1300 __ Fcvtzu(i.OutputRegister64(), i.InputDoubleRegister(0));
1301 if (i.OutputCount() > 1) {
1302 __ Fcmp(i.InputDoubleRegister(0), -1.0);
1303 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1304 __ Cset(i.OutputRegister(1), ne);
1305 }
1306 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001307 case kArm64Int32ToFloat32:
1308 __ Scvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1309 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001310 case kArm64Int32ToFloat64:
1311 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1312 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001313 case kArm64Int64ToFloat32:
1314 __ Scvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1315 break;
1316 case kArm64Int64ToFloat64:
1317 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1318 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001319 case kArm64Uint32ToFloat32:
1320 __ Ucvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1321 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001322 case kArm64Uint32ToFloat64:
1323 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1324 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001325 case kArm64Uint64ToFloat32:
1326 __ Ucvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1327 break;
1328 case kArm64Uint64ToFloat64:
1329 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1330 break;
1331 case kArm64Float64ExtractLowWord32:
1332 __ Fmov(i.OutputRegister32(), i.InputFloat32Register(0));
1333 break;
1334 case kArm64Float64ExtractHighWord32:
1335 // TODO(arm64): This should use MOV (to general) when NEON is supported.
1336 __ Fmov(i.OutputRegister(), i.InputFloat64Register(0));
1337 __ Lsr(i.OutputRegister(), i.OutputRegister(), 32);
1338 break;
1339 case kArm64Float64InsertLowWord32: {
1340 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1341 UseScratchRegisterScope scope(masm());
1342 Register tmp = scope.AcquireX();
1343 __ Fmov(tmp, i.InputFloat64Register(0));
1344 __ Bfi(tmp, i.InputRegister(1), 0, 32);
1345 __ Fmov(i.OutputFloat64Register(), tmp);
1346 break;
1347 }
1348 case kArm64Float64InsertHighWord32: {
1349 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1350 UseScratchRegisterScope scope(masm());
1351 Register tmp = scope.AcquireX();
1352 __ Fmov(tmp.W(), i.InputFloat32Register(0));
1353 __ Bfi(tmp, i.InputRegister(1), 32, 32);
1354 __ Fmov(i.OutputFloat64Register(), tmp);
1355 break;
1356 }
1357 case kArm64Float64MoveU64:
1358 __ Fmov(i.OutputFloat64Register(), i.InputRegister(0));
1359 break;
1360 case kArm64U64MoveFloat64:
1361 __ Fmov(i.OutputRegister(), i.InputDoubleRegister(0));
1362 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001363 case kArm64Ldrb:
1364 __ Ldrb(i.OutputRegister(), i.MemoryOperand());
1365 break;
1366 case kArm64Ldrsb:
1367 __ Ldrsb(i.OutputRegister(), i.MemoryOperand());
1368 break;
1369 case kArm64Strb:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001370 __ Strb(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001371 break;
1372 case kArm64Ldrh:
1373 __ Ldrh(i.OutputRegister(), i.MemoryOperand());
1374 break;
1375 case kArm64Ldrsh:
1376 __ Ldrsh(i.OutputRegister(), i.MemoryOperand());
1377 break;
1378 case kArm64Strh:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001379 __ Strh(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001380 break;
1381 case kArm64LdrW:
1382 __ Ldr(i.OutputRegister32(), i.MemoryOperand());
1383 break;
1384 case kArm64StrW:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001385 __ Str(i.InputOrZeroRegister32(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001386 break;
1387 case kArm64Ldr:
1388 __ Ldr(i.OutputRegister(), i.MemoryOperand());
1389 break;
1390 case kArm64Str:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001391 __ Str(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001392 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001393 case kArm64LdrS:
1394 __ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001395 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001396 case kArm64StrS:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001397 __ Str(i.InputFloat32OrZeroRegister(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001398 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001399 case kArm64LdrD:
1400 __ Ldr(i.OutputDoubleRegister(), i.MemoryOperand());
1401 break;
1402 case kArm64StrD:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001403 __ Str(i.InputFloat64OrZeroRegister(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001404 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001405 case kCheckedLoadInt8:
1406 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsb);
1407 break;
1408 case kCheckedLoadUint8:
1409 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrb);
1410 break;
1411 case kCheckedLoadInt16:
1412 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsh);
1413 break;
1414 case kCheckedLoadUint16:
1415 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrh);
1416 break;
1417 case kCheckedLoadWord32:
1418 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldr);
1419 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001420 case kCheckedLoadWord64:
1421 ASSEMBLE_CHECKED_LOAD_INTEGER_64(Ldr);
1422 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001423 case kCheckedLoadFloat32:
1424 ASSEMBLE_CHECKED_LOAD_FLOAT(32);
1425 break;
1426 case kCheckedLoadFloat64:
1427 ASSEMBLE_CHECKED_LOAD_FLOAT(64);
1428 break;
1429 case kCheckedStoreWord8:
1430 ASSEMBLE_CHECKED_STORE_INTEGER(Strb);
1431 break;
1432 case kCheckedStoreWord16:
1433 ASSEMBLE_CHECKED_STORE_INTEGER(Strh);
1434 break;
1435 case kCheckedStoreWord32:
1436 ASSEMBLE_CHECKED_STORE_INTEGER(Str);
1437 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001438 case kCheckedStoreWord64:
1439 ASSEMBLE_CHECKED_STORE_INTEGER_64(Str);
1440 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001441 case kCheckedStoreFloat32:
1442 ASSEMBLE_CHECKED_STORE_FLOAT(32);
1443 break;
1444 case kCheckedStoreFloat64:
1445 ASSEMBLE_CHECKED_STORE_FLOAT(64);
1446 break;
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001447 case kAtomicLoadInt8:
1448 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrsb);
1449 break;
1450 case kAtomicLoadUint8:
1451 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrb);
1452 break;
1453 case kAtomicLoadInt16:
1454 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrsh);
1455 break;
1456 case kAtomicLoadUint16:
1457 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrh);
1458 break;
1459 case kAtomicLoadWord32:
1460 __ Ldr(i.OutputRegister32(),
1461 MemOperand(i.InputRegister(0), i.InputRegister(1)));
1462 __ Dmb(InnerShareable, BarrierAll);
1463 break;
1464 case kAtomicStoreWord8:
1465 ASSEMBLE_ATOMIC_STORE_INTEGER(Strb);
1466 break;
1467 case kAtomicStoreWord16:
1468 ASSEMBLE_ATOMIC_STORE_INTEGER(Strh);
1469 break;
1470 case kAtomicStoreWord32:
1471 __ Dmb(InnerShareable, BarrierAll);
1472 __ Str(i.InputRegister32(2),
1473 MemOperand(i.InputRegister(0), i.InputRegister(1)));
1474 __ Dmb(InnerShareable, BarrierAll);
1475 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001476 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001477 return kSuccess;
Ben Murdoch014dc512016-03-22 12:00:34 +00001478} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001479
1480
1481// Assemble branches after this instruction.
Emily Bernier958fae72015-03-24 16:35:39 -04001482void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001483 Arm64OperandConverter i(this, instr);
Emily Bernier958fae72015-03-24 16:35:39 -04001484 Label* tlabel = branch->true_label;
1485 Label* flabel = branch->false_label;
1486 FlagsCondition condition = branch->condition;
1487 ArchOpcode opcode = instr->arch_opcode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001488
Emily Bernier958fae72015-03-24 16:35:39 -04001489 if (opcode == kArm64CompareAndBranch32) {
1490 switch (condition) {
1491 case kEqual:
1492 __ Cbz(i.InputRegister32(0), tlabel);
1493 break;
1494 case kNotEqual:
1495 __ Cbnz(i.InputRegister32(0), tlabel);
1496 break;
1497 default:
1498 UNREACHABLE();
1499 }
1500 } else if (opcode == kArm64TestAndBranch32) {
1501 switch (condition) {
1502 case kEqual:
1503 __ Tbz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1504 break;
1505 case kNotEqual:
1506 __ Tbnz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1507 break;
1508 default:
1509 UNREACHABLE();
1510 }
1511 } else if (opcode == kArm64TestAndBranch) {
1512 switch (condition) {
1513 case kEqual:
1514 __ Tbz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1515 break;
1516 case kNotEqual:
1517 __ Tbnz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1518 break;
1519 default:
1520 UNREACHABLE();
1521 }
1522 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +00001523 Condition cc = FlagsConditionToCondition(condition);
1524 __ B(cc, tlabel);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001525 }
Emily Bernier958fae72015-03-24 16:35:39 -04001526 if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
1527}
1528
1529
Ben Murdoch014dc512016-03-22 12:00:34 +00001530void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernier958fae72015-03-24 16:35:39 -04001531 if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001532}
1533
1534
1535// Assemble boolean materializations after this instruction.
1536void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1537 FlagsCondition condition) {
1538 Arm64OperandConverter i(this, instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001539
1540 // Materialize a full 64-bit 1 or 0 value. The result register is always the
1541 // last output of the instruction.
Ben Murdoch014dc512016-03-22 12:00:34 +00001542 DCHECK_NE(0u, instr->OutputCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001543 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdoch014dc512016-03-22 12:00:34 +00001544 Condition cc = FlagsConditionToCondition(condition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001545 __ Cset(reg, cc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001546}
1547
1548
Ben Murdoch014dc512016-03-22 12:00:34 +00001549void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1550 Arm64OperandConverter i(this, instr);
1551 Register input = i.InputRegister32(0);
1552 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1553 __ Cmp(input, i.InputInt32(index + 0));
1554 __ B(eq, GetLabel(i.InputRpo(index + 1)));
1555 }
1556 AssembleArchJump(i.InputRpo(1));
1557}
1558
1559
1560void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1561 Arm64OperandConverter i(this, instr);
1562 UseScratchRegisterScope scope(masm());
1563 Register input = i.InputRegister32(0);
1564 Register temp = scope.AcquireX();
1565 size_t const case_count = instr->InputCount() - 2;
1566 Label table;
1567 __ Cmp(input, case_count);
1568 __ B(hs, GetLabel(i.InputRpo(1)));
1569 __ Adr(temp, &table);
1570 __ Add(temp, temp, Operand(input, UXTW, 2));
1571 __ Br(temp);
1572 __ StartBlockPools();
1573 __ Bind(&table);
1574 for (size_t index = 0; index < case_count; ++index) {
1575 __ B(GetLabel(i.InputRpo(index + 2)));
1576 }
1577 __ EndBlockPools();
1578}
1579
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001580CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdoch014dc512016-03-22 12:00:34 +00001581 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001582 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch014dc512016-03-22 12:00:34 +00001583 isolate(), deoptimization_id, bailout_type);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001584 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001585 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001586 return kSuccess;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001587}
1588
// Finalize the frame layout: pick the stack pointer in use and reserve
// frame slots for all callee-saved registers this call descriptor requires.
void CodeGenerator::FinishFrame(Frame* frame) {
  frame->AlignFrame(16);
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // C calls and native-stack frames run on csp; everything else uses jssp.
  if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) {
    __ SetStackPointer(csp);
  } else {
    __ SetStackPointer(jssp);
  }

  // Save FP registers.
  // Each saved D register occupies kDoubleSize / kPointerSize frame slots.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  int saved_count = saves_fp.Count();
  if (saved_count != 0) {
    DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
    frame->AllocateSavedCalleeRegisterSlots(saved_count *
                                            (kDoubleSize / kPointerSize));
  }

  // One frame slot per saved general-purpose (X) register.
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  saved_count = saves.Count();
  if (saved_count != 0) {
    frame->AllocateSavedCalleeRegisterSlots(saved_count);
  }
}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001616
// Emit the function prologue: build the frame (if any), claim spill slots,
// and push callee-saved registers. Must mirror AssembleReturn's epilogue.
void CodeGenerator::AssembleConstructFrame() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->UseNativeStack()) {
    __ AssertCspAligned();
  }

  if (frame_access_state()->has_frame()) {
    if (descriptor->IsJSFunctionCall()) {
      // JS frames always run on jssp.
      DCHECK(!descriptor->UseNativeStack());
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      if (descriptor->IsCFunctionCall()) {
        // Standard C frame: push lr/fp, point fp at the frame, then claim
        // the spill area.
        __ Push(lr, fp);
        __ Mov(fp, masm_.StackPointer());
        __ Claim(frame()->GetSpillSlotCount());
      } else {
        __ StubPrologue(info()->GetOutputStackFrameType(),
                        frame()->GetTotalFrameSlotCount());
      }
    }
  }

  // Number of spill slots still to be claimed below.
  int shrink_slots = frame()->GetSpillSlotCount();

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // Slots already provided by the unoptimized frame need not be claimed.
    shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  if (descriptor->IsJSFunctionCall()) {
    __ Claim(shrink_slots);
  }

  // Save FP registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  int saved_count = saves_fp.Count();
  if (saved_count != 0) {
    DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
    __ PushCPURegList(saves_fp);
  }
  // Save registers.
  // TODO(palfia): TF save list is not in sync with
  // CPURegList::GetCalleeSaved(): x30 is missing.
  // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list());
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  saved_count = saves.Count();
  if (saved_count != 0) {
    __ PushCPURegList(saves);
  }
}
1677
1678
// Emit the function epilogue: restore callee-saved registers, tear down the
// frame, drop stack parameters, and return. Mirrors AssembleConstructFrame.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Restore registers.
  // Pops must happen in the reverse order of the pushes in the prologue.
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  if (saves.Count() != 0) {
    __ PopCPURegList(saves);
  }

  // Restore fp registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  if (saves_fp.Count() != 0) {
    __ PopCPURegList(saves_fp);
  }

  int pop_count = static_cast<int>(descriptor->StackParameterCount());
  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now: all returns branch to a
    // single shared epilogue once it has been emitted.
    if (return_label_.is_bound()) {
      __ B(&return_label_);
      return;
    } else {
      __ Bind(&return_label_);
      AssembleDeconstructFrame();
      if (descriptor->UseNativeStack()) {
        // Round up to an even slot count so csp stays 16-byte aligned
        // (checked by AssertCspAligned below).
        pop_count += (pop_count & 1);  // align
      }
    }
  } else if (descriptor->UseNativeStack()) {
    pop_count += (pop_count & 1);  // align
  }
  __ Drop(pop_count);

  if (descriptor->UseNativeStack()) {
    __ AssertCspAligned();
  }
  __ Ret();
}
1721
1722
// Emit code moving |source| into |destination| for the gap resolver.
// Handles register, stack-slot, constant, FP-register and FP-stack-slot
// operands; memory-to-memory moves go through a scratch register.
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  Arm64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Mov(g.ToRegister(destination), src);
    } else {
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsStackSlot()) {
    MemOperand src = g.ToMemOperand(source, masm());
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    if (destination->IsRegister()) {
      __ Ldr(g.ToRegister(destination), src);
    } else {
      // Slot-to-slot move: bounce through a scratch register.
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireX();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(ConstantOperand::cast(source));
    if (destination->IsRegister() || destination->IsStackSlot()) {
      // Materialize into the destination register, or into a scratch
      // register that is then stored to the destination slot.
      UseScratchRegisterScope scope(masm());
      Register dst = destination->IsRegister() ? g.ToRegister(destination)
                                               : scope.AcquireX();
      if (src.type() == Constant::kHeapObject) {
        Handle<HeapObject> src_object = src.ToHeapObject();
        Heap::RootListIndex index;
        int slot;
        // Prefer rematerializing the object from a frame slot or the root
        // list over embedding the handle in the code.
        if (IsMaterializableFromFrame(src_object, &slot)) {
          __ Ldr(dst, g.SlotToMemOperand(slot, masm()));
        } else if (IsMaterializableFromRoot(src_object, &index)) {
          __ LoadRoot(dst, index);
        } else {
          __ LoadObject(dst, src_object);
        }
      } else {
        __ Mov(dst, g.ToImmediate(source));
      }
      if (destination->IsStackSlot()) {
        __ Str(dst, g.ToMemOperand(destination, masm()));
      }
    } else if (src.type() == Constant::kFloat32) {
      if (destination->IsFPRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination).S();
        __ Fmov(dst, src.ToFloat32());
      } else {
        DCHECK(destination->IsFPStackSlot());
        // Materialize in a scratch S register, then store to the slot.
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireS();
        __ Fmov(temp, src.ToFloat32());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      if (destination->IsFPRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination);
        __ Fmov(dst, src.ToFloat64());
      } else {
        DCHECK(destination->IsFPStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireD();
        __ Fmov(temp, src.ToFloat64());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    }
  } else if (source->IsFPRegister()) {
    FPRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      FPRegister dst = g.ToDoubleRegister(destination);
      __ Fmov(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source, masm());
    if (destination->IsFPRegister()) {
      __ Ldr(g.ToDoubleRegister(destination), src);
    } else {
      // FP slot-to-slot move: bounce through a scratch D register.
      UseScratchRegisterScope scope(masm());
      FPRegister temp = scope.AcquireD();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else {
    UNREACHABLE();
  }
}
1818
1819
// Emit code exchanging the contents of |source| and |destination| for the
// gap resolver, using scratch registers for the third operand of the swap.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  Arm64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    UseScratchRegisterScope scope(masm());
    Register temp = scope.AcquireX();
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      Register dst = g.ToRegister(destination);
      __ Mov(temp, src);
      __ Mov(src, dst);
      __ Mov(dst, temp);
    } else {
      // Register-slot swap via a scratch register.
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination, masm());
      __ Mov(temp, src);
      __ Ldr(src, dst);
      __ Str(temp, dst);
    }
  } else if (source->IsStackSlot() || source->IsFPStackSlot()) {
    // Slot-slot swap: two scratch D registers hold both values at once
    // (a D register is wide enough for either a GP or FP slot).
    UseScratchRegisterScope scope(masm());
    DoubleRegister temp_0 = scope.AcquireD();
    DoubleRegister temp_1 = scope.AcquireD();
    MemOperand src = g.ToMemOperand(source, masm());
    MemOperand dst = g.ToMemOperand(destination, masm());
    __ Ldr(temp_0, src);
    __ Ldr(temp_1, dst);
    __ Str(temp_0, dst);
    __ Str(temp_1, src);
  } else if (source->IsFPRegister()) {
    UseScratchRegisterScope scope(masm());
    FPRegister temp = scope.AcquireD();
    FPRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      FPRegister dst = g.ToDoubleRegister(destination);
      __ Fmov(temp, src);
      __ Fmov(src, dst);
      __ Fmov(dst, temp);
    } else {
      // FP-register-slot swap via a scratch D register.
      DCHECK(destination->IsFPStackSlot());
      MemOperand dst = g.ToMemOperand(destination, masm());
      __ Fmov(temp, src);
      __ Ldr(src, dst);
      __ Str(temp, dst);
    }
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
1873
1874
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 64-bit ARM we emit the jump tables inline (see
  // AssembleArchTableSwitch), so this out-of-line hook must never be called.
  UNREACHABLE();
}
1879
1880
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001881void CodeGenerator::EnsureSpaceForLazyDeopt() {
Ben Murdoch014dc512016-03-22 12:00:34 +00001882 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
1883 return;
1884 }
1885
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001886 int space_needed = Deoptimizer::patch_size();
Ben Murdoch014dc512016-03-22 12:00:34 +00001887 // Ensure that we have enough space after the previous lazy-bailout
1888 // instruction for patching the code here.
1889 intptr_t current_pc = masm()->pc_offset();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001890
Ben Murdoch014dc512016-03-22 12:00:34 +00001891 if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
1892 intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
1893 DCHECK((padding_size % kInstructionSize) == 0);
1894 InstructionAccurateScope instruction_accurate(
1895 masm(), padding_size / kInstructionSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001896
Ben Murdoch014dc512016-03-22 12:00:34 +00001897 while (padding_size > 0) {
1898 __ nop();
1899 padding_size -= kInstructionSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001900 }
1901 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001902}
1903
1904#undef __
1905
1906} // namespace compiler
1907} // namespace internal
1908} // namespace v8