blob: 479af7a75c4471e6eda57b7a8b6aac5c526adb86 [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/arm64/macro-assembler-arm64.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00009#include "src/ast/scopes.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/compiler/code-generator-impl.h"
11#include "src/compiler/gap-resolver.h"
12#include "src/compiler/node-matchers.h"
Ben Murdoch4a90d5f2016-03-22 12:00:34 +000013#include "src/compiler/osr.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014
15namespace v8 {
16namespace internal {
17namespace compiler {
18
19#define __ masm()->
20
21
// Adds Arm64-specific methods to convert InstructionOperands.
class Arm64OperandConverter final : public InstructionOperandConverter {
 public:
  Arm64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Returns input |index| as the 32-bit ("S") view of its double register.
  DoubleRegister InputFloat32Register(size_t index) {
    return InputDoubleRegister(index).S();
  }

  // Returns input |index| as a full 64-bit double register.
  DoubleRegister InputFloat64Register(size_t index) {
    return InputDoubleRegister(index);
  }

  // Returns the float32 input |index|, or wzr when the input is the
  // immediate +0.0f (the zero register encodes a zero store operand).
  CPURegister InputFloat32OrZeroRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) {
      DCHECK(bit_cast<int32_t>(InputFloat32(index)) == 0);
      return wzr;
    }
    DCHECK(instr_->InputAt(index)->IsFPRegister());
    return InputDoubleRegister(index).S();
  }

  // Returns the float64 input |index|, or xzr when the input is the
  // immediate +0.0 (the zero register encodes a zero store operand).
  CPURegister InputFloat64OrZeroRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) {
      DCHECK(bit_cast<int64_t>(InputDouble(index)) == 0);
      return xzr;
    }
    DCHECK(instr_->InputAt(index)->IsDoubleRegister());
    return InputDoubleRegister(index);
  }

  size_t OutputCount() { return instr_->OutputCount(); }

  DoubleRegister OutputFloat32Register() { return OutputDoubleRegister().S(); }

  DoubleRegister OutputFloat64Register() { return OutputDoubleRegister(); }

  // Returns input |index| as the 32-bit ("W") view of its register.
  Register InputRegister32(size_t index) {
    return ToRegister(instr_->InputAt(index)).W();
  }

  // Returns the 32-bit register input |index|, or wzr when the input is the
  // immediate 0.
  Register InputOrZeroRegister32(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt32(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return wzr;
    }
    return InputRegister32(index);
  }

  Register InputRegister64(size_t index) { return InputRegister(index); }

  // Returns the 64-bit register input |index|, or xzr when the input is the
  // immediate 0.
  Register InputOrZeroRegister64(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt64(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return xzr;
    }
    return InputRegister64(index);
  }

  Operand InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index) {
    return ToOperand(instr_->InputAt(index));
  }

  Operand InputOperand64(size_t index) { return InputOperand(index); }

  Operand InputOperand32(size_t index) {
    return ToOperand32(instr_->InputAt(index));
  }

  Register OutputRegister64() { return OutputRegister(); }

  Register OutputRegister32() { return ToRegister(instr_->Output()).W(); }

  // Decodes the instruction's addressing mode into a 32-bit "operand 2"
  // (shifted or extended register, or plain operand).  Shift/rotate amounts
  // use a 5-bit immediate taken from the following input slot.
  Operand InputOperand2_32(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand32(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister32(index), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister32(index), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister32(index), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister32(index), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister32(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister32(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister32(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister32(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        // Memory modes are handled by MemoryOperand(), not here.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // 64-bit variant of InputOperand2_32; shift/rotate amounts use a 6-bit
  // immediate taken from the following input slot.
  Operand InputOperand2_64(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand64(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister64(index), LSL, InputInt6(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister64(index), LSR, InputInt6(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister64(index), ASR, InputInt6(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister64(index), ROR, InputInt6(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister64(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister64(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister64(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister64(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        // Memory modes are handled by MemoryOperand(), not here.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // Builds a MemOperand from the inputs starting at |*first_index| and
  // advances |*first_index| past the inputs it consumed (2 for base+imm or
  // base+reg, 3 for base+scaled-reg).
  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_UXTB:
      case kMode_Operand2_R_UXTH:
      case kMode_Operand2_R_SXTB:
      case kMode_Operand2_R_SXTH:
        // These are operand-2 modes, not memory modes.
        break;
      case kMode_Operand2_R_LSL_I:
        *first_index += 3;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1),
                          LSL, InputInt32(index + 2));
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  // Convenience overload: decode a memory operand starting at input
  // |first_index| without reporting how many inputs were consumed.
  MemOperand MemoryOperand(size_t first_index = 0) {
    return MemoryOperand(&first_index);
  }

  Operand ToOperand(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return ToImmediate(op);
  }

  Operand ToOperand32(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op).W());
    }
    return ToImmediate(op);
  }

  // Converts a constant operand into an assembler Operand.  WASM memory
  // size/address constants keep their relocation mode so the embedded value
  // can be patched later; float constants are materialized as tenured
  // heap numbers.
  Operand ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    switch (constant.type()) {
      case Constant::kInt32:
        if (constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
          return Operand(constant.ToInt32(), constant.rmode());
        } else {
          return Operand(constant.ToInt32());
        }
      case Constant::kInt64:
        if (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
            constant.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE) {
          return Operand(constant.ToInt64(), constant.rmode());
        } else {
          DCHECK(constant.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
          return Operand(constant.ToInt64());
        }
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Operand(constant.ToHeapObject());
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index(), masm);
  }

  // Translates an allocated stack slot into an FP- or SP-relative
  // MemOperand.
  MemOperand SlotToMemOperand(int slot, MacroAssembler* masm) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    if (offset.from_frame_pointer()) {
      int from_sp = offset.offset() + frame_access_state()->GetSPToFPOffset();
      // Convert FP-offsets to SP-offsets if it results in better code.
      if (Assembler::IsImmLSUnscaled(from_sp) ||
          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
        offset = FrameOffset::FromStackPointer(from_sp);
      }
    }
    return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                      offset.offset());
  }
};
258
259
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400260namespace {
261
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000262class OutOfLineLoadNaN32 final : public OutOfLineCode {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400263 public:
264 OutOfLineLoadNaN32(CodeGenerator* gen, DoubleRegister result)
265 : OutOfLineCode(gen), result_(result) {}
266
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000267 void Generate() final {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400268 __ Fmov(result_, std::numeric_limits<float>::quiet_NaN());
269 }
270
271 private:
272 DoubleRegister const result_;
273};
274
275
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000276class OutOfLineLoadNaN64 final : public OutOfLineCode {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400277 public:
278 OutOfLineLoadNaN64(CodeGenerator* gen, DoubleRegister result)
279 : OutOfLineCode(gen), result_(result) {}
280
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000281 void Generate() final {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400282 __ Fmov(result_, std::numeric_limits<double>::quiet_NaN());
283 }
284
285 private:
286 DoubleRegister const result_;
287};
288
289
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000290class OutOfLineLoadZero final : public OutOfLineCode {
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400291 public:
292 OutOfLineLoadZero(CodeGenerator* gen, Register result)
293 : OutOfLineCode(gen), result_(result) {}
294
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000295 void Generate() final { __ Mov(result_, 0); }
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400296
297 private:
298 Register const result_;
299};
300
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000301
// Deferred write-barrier code for kArchStoreWithWriteBarrier.  Filters out
// stores that cannot create an old-to-new pointer (smis, uninteresting
// pages) and otherwise calls the RecordWriteStub.  |scratch1| is clobbered
// with the slot address (object + index) before the stub call.
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        // With no frame there is no spilled lr, so Generate() must
        // preserve it manually around the stub call.
        must_save_lr_(!gen->frame_access_state()->has_frame()) {}

  void Generate() final {
    // Smi values can never need a barrier; skip them unless the mode
    // guarantees the value is a pointer.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Skip the barrier when the value's page is not interesting for
    // pointer tracking.
    __ CheckPageFlagClear(value_, scratch0_,
                          MemoryChunk::kPointersToHereAreInterestingMask,
                          exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore lr if the frame was elided.
      __ Push(lr);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // The stub expects the slot address in scratch1_.
    __ Add(scratch1_, object_, index_);
    __ CallStub(&stub);
    if (must_save_lr_) {
      __ Pop(lr);
    }
  }

 private:
  Register const object_;
  Operand const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  bool must_save_lr_;
};
350
351
352Condition FlagsConditionToCondition(FlagsCondition condition) {
353 switch (condition) {
354 case kEqual:
355 return eq;
356 case kNotEqual:
357 return ne;
358 case kSignedLessThan:
359 return lt;
360 case kSignedGreaterThanOrEqual:
361 return ge;
362 case kSignedLessThanOrEqual:
363 return le;
364 case kSignedGreaterThan:
365 return gt;
366 case kUnsignedLessThan:
367 return lo;
368 case kUnsignedGreaterThanOrEqual:
369 return hs;
370 case kUnsignedLessThanOrEqual:
371 return ls;
372 case kUnsignedGreaterThan:
373 return hi;
374 case kFloatLessThanOrUnordered:
375 return lt;
376 case kFloatGreaterThanOrEqual:
377 return ge;
378 case kFloatLessThanOrEqual:
379 return ls;
380 case kFloatGreaterThanOrUnordered:
381 return hi;
382 case kFloatLessThan:
383 return lo;
384 case kFloatGreaterThanOrEqualOrUnordered:
385 return hs;
386 case kFloatLessThanOrEqualOrUnordered:
387 return le;
388 case kFloatGreaterThan:
389 return gt;
390 case kOverflow:
391 return vs;
392 case kNotOverflow:
393 return vc;
394 case kUnorderedEqual:
395 case kUnorderedNotEqual:
396 break;
397 }
398 UNREACHABLE();
399 return nv;
400}
401
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400402} // namespace
403
// Emits an unsigned bounds check of |offset| against |length|, branching to
// |out_of_bounds| on failure.  When |length| is a power-of-two immediate
// the check is a single Tst of the bits above the length instead of
// Cmp + B.hs.
#define ASSEMBLE_BOUNDS_CHECK(offset, length, out_of_bounds) \
  do {                                                       \
    if (length.IsImmediate() &&                              \
        base::bits::IsPowerOfTwo64(length.ImmediateValue())) { \
      __ Tst(offset, ~(length.ImmediateValue() - 1));        \
      __ B(ne, out_of_bounds);                               \
    } else {                                                 \
      __ Cmp(offset, length);                                \
      __ B(hs, out_of_bounds);                               \
    }                                                        \
  } while (0)

// Checked float load: on out-of-bounds offsets jumps to an out-of-line
// stub that produces a quiet NaN (width is 32 or 64).
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                         \
  do {                                                             \
    auto result = i.OutputFloat##width##Register();                \
    auto buffer = i.InputRegister(0);                              \
    auto offset = i.InputRegister32(1);                            \
    auto length = i.InputOperand32(2);                             \
    auto ool = new (zone()) OutOfLineLoadNaN##width(this, result); \
    ASSEMBLE_BOUNDS_CHECK(offset, length, ool->entry());           \
    __ Ldr(result, MemOperand(buffer, offset, UXTW));              \
    __ Bind(ool->exit());                                          \
  } while (0)

// Checked 32-bit integer load: out-of-bounds offsets produce zero via an
// out-of-line stub.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)             \
  do {                                                       \
    auto result = i.OutputRegister32();                      \
    auto buffer = i.InputRegister(0);                        \
    auto offset = i.InputRegister32(1);                      \
    auto length = i.InputOperand32(2);                       \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    ASSEMBLE_BOUNDS_CHECK(offset, length, ool->entry());     \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));  \
    __ Bind(ool->exit());                                    \
  } while (0)

// Checked 64-bit integer load: same as above with a full-width result.
#define ASSEMBLE_CHECKED_LOAD_INTEGER_64(asm_instr)          \
  do {                                                       \
    auto result = i.OutputRegister();                        \
    auto buffer = i.InputRegister(0);                        \
    auto offset = i.InputRegister32(1);                      \
    auto length = i.InputOperand32(2);                       \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    ASSEMBLE_BOUNDS_CHECK(offset, length, ool->entry());     \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));  \
    __ Bind(ool->exit());                                    \
  } while (0)

// Checked float store: out-of-bounds stores are simply skipped.  A zero
// value may arrive as an immediate and is stored via wzr/xzr.
#define ASSEMBLE_CHECKED_STORE_FLOAT(width)              \
  do {                                                   \
    auto buffer = i.InputRegister(0);                    \
    auto offset = i.InputRegister32(1);                  \
    auto length = i.InputOperand32(2);                   \
    auto value = i.InputFloat##width##OrZeroRegister(3); \
    Label done;                                          \
    ASSEMBLE_BOUNDS_CHECK(offset, length, &done);        \
    __ Str(value, MemOperand(buffer, offset, UXTW));     \
    __ Bind(&done);                                      \
  } while (0)

// Checked 32-bit integer store: out-of-bounds stores are skipped.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)          \
  do {                                                     \
    auto buffer = i.InputRegister(0);                      \
    auto offset = i.InputRegister32(1);                    \
    auto length = i.InputOperand32(2);                     \
    auto value = i.InputOrZeroRegister32(3);               \
    Label done;                                            \
    ASSEMBLE_BOUNDS_CHECK(offset, length, &done);          \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                        \
  } while (0)

// Checked 64-bit integer store: out-of-bounds stores are skipped.
#define ASSEMBLE_CHECKED_STORE_INTEGER_64(asm_instr)       \
  do {                                                     \
    auto buffer = i.InputRegister(0);                      \
    auto offset = i.InputRegister32(1);                    \
    auto length = i.InputOperand32(2);                     \
    auto value = i.InputOrZeroRegister64(3);               \
    Label done;                                            \
    ASSEMBLE_BOUNDS_CHECK(offset, length, &done);          \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                        \
  } while (0)

// Emits a shift with either a register or an immediate shift amount
// (immediates are reduced modulo the operand width, matching the
// hardware's behavior for register shift amounts).
#define ASSEMBLE_SHIFT(asm_instr, width)                                    \
  do {                                                                      \
    if (instr->InputAt(1)->IsRegister()) {                                  \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   i.InputRegister##width(1));                              \
    } else {                                                                \
      uint32_t imm =                                                        \
          static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   imm % (width));                                          \
    }                                                                       \
  } while (0)

// Sequentially-consistent atomic load: plain load followed by a full
// barrier.
#define ASSEMBLE_ATOMIC_LOAD_INTEGER(asm_instr)                       \
  do {                                                                \
    __ asm_instr(i.OutputRegister(),                                  \
                 MemOperand(i.InputRegister(0), i.InputRegister(1))); \
    __ Dmb(InnerShareable, BarrierAll);                               \
  } while (0)

// Sequentially-consistent atomic store: full barriers on both sides of a
// plain store.
#define ASSEMBLE_ATOMIC_STORE_INTEGER(asm_instr)                      \
  do {                                                                \
    __ Dmb(InnerShareable, BarrierAll);                               \
    __ asm_instr(i.InputRegister(2),                                  \
                 MemOperand(i.InputRegister(0), i.InputRegister(1))); \
    __ Dmb(InnerShareable, BarrierAll);                               \
  } while (0)

// Calls the C implementation of a two-argument ieee754 math function
// (arguments and result are passed in FP registers; 0 integer, 2 double
// parameters).
#define ASSEMBLE_IEEE754_BINOP(name)                                          \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     0, 2);                                                   \
  } while (0)

// Calls the C implementation of a one-argument ieee754 math function.
#define ASSEMBLE_IEEE754_UNOP(name)                                           \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     0, 1);                                                   \
  } while (0)
529
Ben Murdochda12d292016-06-02 14:46:10 +0100530void CodeGenerator::AssembleDeconstructFrame() {
531 const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
532 if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) {
533 __ Mov(csp, fp);
534 } else {
535 __ Mov(jssp, fp);
536 }
537 __ Pop(fp, lr);
538}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000539
540void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
541 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
542 if (sp_slot_delta > 0) {
543 __ Drop(sp_slot_delta);
544 }
545 frame_access_state()->SetFrameAccessToDefault();
546}
547
548
549void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
550 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
551 if (sp_slot_delta < 0) {
552 __ Claim(-sp_slot_delta);
553 frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
554 }
Ben Murdochda12d292016-06-02 14:46:10 +0100555 if (frame_access_state()->has_frame()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000556 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
557 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
558 }
559 frame_access_state()->SetFrameAccessToSP();
560}
561
// If the current frame is an arguments adaptor frame, removes it before a
// tail call so the tail callee sees the real caller frame.  |args_reg|
// holds the callee's argument count; the scratch registers must not alias
// it.
void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if current frame is an arguments adaptor frame.
  __ Ldr(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ Cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &done);

  // Load arguments count from current arguments adaptor frame (note, it
  // does not include receiver).
  Register caller_args_count_reg = scratch1;
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  // Shuffle the incoming arguments down over the adaptor frame.
  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000586
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000587// Assembles an instruction after register allocation, producing machine code.
Ben Murdochc5610432016-08-08 18:44:38 +0100588CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
589 Instruction* instr) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000590 Arm64OperandConverter i(this, instr);
591 InstructionCode opcode = instr->opcode();
Ben Murdoch097c5b22016-05-18 11:27:45 +0100592 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
593 switch (arch_opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000594 case kArchCallCodeObject: {
595 EnsureSpaceForLazyDeopt();
596 if (instr->InputAt(0)->IsImmediate()) {
597 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
598 RelocInfo::CODE_TARGET);
599 } else {
600 Register target = i.InputRegister(0);
601 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
602 __ Call(target);
603 }
Ben Murdochda12d292016-06-02 14:46:10 +0100604 RecordCallPosition(instr);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100605 // TODO(titzer): this is ugly. JSSP should be a caller-save register
606 // in this case, but it is not possible to express in the register
607 // allocator.
Ben Murdochda12d292016-06-02 14:46:10 +0100608 CallDescriptor::Flags flags(MiscField::decode(opcode));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100609 if (flags & CallDescriptor::kRestoreJSSP) {
Ben Murdochda12d292016-06-02 14:46:10 +0100610 __ Ldr(jssp, MemOperand(csp));
611 __ Mov(csp, jssp);
612 }
613 if (flags & CallDescriptor::kRestoreCSP) {
614 __ Mov(csp, jssp);
615 __ AssertCspAligned();
Ben Murdoch097c5b22016-05-18 11:27:45 +0100616 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000617 frame_access_state()->ClearSPDelta();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000618 break;
619 }
Ben Murdochda12d292016-06-02 14:46:10 +0100620 case kArchTailCallCodeObjectFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000621 case kArchTailCallCodeObject: {
622 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
623 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100624 if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
625 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
626 i.TempRegister(0), i.TempRegister(1),
627 i.TempRegister(2));
628 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000629 if (instr->InputAt(0)->IsImmediate()) {
630 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
631 RelocInfo::CODE_TARGET);
632 } else {
633 Register target = i.InputRegister(0);
634 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
635 __ Jump(target);
636 }
637 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000638 break;
639 }
Ben Murdochc5610432016-08-08 18:44:38 +0100640 case kArchTailCallAddress: {
641 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
642 AssembleDeconstructActivationRecord(stack_param_delta);
643 CHECK(!instr->InputAt(0)->IsImmediate());
644 __ Jump(i.InputRegister(0));
645 frame_access_state()->ClearSPDelta();
646 break;
647 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000648 case kArchCallJSFunction: {
649 EnsureSpaceForLazyDeopt();
650 Register func = i.InputRegister(0);
651 if (FLAG_debug_code) {
652 // Check the function's context matches the context argument.
653 UseScratchRegisterScope scope(masm());
654 Register temp = scope.AcquireX();
655 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
656 __ cmp(cp, temp);
657 __ Assert(eq, kWrongFunctionContext);
658 }
659 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
660 __ Call(x10);
Ben Murdochda12d292016-06-02 14:46:10 +0100661 RecordCallPosition(instr);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100662 // TODO(titzer): this is ugly. JSSP should be a caller-save register
663 // in this case, but it is not possible to express in the register
664 // allocator.
Ben Murdochda12d292016-06-02 14:46:10 +0100665 CallDescriptor::Flags flags(MiscField::decode(opcode));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100666 if (flags & CallDescriptor::kRestoreJSSP) {
Ben Murdochda12d292016-06-02 14:46:10 +0100667 __ Ldr(jssp, MemOperand(csp));
668 __ Mov(csp, jssp);
669 }
670 if (flags & CallDescriptor::kRestoreCSP) {
671 __ Mov(csp, jssp);
672 __ AssertCspAligned();
Ben Murdoch097c5b22016-05-18 11:27:45 +0100673 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000674 frame_access_state()->ClearSPDelta();
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000675 break;
676 }
Ben Murdochda12d292016-06-02 14:46:10 +0100677 case kArchTailCallJSFunctionFromJSFunction:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000678 case kArchTailCallJSFunction: {
679 Register func = i.InputRegister(0);
680 if (FLAG_debug_code) {
681 // Check the function's context matches the context argument.
682 UseScratchRegisterScope scope(masm());
683 Register temp = scope.AcquireX();
684 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
685 __ cmp(cp, temp);
686 __ Assert(eq, kWrongFunctionContext);
687 }
688 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
689 AssembleDeconstructActivationRecord(stack_param_delta);
Ben Murdochda12d292016-06-02 14:46:10 +0100690 if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
691 AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
692 i.TempRegister(0), i.TempRegister(1),
693 i.TempRegister(2));
694 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000695 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
696 __ Jump(x10);
697 frame_access_state()->ClearSPDelta();
698 break;
699 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000700 case kArchPrepareCallCFunction:
701 // We don't need kArchPrepareCallCFunction on arm64 as the instruction
702 // selector already perform a Claim to reserve space on the stack and
703 // guarantee correct alignment of stack pointer.
704 UNREACHABLE();
705 break;
706 case kArchPrepareTailCall:
707 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
708 break;
709 case kArchCallCFunction: {
710 int const num_parameters = MiscField::decode(instr->opcode());
711 if (instr->InputAt(0)->IsImmediate()) {
712 ExternalReference ref = i.InputExternalReference(0);
713 __ CallCFunction(ref, num_parameters, 0);
714 } else {
715 Register func = i.InputRegister(0);
716 __ CallCFunction(func, num_parameters, 0);
717 }
718 // CallCFunction only supports register arguments so we never need to call
719 // frame()->ClearOutgoingParameterSlots() here.
720 DCHECK(frame_access_state()->sp_delta() == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000721 break;
722 }
723 case kArchJmp:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400724 AssembleArchJump(i.InputRpo(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000725 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000726 case kArchTableSwitch:
727 AssembleArchTableSwitch(instr);
728 break;
729 case kArchLookupSwitch:
730 AssembleArchLookupSwitch(instr);
731 break;
Ben Murdoch61f157c2016-09-16 13:49:30 +0100732 case kArchDebugBreak:
733 __ Debug("kArchDebugBreak", 0, BREAK);
734 break;
735 case kArchComment: {
736 Address comment_string = i.InputExternalReference(0).address();
737 __ RecordComment(reinterpret_cast<const char*>(comment_string));
738 break;
739 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000740 case kArchNop:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000741 case kArchThrowTerminator:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000742 // don't emit code for nops.
743 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000744 case kArchDeoptimize: {
745 int deopt_state_id =
746 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
747 Deoptimizer::BailoutType bailout_type =
748 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
Ben Murdochc5610432016-08-08 18:44:38 +0100749 CodeGenResult result =
750 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
751 if (result != kSuccess) return result;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000752 break;
753 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000754 case kArchRet:
755 AssembleReturn();
756 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400757 case kArchStackPointer:
758 __ mov(i.OutputRegister(), masm()->StackPointer());
759 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000760 case kArchFramePointer:
761 __ mov(i.OutputRegister(), fp);
762 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +0100763 case kArchParentFramePointer:
Ben Murdochda12d292016-06-02 14:46:10 +0100764 if (frame_access_state()->has_frame()) {
Ben Murdoch097c5b22016-05-18 11:27:45 +0100765 __ ldr(i.OutputRegister(), MemOperand(fp, 0));
766 } else {
767 __ mov(i.OutputRegister(), fp);
768 }
769 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000770 case kArchTruncateDoubleToI:
771 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
772 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000773 case kArchStoreWithWriteBarrier: {
774 RecordWriteMode mode =
775 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
Ben Murdoch097c5b22016-05-18 11:27:45 +0100776 AddressingMode addressing_mode =
777 AddressingModeField::decode(instr->opcode());
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000778 Register object = i.InputRegister(0);
Ben Murdoch097c5b22016-05-18 11:27:45 +0100779 Operand index(0);
780 if (addressing_mode == kMode_MRI) {
781 index = Operand(i.InputInt64(1));
782 } else {
783 DCHECK_EQ(addressing_mode, kMode_MRR);
784 index = Operand(i.InputRegister(1));
785 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000786 Register value = i.InputRegister(2);
787 Register scratch0 = i.TempRegister(0);
788 Register scratch1 = i.TempRegister(1);
789 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
790 scratch0, scratch1, mode);
791 __ Str(value, MemOperand(object, index));
792 __ CheckPageFlagSet(object, scratch0,
793 MemoryChunk::kPointersFromHereAreInterestingMask,
794 ool->entry());
795 __ Bind(ool->exit());
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400796 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000797 }
Ben Murdoch097c5b22016-05-18 11:27:45 +0100798 case kArchStackSlot: {
799 FrameOffset offset =
800 frame_access_state()->GetFrameOffset(i.InputInt32(0));
801 Register base;
802 if (offset.from_stack_pointer()) {
803 base = __ StackPointer();
804 } else {
805 base = fp;
806 }
807 __ Add(i.OutputRegister(0), base, Operand(offset.offset()));
808 break;
809 }
Ben Murdoch61f157c2016-09-16 13:49:30 +0100810 case kIeee754Float64Atan:
811 ASSEMBLE_IEEE754_UNOP(atan);
812 break;
813 case kIeee754Float64Atan2:
814 ASSEMBLE_IEEE754_BINOP(atan2);
815 break;
816 case kIeee754Float64Cos:
817 ASSEMBLE_IEEE754_UNOP(cos);
818 break;
819 case kIeee754Float64Cbrt:
820 ASSEMBLE_IEEE754_UNOP(cbrt);
821 break;
822 case kIeee754Float64Exp:
823 ASSEMBLE_IEEE754_UNOP(exp);
824 break;
825 case kIeee754Float64Expm1:
826 ASSEMBLE_IEEE754_UNOP(expm1);
827 break;
828 case kIeee754Float64Atanh:
829 ASSEMBLE_IEEE754_UNOP(atanh);
830 break;
831 case kIeee754Float64Log:
832 ASSEMBLE_IEEE754_UNOP(log);
833 break;
834 case kIeee754Float64Log1p:
835 ASSEMBLE_IEEE754_UNOP(log1p);
836 break;
837 case kIeee754Float64Log2:
838 ASSEMBLE_IEEE754_UNOP(log2);
839 break;
840 case kIeee754Float64Log10:
841 ASSEMBLE_IEEE754_UNOP(log10);
842 break;
843 case kIeee754Float64Sin:
844 ASSEMBLE_IEEE754_UNOP(sin);
845 break;
846 case kIeee754Float64Tan:
847 ASSEMBLE_IEEE754_UNOP(tan);
848 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000849 case kArm64Float32RoundDown:
850 __ Frintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
851 break;
852 case kArm64Float64RoundDown:
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400853 __ Frintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
854 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000855 case kArm64Float32RoundUp:
856 __ Frintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
857 break;
858 case kArm64Float64RoundUp:
859 __ Frintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400860 break;
861 case kArm64Float64RoundTiesAway:
862 __ Frinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
863 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000864 case kArm64Float32RoundTruncate:
865 __ Frintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
866 break;
867 case kArm64Float64RoundTruncate:
868 __ Frintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
869 break;
870 case kArm64Float32RoundTiesEven:
871 __ Frintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
872 break;
873 case kArm64Float64RoundTiesEven:
874 __ Frintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
875 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000876 case kArm64Add:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000877 if (FlagsModeField::decode(opcode) != kFlags_none) {
878 __ Adds(i.OutputRegister(), i.InputOrZeroRegister64(0),
879 i.InputOperand2_64(1));
880 } else {
881 __ Add(i.OutputRegister(), i.InputOrZeroRegister64(0),
882 i.InputOperand2_64(1));
883 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000884 break;
885 case kArm64Add32:
886 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000887 __ Adds(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400888 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000889 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000890 __ Add(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400891 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000892 }
893 break;
894 case kArm64And:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000895 __ And(i.OutputRegister(), i.InputOrZeroRegister64(0),
896 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000897 break;
898 case kArm64And32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000899 __ And(i.OutputRegister32(), i.InputOrZeroRegister32(0),
900 i.InputOperand2_32(1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400901 break;
902 case kArm64Bic:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000903 __ Bic(i.OutputRegister(), i.InputOrZeroRegister64(0),
904 i.InputOperand2_64(1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400905 break;
906 case kArm64Bic32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000907 __ Bic(i.OutputRegister32(), i.InputOrZeroRegister32(0),
908 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000909 break;
910 case kArm64Mul:
911 __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
912 break;
913 case kArm64Mul32:
914 __ Mul(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
915 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -0400916 case kArm64Smull:
917 __ Smull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
918 break;
919 case kArm64Umull:
920 __ Umull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
921 break;
922 case kArm64Madd:
923 __ Madd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
924 i.InputRegister(2));
925 break;
926 case kArm64Madd32:
927 __ Madd(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
928 i.InputRegister32(2));
929 break;
930 case kArm64Msub:
931 __ Msub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
932 i.InputRegister(2));
933 break;
934 case kArm64Msub32:
935 __ Msub(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
936 i.InputRegister32(2));
937 break;
938 case kArm64Mneg:
939 __ Mneg(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
940 break;
941 case kArm64Mneg32:
942 __ Mneg(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
943 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000944 case kArm64Idiv:
945 __ Sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
946 break;
947 case kArm64Idiv32:
948 __ Sdiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
949 break;
950 case kArm64Udiv:
951 __ Udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
952 break;
953 case kArm64Udiv32:
954 __ Udiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
955 break;
956 case kArm64Imod: {
957 UseScratchRegisterScope scope(masm());
958 Register temp = scope.AcquireX();
959 __ Sdiv(temp, i.InputRegister(0), i.InputRegister(1));
960 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
961 break;
962 }
963 case kArm64Imod32: {
964 UseScratchRegisterScope scope(masm());
965 Register temp = scope.AcquireW();
966 __ Sdiv(temp, i.InputRegister32(0), i.InputRegister32(1));
967 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
968 i.InputRegister32(0));
969 break;
970 }
971 case kArm64Umod: {
972 UseScratchRegisterScope scope(masm());
973 Register temp = scope.AcquireX();
974 __ Udiv(temp, i.InputRegister(0), i.InputRegister(1));
975 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
976 break;
977 }
978 case kArm64Umod32: {
979 UseScratchRegisterScope scope(masm());
980 Register temp = scope.AcquireW();
981 __ Udiv(temp, i.InputRegister32(0), i.InputRegister32(1));
982 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
983 i.InputRegister32(0));
984 break;
985 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000986 case kArm64Not:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000987 __ Mvn(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000988 break;
989 case kArm64Not32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000990 __ Mvn(i.OutputRegister32(), i.InputOperand32(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000991 break;
992 case kArm64Or:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000993 __ Orr(i.OutputRegister(), i.InputOrZeroRegister64(0),
994 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000995 break;
996 case kArm64Or32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +0000997 __ Orr(i.OutputRegister32(), i.InputOrZeroRegister32(0),
998 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000999 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001000 case kArm64Orn:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001001 __ Orn(i.OutputRegister(), i.InputOrZeroRegister64(0),
1002 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001003 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001004 case kArm64Orn32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001005 __ Orn(i.OutputRegister32(), i.InputOrZeroRegister32(0),
1006 i.InputOperand2_32(1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001007 break;
1008 case kArm64Eor:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001009 __ Eor(i.OutputRegister(), i.InputOrZeroRegister64(0),
1010 i.InputOperand2_64(1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001011 break;
1012 case kArm64Eor32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001013 __ Eor(i.OutputRegister32(), i.InputOrZeroRegister32(0),
1014 i.InputOperand2_32(1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001015 break;
1016 case kArm64Eon:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001017 __ Eon(i.OutputRegister(), i.InputOrZeroRegister64(0),
1018 i.InputOperand2_64(1));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001019 break;
1020 case kArm64Eon32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001021 __ Eon(i.OutputRegister32(), i.InputOrZeroRegister32(0),
1022 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001023 break;
1024 case kArm64Sub:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001025 if (FlagsModeField::decode(opcode) != kFlags_none) {
1026 __ Subs(i.OutputRegister(), i.InputOrZeroRegister64(0),
1027 i.InputOperand2_64(1));
1028 } else {
1029 __ Sub(i.OutputRegister(), i.InputOrZeroRegister64(0),
1030 i.InputOperand2_64(1));
1031 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001032 break;
1033 case kArm64Sub32:
1034 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001035 __ Subs(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001036 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001037 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001038 __ Sub(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001039 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001040 }
1041 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001042 case kArm64Lsl:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001043 ASSEMBLE_SHIFT(Lsl, 64);
1044 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001045 case kArm64Lsl32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001046 ASSEMBLE_SHIFT(Lsl, 32);
1047 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001048 case kArm64Lsr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001049 ASSEMBLE_SHIFT(Lsr, 64);
1050 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001051 case kArm64Lsr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001052 ASSEMBLE_SHIFT(Lsr, 32);
1053 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001054 case kArm64Asr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001055 ASSEMBLE_SHIFT(Asr, 64);
1056 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001057 case kArm64Asr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001058 ASSEMBLE_SHIFT(Asr, 32);
1059 break;
1060 case kArm64Ror:
1061 ASSEMBLE_SHIFT(Ror, 64);
1062 break;
1063 case kArm64Ror32:
1064 ASSEMBLE_SHIFT(Ror, 32);
1065 break;
1066 case kArm64Mov32:
1067 __ Mov(i.OutputRegister32(), i.InputRegister32(0));
1068 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001069 case kArm64Sxtb32:
1070 __ Sxtb(i.OutputRegister32(), i.InputRegister32(0));
1071 break;
1072 case kArm64Sxth32:
1073 __ Sxth(i.OutputRegister32(), i.InputRegister32(0));
1074 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001075 case kArm64Sxtw:
1076 __ Sxtw(i.OutputRegister(), i.InputRegister32(0));
1077 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001078 case kArm64Sbfx32:
1079 __ Sbfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
1080 i.InputInt5(2));
1081 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001082 case kArm64Ubfx:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001083 __ Ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt6(1),
1084 i.InputInt6(2));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001085 break;
1086 case kArm64Ubfx32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001087 __ Ubfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
1088 i.InputInt5(2));
1089 break;
1090 case kArm64Ubfiz32:
1091 __ Ubfiz(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
1092 i.InputInt5(2));
1093 break;
1094 case kArm64Bfi:
1095 __ Bfi(i.OutputRegister(), i.InputRegister(1), i.InputInt6(2),
1096 i.InputInt6(3));
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001097 break;
1098 case kArm64TestAndBranch32:
1099 case kArm64TestAndBranch:
1100 // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
1101 break;
1102 case kArm64CompareAndBranch32:
Ben Murdoch61f157c2016-09-16 13:49:30 +01001103 case kArm64CompareAndBranch:
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001104 // Pseudo instructions turned into cbz/cbnz in AssembleArchBranch.
 1105 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001106 case kArm64ClaimCSP: {
Ben Murdochda12d292016-06-02 14:46:10 +01001107 int count = RoundUp(i.InputInt32(0), 2);
Ben Murdoch097c5b22016-05-18 11:27:45 +01001108 Register prev = __ StackPointer();
Ben Murdochda12d292016-06-02 14:46:10 +01001109 if (prev.Is(jssp)) {
1110 // TODO(titzer): make this a macro-assembler method.
1111 // Align the CSP and store the previous JSSP on the stack.
1112 UseScratchRegisterScope scope(masm());
1113 Register tmp = scope.AcquireX();
1114
1115 int sp_alignment = __ ActivationFrameAlignment();
1116 __ Sub(tmp, jssp, kPointerSize);
1117 __ And(tmp, tmp, Operand(~static_cast<uint64_t>(sp_alignment - 1)));
1118 __ Mov(csp, tmp);
1119 __ Str(jssp, MemOperand(csp));
1120 if (count > 0) {
1121 __ SetStackPointer(csp);
1122 __ Claim(count);
1123 __ SetStackPointer(prev);
1124 }
1125 } else {
1126 __ AssertCspAligned();
1127 if (count > 0) {
1128 __ Claim(count);
1129 frame_access_state()->IncreaseSPDelta(count);
1130 }
1131 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001132 break;
1133 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001134 case kArm64ClaimJSSP: {
1135 int count = i.InputInt32(0);
1136 if (csp.Is(__ StackPointer())) {
Ben Murdochda12d292016-06-02 14:46:10 +01001137 // No JSSP is set up. Compute it from the CSP.
1138 __ AssertCspAligned();
1139 if (count > 0) {
1140 int even = RoundUp(count, 2);
1141 __ Sub(jssp, csp, count * kPointerSize);
1142 __ Sub(csp, csp, even * kPointerSize); // Must always be aligned.
1143 frame_access_state()->IncreaseSPDelta(even);
1144 } else {
1145 __ Mov(jssp, csp);
1146 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001147 } else {
1148 // JSSP is the current stack pointer, just use regular Claim().
1149 __ Claim(count);
1150 frame_access_state()->IncreaseSPDelta(count);
1151 }
1152 break;
1153 }
1154 case kArm64PokeCSP: // fall through
1155 case kArm64PokeJSSP: {
1156 Register prev = __ StackPointer();
1157 __ SetStackPointer(arch_opcode == kArm64PokeCSP ? csp : jssp);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001158 Operand operand(i.InputInt32(1) * kPointerSize);
Ben Murdochc5610432016-08-08 18:44:38 +01001159 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001160 __ Poke(i.InputFloat64Register(0), operand);
1161 } else {
1162 __ Poke(i.InputRegister(0), operand);
1163 }
Ben Murdoch097c5b22016-05-18 11:27:45 +01001164 __ SetStackPointer(prev);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001165 break;
1166 }
1167 case kArm64PokePair: {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001168 int slot = i.InputInt32(2) - 1;
Ben Murdochc5610432016-08-08 18:44:38 +01001169 if (instr->InputAt(0)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001170 __ PokePair(i.InputFloat64Register(1), i.InputFloat64Register(0),
1171 slot * kPointerSize);
1172 } else {
1173 __ PokePair(i.InputRegister(1), i.InputRegister(0),
1174 slot * kPointerSize);
1175 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001176 break;
1177 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001178 case kArm64Clz:
1179 __ Clz(i.OutputRegister64(), i.InputRegister64(0));
1180 break;
1181 case kArm64Clz32:
1182 __ Clz(i.OutputRegister32(), i.InputRegister32(0));
1183 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001184 case kArm64Rbit:
1185 __ Rbit(i.OutputRegister64(), i.InputRegister64(0));
1186 break;
1187 case kArm64Rbit32:
1188 __ Rbit(i.OutputRegister32(), i.InputRegister32(0));
1189 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001190 case kArm64Cmp:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001191 __ Cmp(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001192 break;
1193 case kArm64Cmp32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001194 __ Cmp(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001195 break;
1196 case kArm64Cmn:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001197 __ Cmn(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001198 break;
1199 case kArm64Cmn32:
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001200 __ Cmn(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001201 break;
1202 case kArm64Tst:
1203 __ Tst(i.InputRegister(0), i.InputOperand(1));
1204 break;
1205 case kArm64Tst32:
1206 __ Tst(i.InputRegister32(0), i.InputOperand32(1));
1207 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001208 case kArm64Float32Cmp:
Ben Murdochc5610432016-08-08 18:44:38 +01001209 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001210 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1211 } else {
1212 DCHECK(instr->InputAt(1)->IsImmediate());
1213 // 0.0 is the only immediate supported by fcmp instructions.
1214 DCHECK(i.InputFloat32(1) == 0.0f);
1215 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32(1));
1216 }
1217 break;
1218 case kArm64Float32Add:
1219 __ Fadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
1220 i.InputFloat32Register(1));
1221 break;
1222 case kArm64Float32Sub:
1223 __ Fsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
1224 i.InputFloat32Register(1));
1225 break;
1226 case kArm64Float32Mul:
1227 __ Fmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
1228 i.InputFloat32Register(1));
1229 break;
1230 case kArm64Float32Div:
1231 __ Fdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
1232 i.InputFloat32Register(1));
1233 break;
1234 case kArm64Float32Max:
1235 // (b < a) ? a : b
1236 __ Fcmp(i.InputFloat32Register(1), i.InputFloat32Register(0));
1237 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1238 i.InputFloat32Register(1), lo);
1239 break;
1240 case kArm64Float32Min:
1241 // (a < b) ? a : b
1242 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1243 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1244 i.InputFloat32Register(1), lo);
1245 break;
1246 case kArm64Float32Abs:
1247 __ Fabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
1248 break;
Ben Murdoch61f157c2016-09-16 13:49:30 +01001249 case kArm64Float32Neg:
1250 __ Fneg(i.OutputFloat32Register(), i.InputFloat32Register(0));
1251 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001252 case kArm64Float32Sqrt:
1253 __ Fsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
1254 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001255 case kArm64Float64Cmp:
Ben Murdochc5610432016-08-08 18:44:38 +01001256 if (instr->InputAt(1)->IsFPRegister()) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001257 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1258 } else {
1259 DCHECK(instr->InputAt(1)->IsImmediate());
1260 // 0.0 is the only immediate supported by fcmp instructions.
1261 DCHECK(i.InputDouble(1) == 0.0);
1262 __ Fcmp(i.InputDoubleRegister(0), i.InputDouble(1));
1263 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001264 break;
1265 case kArm64Float64Add:
1266 __ Fadd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1267 i.InputDoubleRegister(1));
1268 break;
1269 case kArm64Float64Sub:
1270 __ Fsub(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1271 i.InputDoubleRegister(1));
1272 break;
1273 case kArm64Float64Mul:
1274 __ Fmul(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1275 i.InputDoubleRegister(1));
1276 break;
1277 case kArm64Float64Div:
1278 __ Fdiv(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1279 i.InputDoubleRegister(1));
1280 break;
1281 case kArm64Float64Mod: {
1282 // TODO(dcarney): implement directly. See note in lithium-codegen-arm64.cc
1283 FrameScope scope(masm(), StackFrame::MANUAL);
1284 DCHECK(d0.is(i.InputDoubleRegister(0)));
1285 DCHECK(d1.is(i.InputDoubleRegister(1)));
1286 DCHECK(d0.is(i.OutputDoubleRegister()));
1287 // TODO(dcarney): make sure this saves all relevant registers.
1288 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
1289 0, 2);
1290 break;
1291 }
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001292 case kArm64Float64Max:
1293 // (b < a) ? a : b
1294 __ Fcmp(i.InputDoubleRegister(1), i.InputDoubleRegister(0));
1295 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1296 i.InputDoubleRegister(1), lo);
1297 break;
1298 case kArm64Float64Min:
1299 // (a < b) ? a : b
1300 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1301 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1302 i.InputDoubleRegister(1), lo);
1303 break;
1304 case kArm64Float64Abs:
1305 __ Fabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1306 break;
1307 case kArm64Float64Neg:
1308 __ Fneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1309 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001310 case kArm64Float64Sqrt:
1311 __ Fsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1312 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001313 case kArm64Float32ToFloat64:
1314 __ Fcvt(i.OutputDoubleRegister(), i.InputDoubleRegister(0).S());
1315 break;
1316 case kArm64Float64ToFloat32:
1317 __ Fcvt(i.OutputDoubleRegister().S(), i.InputDoubleRegister(0));
1318 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001319 case kArm64Float32ToInt32:
1320 __ Fcvtzs(i.OutputRegister32(), i.InputFloat32Register(0));
1321 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001322 case kArm64Float64ToInt32:
1323 __ Fcvtzs(i.OutputRegister32(), i.InputDoubleRegister(0));
1324 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001325 case kArm64Float32ToUint32:
1326 __ Fcvtzu(i.OutputRegister32(), i.InputFloat32Register(0));
1327 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001328 case kArm64Float64ToUint32:
1329 __ Fcvtzu(i.OutputRegister32(), i.InputDoubleRegister(0));
1330 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001331 case kArm64Float32ToInt64:
1332 __ Fcvtzs(i.OutputRegister64(), i.InputFloat32Register(0));
1333 if (i.OutputCount() > 1) {
1334 __ Mov(i.OutputRegister(1), 1);
1335 Label done;
1336 __ Cmp(i.OutputRegister(0), 1);
1337 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1338 __ Fccmp(i.InputFloat32Register(0), i.InputFloat32Register(0), VFlag,
1339 vc);
1340 __ B(vc, &done);
1341 __ Fcmp(i.InputFloat32Register(0), static_cast<float>(INT64_MIN));
1342 __ Cset(i.OutputRegister(1), eq);
1343 __ Bind(&done);
1344 }
1345 break;
1346 case kArm64Float64ToInt64:
1347 __ Fcvtzs(i.OutputRegister(0), i.InputDoubleRegister(0));
1348 if (i.OutputCount() > 1) {
1349 __ Mov(i.OutputRegister(1), 1);
1350 Label done;
1351 __ Cmp(i.OutputRegister(0), 1);
1352 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1353 __ Fccmp(i.InputDoubleRegister(0), i.InputDoubleRegister(0), VFlag, vc);
1354 __ B(vc, &done);
1355 __ Fcmp(i.InputDoubleRegister(0), static_cast<double>(INT64_MIN));
1356 __ Cset(i.OutputRegister(1), eq);
1357 __ Bind(&done);
1358 }
1359 break;
1360 case kArm64Float32ToUint64:
1361 __ Fcvtzu(i.OutputRegister64(), i.InputFloat32Register(0));
1362 if (i.OutputCount() > 1) {
1363 __ Fcmp(i.InputFloat32Register(0), -1.0);
1364 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1365 __ Cset(i.OutputRegister(1), ne);
1366 }
1367 break;
1368 case kArm64Float64ToUint64:
1369 __ Fcvtzu(i.OutputRegister64(), i.InputDoubleRegister(0));
1370 if (i.OutputCount() > 1) {
1371 __ Fcmp(i.InputDoubleRegister(0), -1.0);
1372 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1373 __ Cset(i.OutputRegister(1), ne);
1374 }
1375 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001376 case kArm64Int32ToFloat32:
1377 __ Scvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1378 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001379 case kArm64Int32ToFloat64:
1380 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1381 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001382 case kArm64Int64ToFloat32:
1383 __ Scvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1384 break;
1385 case kArm64Int64ToFloat64:
1386 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1387 break;
Ben Murdoch097c5b22016-05-18 11:27:45 +01001388 case kArm64Uint32ToFloat32:
1389 __ Ucvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1390 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001391 case kArm64Uint32ToFloat64:
1392 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1393 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001394 case kArm64Uint64ToFloat32:
1395 __ Ucvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1396 break;
1397 case kArm64Uint64ToFloat64:
1398 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1399 break;
1400 case kArm64Float64ExtractLowWord32:
1401 __ Fmov(i.OutputRegister32(), i.InputFloat32Register(0));
1402 break;
1403 case kArm64Float64ExtractHighWord32:
1404 // TODO(arm64): This should use MOV (to general) when NEON is supported.
1405 __ Fmov(i.OutputRegister(), i.InputFloat64Register(0));
1406 __ Lsr(i.OutputRegister(), i.OutputRegister(), 32);
1407 break;
1408 case kArm64Float64InsertLowWord32: {
1409 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1410 UseScratchRegisterScope scope(masm());
1411 Register tmp = scope.AcquireX();
1412 __ Fmov(tmp, i.InputFloat64Register(0));
1413 __ Bfi(tmp, i.InputRegister(1), 0, 32);
1414 __ Fmov(i.OutputFloat64Register(), tmp);
1415 break;
1416 }
1417 case kArm64Float64InsertHighWord32: {
1418 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1419 UseScratchRegisterScope scope(masm());
1420 Register tmp = scope.AcquireX();
1421 __ Fmov(tmp.W(), i.InputFloat32Register(0));
1422 __ Bfi(tmp, i.InputRegister(1), 32, 32);
1423 __ Fmov(i.OutputFloat64Register(), tmp);
1424 break;
1425 }
1426 case kArm64Float64MoveU64:
1427 __ Fmov(i.OutputFloat64Register(), i.InputRegister(0));
1428 break;
Ben Murdoch61f157c2016-09-16 13:49:30 +01001429 case kArm64Float64SilenceNaN:
1430 __ CanonicalizeNaN(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1431 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001432 case kArm64U64MoveFloat64:
1433 __ Fmov(i.OutputRegister(), i.InputDoubleRegister(0));
1434 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001435 case kArm64Ldrb:
1436 __ Ldrb(i.OutputRegister(), i.MemoryOperand());
1437 break;
1438 case kArm64Ldrsb:
1439 __ Ldrsb(i.OutputRegister(), i.MemoryOperand());
1440 break;
1441 case kArm64Strb:
Ben Murdochc5610432016-08-08 18:44:38 +01001442 __ Strb(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001443 break;
1444 case kArm64Ldrh:
1445 __ Ldrh(i.OutputRegister(), i.MemoryOperand());
1446 break;
1447 case kArm64Ldrsh:
1448 __ Ldrsh(i.OutputRegister(), i.MemoryOperand());
1449 break;
1450 case kArm64Strh:
Ben Murdochc5610432016-08-08 18:44:38 +01001451 __ Strh(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001452 break;
1453 case kArm64LdrW:
1454 __ Ldr(i.OutputRegister32(), i.MemoryOperand());
1455 break;
1456 case kArm64StrW:
Ben Murdochc5610432016-08-08 18:44:38 +01001457 __ Str(i.InputOrZeroRegister32(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001458 break;
1459 case kArm64Ldr:
1460 __ Ldr(i.OutputRegister(), i.MemoryOperand());
1461 break;
1462 case kArm64Str:
Ben Murdochc5610432016-08-08 18:44:38 +01001463 __ Str(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001464 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001465 case kArm64LdrS:
1466 __ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001467 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001468 case kArm64StrS:
Ben Murdochc5610432016-08-08 18:44:38 +01001469 __ Str(i.InputFloat32OrZeroRegister(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001470 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001471 case kArm64LdrD:
1472 __ Ldr(i.OutputDoubleRegister(), i.MemoryOperand());
1473 break;
1474 case kArm64StrD:
Ben Murdochc5610432016-08-08 18:44:38 +01001475 __ Str(i.InputFloat64OrZeroRegister(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001476 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001477 case kCheckedLoadInt8:
1478 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsb);
1479 break;
1480 case kCheckedLoadUint8:
1481 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrb);
1482 break;
1483 case kCheckedLoadInt16:
1484 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsh);
1485 break;
1486 case kCheckedLoadUint16:
1487 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrh);
1488 break;
1489 case kCheckedLoadWord32:
1490 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldr);
1491 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001492 case kCheckedLoadWord64:
1493 ASSEMBLE_CHECKED_LOAD_INTEGER_64(Ldr);
1494 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001495 case kCheckedLoadFloat32:
1496 ASSEMBLE_CHECKED_LOAD_FLOAT(32);
1497 break;
1498 case kCheckedLoadFloat64:
1499 ASSEMBLE_CHECKED_LOAD_FLOAT(64);
1500 break;
1501 case kCheckedStoreWord8:
1502 ASSEMBLE_CHECKED_STORE_INTEGER(Strb);
1503 break;
1504 case kCheckedStoreWord16:
1505 ASSEMBLE_CHECKED_STORE_INTEGER(Strh);
1506 break;
1507 case kCheckedStoreWord32:
1508 ASSEMBLE_CHECKED_STORE_INTEGER(Str);
1509 break;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001510 case kCheckedStoreWord64:
1511 ASSEMBLE_CHECKED_STORE_INTEGER_64(Str);
1512 break;
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001513 case kCheckedStoreFloat32:
1514 ASSEMBLE_CHECKED_STORE_FLOAT(32);
1515 break;
1516 case kCheckedStoreFloat64:
1517 ASSEMBLE_CHECKED_STORE_FLOAT(64);
1518 break;
Ben Murdochc5610432016-08-08 18:44:38 +01001519 case kAtomicLoadInt8:
1520 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrsb);
1521 break;
1522 case kAtomicLoadUint8:
1523 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrb);
1524 break;
1525 case kAtomicLoadInt16:
1526 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrsh);
1527 break;
1528 case kAtomicLoadUint16:
1529 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrh);
1530 break;
1531 case kAtomicLoadWord32:
1532 __ Ldr(i.OutputRegister32(),
1533 MemOperand(i.InputRegister(0), i.InputRegister(1)));
1534 __ Dmb(InnerShareable, BarrierAll);
1535 break;
1536 case kAtomicStoreWord8:
1537 ASSEMBLE_ATOMIC_STORE_INTEGER(Strb);
1538 break;
1539 case kAtomicStoreWord16:
1540 ASSEMBLE_ATOMIC_STORE_INTEGER(Strh);
1541 break;
1542 case kAtomicStoreWord32:
1543 __ Dmb(InnerShareable, BarrierAll);
1544 __ Str(i.InputRegister32(2),
1545 MemOperand(i.InputRegister(0), i.InputRegister(1)));
1546 __ Dmb(InnerShareable, BarrierAll);
1547 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001548 }
Ben Murdochc5610432016-08-08 18:44:38 +01001549 return kSuccess;
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001550} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001551
1552
1553// Assemble branches after this instruction.
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001554void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001555 Arm64OperandConverter i(this, instr);
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001556 Label* tlabel = branch->true_label;
1557 Label* flabel = branch->false_label;
1558 FlagsCondition condition = branch->condition;
1559 ArchOpcode opcode = instr->arch_opcode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001560
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001561 if (opcode == kArm64CompareAndBranch32) {
1562 switch (condition) {
1563 case kEqual:
1564 __ Cbz(i.InputRegister32(0), tlabel);
1565 break;
1566 case kNotEqual:
1567 __ Cbnz(i.InputRegister32(0), tlabel);
1568 break;
1569 default:
1570 UNREACHABLE();
1571 }
Ben Murdoch61f157c2016-09-16 13:49:30 +01001572 } else if (opcode == kArm64CompareAndBranch) {
1573 switch (condition) {
1574 case kEqual:
1575 __ Cbz(i.InputRegister64(0), tlabel);
1576 break;
1577 case kNotEqual:
1578 __ Cbnz(i.InputRegister64(0), tlabel);
1579 break;
1580 default:
1581 UNREACHABLE();
1582 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001583 } else if (opcode == kArm64TestAndBranch32) {
1584 switch (condition) {
1585 case kEqual:
1586 __ Tbz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1587 break;
1588 case kNotEqual:
1589 __ Tbnz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1590 break;
1591 default:
1592 UNREACHABLE();
1593 }
1594 } else if (opcode == kArm64TestAndBranch) {
1595 switch (condition) {
1596 case kEqual:
1597 __ Tbz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1598 break;
1599 case kNotEqual:
1600 __ Tbnz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1601 break;
1602 default:
1603 UNREACHABLE();
1604 }
1605 } else {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001606 Condition cc = FlagsConditionToCondition(condition);
1607 __ B(cc, tlabel);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001608 }
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001609 if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
1610}
1611
1612
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001613void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001614 if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001615}
1616
1617
1618// Assemble boolean materializations after this instruction.
1619void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1620 FlagsCondition condition) {
1621 Arm64OperandConverter i(this, instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001622
1623 // Materialize a full 64-bit 1 or 0 value. The result register is always the
1624 // last output of the instruction.
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001625 DCHECK_NE(0u, instr->OutputCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001626 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001627 Condition cc = FlagsConditionToCondition(condition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001628 __ Cset(reg, cc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001629}
1630
1631
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001632void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1633 Arm64OperandConverter i(this, instr);
1634 Register input = i.InputRegister32(0);
1635 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1636 __ Cmp(input, i.InputInt32(index + 0));
1637 __ B(eq, GetLabel(i.InputRpo(index + 1)));
1638 }
1639 AssembleArchJump(i.InputRpo(1));
1640}
1641
1642
1643void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1644 Arm64OperandConverter i(this, instr);
1645 UseScratchRegisterScope scope(masm());
1646 Register input = i.InputRegister32(0);
1647 Register temp = scope.AcquireX();
1648 size_t const case_count = instr->InputCount() - 2;
1649 Label table;
1650 __ Cmp(input, case_count);
1651 __ B(hs, GetLabel(i.InputRpo(1)));
1652 __ Adr(temp, &table);
1653 __ Add(temp, temp, Operand(input, UXTW, 2));
1654 __ Br(temp);
1655 __ StartBlockPools();
1656 __ Bind(&table);
1657 for (size_t index = 0; index < case_count; ++index) {
1658 __ B(GetLabel(i.InputRpo(index + 2)));
1659 }
1660 __ EndBlockPools();
1661}
1662
// Emits a call to the deoptimization entry stub for the given bailout.
// Returns kTooManyDeoptimizationBailouts when no entry could be allocated.
CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
    int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
  Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
      isolate(), deoptimization_id, bailout_type);
  // A null entry means the deoptimization entry table is exhausted.
  if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
  __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
  return kSuccess;
}
1671
Ben Murdochc5610432016-08-08 18:44:38 +01001672void CodeGenerator::FinishFrame(Frame* frame) {
1673 frame->AlignFrame(16);
1674 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1675
Ben Murdochda12d292016-06-02 14:46:10 +01001676 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) {
1677 __ SetStackPointer(csp);
1678 } else {
1679 __ SetStackPointer(jssp);
1680 }
Ben Murdochc5610432016-08-08 18:44:38 +01001681
1682 // Save FP registers.
1683 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
1684 descriptor->CalleeSavedFPRegisters());
1685 int saved_count = saves_fp.Count();
1686 if (saved_count != 0) {
1687 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
1688 frame->AllocateSavedCalleeRegisterSlots(saved_count *
1689 (kDoubleSize / kPointerSize));
1690 }
1691
1692 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
1693 descriptor->CalleeSavedRegisters());
1694 saved_count = saves.Count();
1695 if (saved_count != 0) {
1696 frame->AllocateSavedCalleeRegisterSlots(saved_count);
1697 }
Ben Murdochda12d292016-06-02 14:46:10 +01001698}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001699
// Emits the frame-construction prologue for the current code object.
void CodeGenerator::AssembleConstructFrame() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->UseNativeStack()) {
    __ AssertCspAligned();
  }

  if (frame_access_state()->has_frame()) {
    if (descriptor->IsJSFunctionCall()) {
      // JS function frames never run on the native (csp) stack here.
      DCHECK(!descriptor->UseNativeStack());
      __ Prologue(this->info()->GeneratePreagedPrologue());
    } else {
      if (descriptor->IsCFunctionCall()) {
        // Standard C frame: push lr/fp, point fp at the new frame, then
        // reserve the spill area.
        __ Push(lr, fp);
        __ Mov(fp, masm_.StackPointer());
        __ Claim(frame()->GetSpillSlotCount());
      } else {
        __ StubPrologue(info()->GetOutputStackFrameType(),
                        frame()->GetTotalFrameSlotCount());
      }
    }
  }

  // Number of stack slots still to be claimed below.
  int shrink_slots = frame()->GetSpillSlotCount();

  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // Slots already present in the unoptimized frame need not be claimed.
    shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  if (descriptor->IsJSFunctionCall()) {
    __ Claim(shrink_slots);
  }

  // Save FP registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  int saved_count = saves_fp.Count();
  if (saved_count != 0) {
    DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
    __ PushCPURegList(saves_fp);
  }
  // Save registers.
  // TODO(palfia): TF save list is not in sync with
  // CPURegList::GetCalleeSaved(): x30 is missing.
  // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list());
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  saved_count = saves.Count();
  if (saved_count != 0) {
    __ PushCPURegList(saves);
  }
}
1760
1761
// Emits the return sequence: restores callee-saved registers, deconstructs
// the frame when present, drops stack parameters and returns.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Restore registers.
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  if (saves.Count() != 0) {
    __ PopCPURegList(saves);
  }

  // Restore fp registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  if (saves_fp.Count() != 0) {
    __ PopCPURegList(saves_fp);
  }

  int pop_count = static_cast<int>(descriptor->StackParameterCount());
  if (descriptor->IsCFunctionCall()) {
    AssembleDeconstructFrame();
  } else if (frame_access_state()->has_frame()) {
    // Canonicalize JSFunction return sites for now: every return branches to
    // a single shared epilogue.
    if (return_label_.is_bound()) {
      __ B(&return_label_);
      return;
    } else {
      __ Bind(&return_label_);
      AssembleDeconstructFrame();
      if (descriptor->UseNativeStack()) {
        // csp must stay 16-byte aligned, so round the drop count up to even.
        pop_count += (pop_count & 1);  // align
      }
    }
  } else if (descriptor->UseNativeStack()) {
    pop_count += (pop_count & 1);  // align
  }
  __ Drop(pop_count);

  if (descriptor->UseNativeStack()) {
    __ AssertCspAligned();
  }
  __ Ret();
}
1804
1805
// Emits code to move a value between the given operands (register, stack
// slot, FP register, FP stack slot, or constant source).
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  Arm64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Mov(g.ToRegister(destination), src);
    } else {
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsStackSlot()) {
    MemOperand src = g.ToMemOperand(source, masm());
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    if (destination->IsRegister()) {
      __ Ldr(g.ToRegister(destination), src);
    } else {
      // Stack-to-stack move: bounce through a scratch register.
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireX();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(ConstantOperand::cast(source));
    if (destination->IsRegister() || destination->IsStackSlot()) {
      UseScratchRegisterScope scope(masm());
      // Materialize into the destination register directly, or into a
      // scratch register when the destination is a stack slot.
      Register dst = destination->IsRegister() ? g.ToRegister(destination)
                                               : scope.AcquireX();
      if (src.type() == Constant::kHeapObject) {
        Handle<HeapObject> src_object = src.ToHeapObject();
        Heap::RootListIndex index;
        int slot;
        // Prefer cheaper sources for the heap object: an existing frame
        // slot, then the root list, then an embedded object load.
        if (IsMaterializableFromFrame(src_object, &slot)) {
          __ Ldr(dst, g.SlotToMemOperand(slot, masm()));
        } else if (IsMaterializableFromRoot(src_object, &index)) {
          __ LoadRoot(dst, index);
        } else {
          __ LoadObject(dst, src_object);
        }
      } else {
        __ Mov(dst, g.ToImmediate(source));
      }
      if (destination->IsStackSlot()) {
        __ Str(dst, g.ToMemOperand(destination, masm()));
      }
    } else if (src.type() == Constant::kFloat32) {
      if (destination->IsFPRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination).S();
        __ Fmov(dst, src.ToFloat32());
      } else {
        DCHECK(destination->IsFPStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireS();
        __ Fmov(temp, src.ToFloat32());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    } else {
      DCHECK_EQ(Constant::kFloat64, src.type());
      if (destination->IsFPRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination);
        __ Fmov(dst, src.ToFloat64());
      } else {
        DCHECK(destination->IsFPStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireD();
        __ Fmov(temp, src.ToFloat64());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    }
  } else if (source->IsFPRegister()) {
    FPRegister src = g.ToDoubleRegister(source);
    if (destination->IsFPRegister()) {
      FPRegister dst = g.ToDoubleRegister(destination);
      __ Fmov(dst, src);
    } else {
      DCHECK(destination->IsFPStackSlot());
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsFPStackSlot()) {
    DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
    MemOperand src = g.ToMemOperand(source, masm());
    if (destination->IsFPRegister()) {
      __ Ldr(g.ToDoubleRegister(destination), src);
    } else {
      // FP stack-to-stack move via a scratch D register.
      UseScratchRegisterScope scope(masm());
      FPRegister temp = scope.AcquireD();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else {
    UNREACHABLE();
  }
}
1901
1902
1903void CodeGenerator::AssembleSwap(InstructionOperand* source,
1904 InstructionOperand* destination) {
Ben Murdoch4a90d5f2016-03-22 12:00:34 +00001905 Arm64OperandConverter g(this, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001906 // Dispatch on the source and destination operand kinds. Not all
1907 // combinations are possible.
1908 if (source->IsRegister()) {
1909 // Register-register.
1910 UseScratchRegisterScope scope(masm());
1911 Register temp = scope.AcquireX();
1912 Register src = g.ToRegister(source);
1913 if (destination->IsRegister()) {
1914 Register dst = g.ToRegister(destination);
1915 __ Mov(temp, src);
1916 __ Mov(src, dst);
1917 __ Mov(dst, temp);
1918 } else {
1919 DCHECK(destination->IsStackSlot());
1920 MemOperand dst = g.ToMemOperand(destination, masm());
1921 __ Mov(temp, src);
1922 __ Ldr(src, dst);
1923 __ Str(temp, dst);
1924 }
Ben Murdochc5610432016-08-08 18:44:38 +01001925 } else if (source->IsStackSlot() || source->IsFPStackSlot()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001926 UseScratchRegisterScope scope(masm());
Emily Bernierd0a1eb72015-03-24 16:35:39 -04001927 DoubleRegister temp_0 = scope.AcquireD();
1928 DoubleRegister temp_1 = scope.AcquireD();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001929 MemOperand src = g.ToMemOperand(source, masm());
1930 MemOperand dst = g.ToMemOperand(destination, masm());
1931 __ Ldr(temp_0, src);
1932 __ Ldr(temp_1, dst);
1933 __ Str(temp_0, dst);
1934 __ Str(temp_1, src);
Ben Murdochc5610432016-08-08 18:44:38 +01001935 } else if (source->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001936 UseScratchRegisterScope scope(masm());
1937 FPRegister temp = scope.AcquireD();
1938 FPRegister src = g.ToDoubleRegister(source);
Ben Murdochc5610432016-08-08 18:44:38 +01001939 if (destination->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001940 FPRegister dst = g.ToDoubleRegister(destination);
1941 __ Fmov(temp, src);
1942 __ Fmov(src, dst);
1943 __ Fmov(dst, temp);
1944 } else {
Ben Murdochc5610432016-08-08 18:44:38 +01001945 DCHECK(destination->IsFPStackSlot());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001946 MemOperand dst = g.ToMemOperand(destination, masm());
1947 __ Fmov(temp, src);
1948 __ Ldr(src, dst);
1949 __ Str(temp, dst);
1950 }
1951 } else {
1952 // No other combinations are possible.
1953 UNREACHABLE();
1954 }
1955}
1956
1957
// Out-of-line jump tables are never requested on this architecture.
void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
  // On 64-bit ARM we emit the jump tables inline.
  UNREACHABLE();
}
1962
1963
// Pads the instruction stream with nops so that the code emitted at the
// current position can later be patched for a lazy deoptimization without
// overwriting the previous lazy-bailout site.
void CodeGenerator::EnsureSpaceForLazyDeopt() {
  if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
    return;
  }

  int space_needed = Deoptimizer::patch_size();
  // Ensure that we have enough space after the previous lazy-bailout
  // instruction for patching the code here.
  intptr_t current_pc = masm()->pc_offset();

  if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
    intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
    // Padding must be a whole number of A64 instructions.
    DCHECK((padding_size % kInstructionSize) == 0);
    InstructionAccurateScope instruction_accurate(
        masm(), padding_size / kInstructionSize);

    while (padding_size > 0) {
      __ nop();
      padding_size -= kInstructionSize;
    }
  }
}
1986
1987#undef __
1988
1989} // namespace compiler
1990} // namespace internal
1991} // namespace v8