blob: e45c677619393662a606fe0ff31b5da7a75a793d [file] [log] [blame]
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001// Copyright 2014 the V8 project authors. All rights reserved.
2// Use of this source code is governed by a BSD-style license that can be
3// found in the LICENSE file.
4
5#include "src/compiler/code-generator.h"
6
Ben Murdoch014dc512016-03-22 12:00:34 +00007#include "src/arm64/frames-arm64.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +00008#include "src/arm64/macro-assembler-arm64.h"
Ben Murdoch014dc512016-03-22 12:00:34 +00009#include "src/ast/scopes.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000010#include "src/compiler/code-generator-impl.h"
11#include "src/compiler/gap-resolver.h"
12#include "src/compiler/node-matchers.h"
Ben Murdoch014dc512016-03-22 12:00:34 +000013#include "src/compiler/osr.h"
Ben Murdochb8a8cc12014-11-26 15:28:44 +000014
15namespace v8 {
16namespace internal {
17namespace compiler {
18
19#define __ masm()->
20
21
// Adds Arm64-specific methods to convert InstructionOperands.
class Arm64OperandConverter final : public InstructionOperandConverter {
 public:
  Arm64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  // Float32 values live in the S (low 32-bit) view of a D register.
  DoubleRegister InputFloat32Register(size_t index) {
    return InputDoubleRegister(index).S();
  }

  DoubleRegister InputFloat64Register(size_t index) {
    return InputDoubleRegister(index);
  }

  size_t OutputCount() { return instr_->OutputCount(); }

  DoubleRegister OutputFloat32Register() { return OutputDoubleRegister().S(); }

  DoubleRegister OutputFloat64Register() { return OutputDoubleRegister(); }

  // W (32-bit) view of the input general-purpose register.
  Register InputRegister32(size_t index) {
    return ToRegister(instr_->InputAt(index)).W();
  }

  // Like InputRegister32, but an immediate input (which the DCHECK requires
  // to be 0) maps to wzr, so "zero" needs no allocated register.
  Register InputOrZeroRegister32(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt32(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return wzr;
    }
    return InputRegister32(index);
  }

  Register InputRegister64(size_t index) { return InputRegister(index); }

  // 64-bit analogue of InputOrZeroRegister32: immediate 0 maps to xzr.
  Register InputOrZeroRegister64(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt64(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return xzr;
    }
    return InputRegister64(index);
  }

  Operand InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index) {
    return ToOperand(instr_->InputAt(index));
  }

  Operand InputOperand64(size_t index) { return InputOperand(index); }

  Operand InputOperand32(size_t index) {
    return ToOperand32(instr_->InputAt(index));
  }

  Register OutputRegister64() { return OutputRegister(); }

  Register OutputRegister32() { return ToRegister(instr_->Output()).W(); }

  // Decodes the instruction's addressing mode into a 32-bit "operand2"
  // (shifted or extended register form). When a shift amount is present it
  // is taken from the next input (index + 1); shift amounts for 32-bit
  // operations are 5 bits wide (InputInt5).
  Operand InputOperand2_32(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand32(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister32(index), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister32(index), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister32(index), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister32(index), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister32(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister32(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister32(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister32(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // 64-bit analogue of InputOperand2_32; shift amounts for 64-bit
  // operations are 6 bits wide (InputInt6).
  Operand InputOperand2_64(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand64(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister64(index), LSL, InputInt6(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister64(index), LSR, InputInt6(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister64(index), ASR, InputInt6(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister64(index), ROR, InputInt6(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister64(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister64(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister64(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister64(index), SXTH);
      case kMode_MRI:
      case kMode_MRR:
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // Decodes a memory operand starting at input *first_index and advances
  // *first_index past the inputs consumed. Only the MRI (base register +
  // immediate) and MRR (base register + index register) modes form memory
  // operands; all operand2 modes are unreachable here.
  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_R_LSL_I:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_UXTB:
      case kMode_Operand2_R_UXTH:
      case kMode_Operand2_R_SXTB:
      case kMode_Operand2_R_SXTH:
        break;
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  // Convenience overload when the caller does not need the advanced index.
  MemOperand MemoryOperand(size_t first_index = 0) {
    return MemoryOperand(&first_index);
  }

  Operand ToOperand(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return ToImmediate(op);
  }

  Operand ToOperand32(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op).W());
    }
    return ToImmediate(op);
  }

  // Converts a constant operand to an assembler Operand. Note that float
  // constants are boxed as tenured heap numbers here.
  Operand ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    switch (constant.type()) {
      case Constant::kInt32:
        return Operand(constant.ToInt32());
      case Constant::kInt64:
        return Operand(constant.ToInt64());
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Operand(constant.ToHeapObject());
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  // Converts a stack slot operand to a MemOperand addressed off either the
  // frame pointer or the stack pointer.
  MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsDoubleStackSlot());
    FrameOffset offset = frame_access_state()->GetFrameOffset(
        AllocatedOperand::cast(op)->index());
    if (offset.from_frame_pointer()) {
      int from_sp =
          offset.offset() +
          ((frame()->GetSpToFpSlotCount() + frame_access_state()->sp_delta()) *
           kPointerSize);
      // Convert FP-offsets to SP-offsets if it results in better code, i.e.
      // when the equivalent SP offset fits an unscaled or scaled load/store
      // immediate encoding.
      if (Assembler::IsImmLSUnscaled(from_sp) ||
          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
        offset = FrameOffset::FromStackPointer(from_sp);
      }
    }
    return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                      offset.offset());
  }
};
227
228
Emily Bernier958fae72015-03-24 16:35:39 -0400229namespace {
230
Ben Murdoch014dc512016-03-22 12:00:34 +0000231class OutOfLineLoadNaN32 final : public OutOfLineCode {
Emily Bernier958fae72015-03-24 16:35:39 -0400232 public:
233 OutOfLineLoadNaN32(CodeGenerator* gen, DoubleRegister result)
234 : OutOfLineCode(gen), result_(result) {}
235
Ben Murdoch014dc512016-03-22 12:00:34 +0000236 void Generate() final {
Emily Bernier958fae72015-03-24 16:35:39 -0400237 __ Fmov(result_, std::numeric_limits<float>::quiet_NaN());
238 }
239
240 private:
241 DoubleRegister const result_;
242};
243
244
Ben Murdoch014dc512016-03-22 12:00:34 +0000245class OutOfLineLoadNaN64 final : public OutOfLineCode {
Emily Bernier958fae72015-03-24 16:35:39 -0400246 public:
247 OutOfLineLoadNaN64(CodeGenerator* gen, DoubleRegister result)
248 : OutOfLineCode(gen), result_(result) {}
249
Ben Murdoch014dc512016-03-22 12:00:34 +0000250 void Generate() final {
Emily Bernier958fae72015-03-24 16:35:39 -0400251 __ Fmov(result_, std::numeric_limits<double>::quiet_NaN());
252 }
253
254 private:
255 DoubleRegister const result_;
256};
257
258
Ben Murdoch014dc512016-03-22 12:00:34 +0000259class OutOfLineLoadZero final : public OutOfLineCode {
Emily Bernier958fae72015-03-24 16:35:39 -0400260 public:
261 OutOfLineLoadZero(CodeGenerator* gen, Register result)
262 : OutOfLineCode(gen), result_(result) {}
263
Ben Murdoch014dc512016-03-22 12:00:34 +0000264 void Generate() final { __ Mov(result_, 0); }
Emily Bernier958fae72015-03-24 16:35:39 -0400265
266 private:
267 Register const result_;
268};
269
Ben Murdoch014dc512016-03-22 12:00:34 +0000270
// Out-of-line slow path for kArchStoreWithWriteBarrier: calls the record
// write stub for a store of |value| into |object| at |index| when the GC
// page flags indicate the barrier is actually needed. |scratch1| is
// clobbered (it receives object + index before the stub call).
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode) {}

  void Generate() final {
    // Smi values never need a write barrier; only checked for modes beyond
    // kValueIsPointer.
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    // Skip the stub entirely when the target page is not interesting to
    // the incremental marker.
    __ CheckPageFlagClear(value_, scratch0_,
                          MemoryChunk::kPointersToHereAreInterestingMask,
                          exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (!frame()->needs_frame()) {
      // We need to save and restore lr if the frame was elided.
      __ Push(lr);
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    // The stub expects the slot address in scratch1_.
    __ Add(scratch1_, object_, index_);
    __ CallStub(&stub);
    if (!frame()->needs_frame()) {
      __ Pop(lr);
    }
  }

 private:
  Register const object_;
  Operand const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
};
317
318
319Condition FlagsConditionToCondition(FlagsCondition condition) {
320 switch (condition) {
321 case kEqual:
322 return eq;
323 case kNotEqual:
324 return ne;
325 case kSignedLessThan:
326 return lt;
327 case kSignedGreaterThanOrEqual:
328 return ge;
329 case kSignedLessThanOrEqual:
330 return le;
331 case kSignedGreaterThan:
332 return gt;
333 case kUnsignedLessThan:
334 return lo;
335 case kUnsignedGreaterThanOrEqual:
336 return hs;
337 case kUnsignedLessThanOrEqual:
338 return ls;
339 case kUnsignedGreaterThan:
340 return hi;
341 case kFloatLessThanOrUnordered:
342 return lt;
343 case kFloatGreaterThanOrEqual:
344 return ge;
345 case kFloatLessThanOrEqual:
346 return ls;
347 case kFloatGreaterThanOrUnordered:
348 return hi;
349 case kFloatLessThan:
350 return lo;
351 case kFloatGreaterThanOrEqualOrUnordered:
352 return hs;
353 case kFloatLessThanOrEqualOrUnordered:
354 return le;
355 case kFloatGreaterThan:
356 return gt;
357 case kOverflow:
358 return vs;
359 case kNotOverflow:
360 return vc;
361 case kUnorderedEqual:
362 case kUnorderedNotEqual:
363 break;
364 }
365 UNREACHABLE();
366 return nv;
367}
368
Emily Bernier958fae72015-03-24 16:35:39 -0400369} // namespace
370
371
// Checked float load: compares the 32-bit offset against the length
// operand; when out of bounds, jumps to an out-of-line stub that
// materializes a quiet NaN into the result instead of loading.
#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                         \
  do {                                                             \
    auto result = i.OutputFloat##width##Register();                \
    auto buffer = i.InputRegister(0);                              \
    auto offset = i.InputRegister32(1);                            \
    auto length = i.InputOperand32(2);                             \
    __ Cmp(offset, length);                                        \
    auto ool = new (zone()) OutOfLineLoadNaN##width(this, result); \
    __ B(hs, ool->entry());                                        \
    __ Ldr(result, MemOperand(buffer, offset, UXTW));              \
    __ Bind(ool->exit());                                          \
  } while (0)


// Checked 32-bit integer load: out-of-bounds offsets take the out-of-line
// path, which zeroes the result register.
#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)         \
  do {                                                   \
    auto result = i.OutputRegister32();                  \
    auto buffer = i.InputRegister(0);                    \
    auto offset = i.InputRegister32(1);                  \
    auto length = i.InputOperand32(2);                   \
    __ Cmp(offset, length);                              \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ B(hs, ool->entry());                              \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW)); \
    __ Bind(ool->exit());                                \
  } while (0)


// Checked 64-bit integer load: same bounds check as above, but the result
// is a full X register.
#define ASSEMBLE_CHECKED_LOAD_INTEGER_64(asm_instr)      \
  do {                                                   \
    auto result = i.OutputRegister();                    \
    auto buffer = i.InputRegister(0);                    \
    auto offset = i.InputRegister32(1);                  \
    auto length = i.InputOperand32(2);                   \
    __ Cmp(offset, length);                              \
    auto ool = new (zone()) OutOfLineLoadZero(this, result); \
    __ B(hs, ool->entry());                              \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW)); \
    __ Bind(ool->exit());                                \
  } while (0)


// Checked float store: out-of-bounds stores are silently skipped (branch
// over the store to the local `done` label).
#define ASSEMBLE_CHECKED_STORE_FLOAT(width)          \
  do {                                               \
    auto buffer = i.InputRegister(0);                \
    auto offset = i.InputRegister32(1);              \
    auto length = i.InputOperand32(2);               \
    auto value = i.InputFloat##width##Register(3);   \
    __ Cmp(offset, length);                          \
    Label done;                                      \
    __ B(hs, &done);                                 \
    __ Str(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                  \
  } while (0)


// Checked 32-bit integer store: out-of-bounds stores are silently skipped.
#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)          \
  do {                                                     \
    auto buffer = i.InputRegister(0);                      \
    auto offset = i.InputRegister32(1);                    \
    auto length = i.InputOperand32(2);                     \
    auto value = i.InputRegister32(3);                     \
    __ Cmp(offset, length);                                \
    Label done;                                            \
    __ B(hs, &done);                                       \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                        \
  } while (0)


// Checked 64-bit integer store: out-of-bounds stores are silently skipped.
#define ASSEMBLE_CHECKED_STORE_INTEGER_64(asm_instr)       \
  do {                                                     \
    auto buffer = i.InputRegister(0);                      \
    auto offset = i.InputRegister32(1);                    \
    auto length = i.InputOperand32(2);                     \
    auto value = i.InputRegister(3);                       \
    __ Cmp(offset, length);                                \
    Label done;                                            \
    __ B(hs, &done);                                       \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW)); \
    __ Bind(&done);                                        \
  } while (0)


// Shift by register or immediate; immediate shift amounts are reduced
// modulo the operand width (32 or 64), matching the hardware behavior of
// register-specified shifts.
#define ASSEMBLE_SHIFT(asm_instr, width)                                    \
  do {                                                                      \
    if (instr->InputAt(1)->IsRegister()) {                                  \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   i.InputRegister##width(1));                              \
    } else {                                                                \
      uint32_t imm =                                                        \
          static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   imm % (width));                                          \
    }                                                                       \
  } while (0)
468
469
470void CodeGenerator::AssembleDeconstructActivationRecord(int stack_param_delta) {
471 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
472 if (sp_slot_delta > 0) {
473 __ Drop(sp_slot_delta);
474 }
475 frame_access_state()->SetFrameAccessToDefault();
476}
477
478
479void CodeGenerator::AssemblePrepareTailCall(int stack_param_delta) {
480 int sp_slot_delta = TailCallFrameStackSlotDelta(stack_param_delta);
481 if (sp_slot_delta < 0) {
482 __ Claim(-sp_slot_delta);
483 frame_access_state()->IncreaseSPDelta(-sp_slot_delta);
484 }
485 if (frame()->needs_frame()) {
486 __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
487 __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
488 }
489 frame_access_state()->SetFrameAccessToSP();
490}
491
492
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000493// Assembles an instruction after register allocation, producing machine code.
494void CodeGenerator::AssembleArchInstruction(Instruction* instr) {
495 Arm64OperandConverter i(this, instr);
496 InstructionCode opcode = instr->opcode();
Ben Murdoch109988c2016-05-18 11:27:45 +0100497 ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
498 switch (arch_opcode) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000499 case kArchCallCodeObject: {
500 EnsureSpaceForLazyDeopt();
501 if (instr->InputAt(0)->IsImmediate()) {
502 __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
503 RelocInfo::CODE_TARGET);
504 } else {
505 Register target = i.InputRegister(0);
506 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
507 __ Call(target);
508 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100509 // TODO(titzer): this is ugly. JSSP should be a caller-save register
510 // in this case, but it is not possible to express in the register
511 // allocator.
512 CallDescriptor::Flags flags =
513 static_cast<CallDescriptor::Flags>(MiscField::decode(opcode));
514 if (flags & CallDescriptor::kRestoreJSSP) {
515 __ mov(jssp, csp);
516 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000517 frame_access_state()->ClearSPDelta();
518 RecordCallPosition(instr);
519 break;
520 }
521 case kArchTailCallCodeObject: {
522 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
523 AssembleDeconstructActivationRecord(stack_param_delta);
524 if (instr->InputAt(0)->IsImmediate()) {
525 __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
526 RelocInfo::CODE_TARGET);
527 } else {
528 Register target = i.InputRegister(0);
529 __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
530 __ Jump(target);
531 }
532 frame_access_state()->ClearSPDelta();
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000533 break;
534 }
535 case kArchCallJSFunction: {
536 EnsureSpaceForLazyDeopt();
537 Register func = i.InputRegister(0);
538 if (FLAG_debug_code) {
539 // Check the function's context matches the context argument.
540 UseScratchRegisterScope scope(masm());
541 Register temp = scope.AcquireX();
542 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
543 __ cmp(cp, temp);
544 __ Assert(eq, kWrongFunctionContext);
545 }
546 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
547 __ Call(x10);
Ben Murdoch109988c2016-05-18 11:27:45 +0100548 // TODO(titzer): this is ugly. JSSP should be a caller-save register
549 // in this case, but it is not possible to express in the register
550 // allocator.
551 CallDescriptor::Flags flags =
552 static_cast<CallDescriptor::Flags>(MiscField::decode(opcode));
553 if (flags & CallDescriptor::kRestoreJSSP) {
554 __ mov(jssp, csp);
555 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000556 frame_access_state()->ClearSPDelta();
557 RecordCallPosition(instr);
558 break;
559 }
560 case kArchTailCallJSFunction: {
561 Register func = i.InputRegister(0);
562 if (FLAG_debug_code) {
563 // Check the function's context matches the context argument.
564 UseScratchRegisterScope scope(masm());
565 Register temp = scope.AcquireX();
566 __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
567 __ cmp(cp, temp);
568 __ Assert(eq, kWrongFunctionContext);
569 }
570 int stack_param_delta = i.InputInt32(instr->InputCount() - 1);
571 AssembleDeconstructActivationRecord(stack_param_delta);
572 __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
573 __ Jump(x10);
574 frame_access_state()->ClearSPDelta();
575 break;
576 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000577 case kArchPrepareCallCFunction:
578 // We don't need kArchPrepareCallCFunction on arm64 as the instruction
579 // selector already perform a Claim to reserve space on the stack and
580 // guarantee correct alignment of stack pointer.
581 UNREACHABLE();
582 break;
583 case kArchPrepareTailCall:
584 AssemblePrepareTailCall(i.InputInt32(instr->InputCount() - 1));
585 break;
586 case kArchCallCFunction: {
587 int const num_parameters = MiscField::decode(instr->opcode());
588 if (instr->InputAt(0)->IsImmediate()) {
589 ExternalReference ref = i.InputExternalReference(0);
590 __ CallCFunction(ref, num_parameters, 0);
591 } else {
592 Register func = i.InputRegister(0);
593 __ CallCFunction(func, num_parameters, 0);
594 }
595 // CallCFunction only supports register arguments so we never need to call
596 // frame()->ClearOutgoingParameterSlots() here.
597 DCHECK(frame_access_state()->sp_delta() == 0);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000598 break;
599 }
600 case kArchJmp:
Emily Bernier958fae72015-03-24 16:35:39 -0400601 AssembleArchJump(i.InputRpo(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000602 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000603 case kArchTableSwitch:
604 AssembleArchTableSwitch(instr);
605 break;
606 case kArchLookupSwitch:
607 AssembleArchLookupSwitch(instr);
608 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000609 case kArchNop:
Ben Murdoch014dc512016-03-22 12:00:34 +0000610 case kArchThrowTerminator:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000611 // don't emit code for nops.
612 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000613 case kArchDeoptimize: {
614 int deopt_state_id =
615 BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
616 Deoptimizer::BailoutType bailout_type =
617 Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
618 AssembleDeoptimizerCall(deopt_state_id, bailout_type);
619 break;
620 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000621 case kArchRet:
622 AssembleReturn();
623 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400624 case kArchStackPointer:
625 __ mov(i.OutputRegister(), masm()->StackPointer());
626 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000627 case kArchFramePointer:
628 __ mov(i.OutputRegister(), fp);
629 break;
Ben Murdoch109988c2016-05-18 11:27:45 +0100630 case kArchParentFramePointer:
631 if (frame_access_state()->frame()->needs_frame()) {
632 __ ldr(i.OutputRegister(), MemOperand(fp, 0));
633 } else {
634 __ mov(i.OutputRegister(), fp);
635 }
636 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000637 case kArchTruncateDoubleToI:
638 __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
639 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000640 case kArchStoreWithWriteBarrier: {
641 RecordWriteMode mode =
642 static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
Ben Murdoch109988c2016-05-18 11:27:45 +0100643 AddressingMode addressing_mode =
644 AddressingModeField::decode(instr->opcode());
Ben Murdoch014dc512016-03-22 12:00:34 +0000645 Register object = i.InputRegister(0);
Ben Murdoch109988c2016-05-18 11:27:45 +0100646 Operand index(0);
647 if (addressing_mode == kMode_MRI) {
648 index = Operand(i.InputInt64(1));
649 } else {
650 DCHECK_EQ(addressing_mode, kMode_MRR);
651 index = Operand(i.InputRegister(1));
652 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000653 Register value = i.InputRegister(2);
654 Register scratch0 = i.TempRegister(0);
655 Register scratch1 = i.TempRegister(1);
656 auto ool = new (zone()) OutOfLineRecordWrite(this, object, index, value,
657 scratch0, scratch1, mode);
658 __ Str(value, MemOperand(object, index));
659 __ CheckPageFlagSet(object, scratch0,
660 MemoryChunk::kPointersFromHereAreInterestingMask,
661 ool->entry());
662 __ Bind(ool->exit());
Emily Bernier958fae72015-03-24 16:35:39 -0400663 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000664 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100665 case kArchStackSlot: {
666 FrameOffset offset =
667 frame_access_state()->GetFrameOffset(i.InputInt32(0));
668 Register base;
669 if (offset.from_stack_pointer()) {
670 base = __ StackPointer();
671 } else {
672 base = fp;
673 }
674 __ Add(i.OutputRegister(0), base, Operand(offset.offset()));
675 break;
676 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000677 case kArm64Float32RoundDown:
678 __ Frintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
679 break;
680 case kArm64Float64RoundDown:
Emily Bernier958fae72015-03-24 16:35:39 -0400681 __ Frintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
682 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000683 case kArm64Float32RoundUp:
684 __ Frintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
685 break;
686 case kArm64Float64RoundUp:
687 __ Frintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
Emily Bernier958fae72015-03-24 16:35:39 -0400688 break;
689 case kArm64Float64RoundTiesAway:
690 __ Frinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
691 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000692 case kArm64Float32RoundTruncate:
693 __ Frintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
694 break;
695 case kArm64Float64RoundTruncate:
696 __ Frintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
697 break;
698 case kArm64Float32RoundTiesEven:
699 __ Frintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
700 break;
701 case kArm64Float64RoundTiesEven:
702 __ Frintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
703 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000704 case kArm64Add:
Ben Murdoch014dc512016-03-22 12:00:34 +0000705 if (FlagsModeField::decode(opcode) != kFlags_none) {
706 __ Adds(i.OutputRegister(), i.InputOrZeroRegister64(0),
707 i.InputOperand2_64(1));
708 } else {
709 __ Add(i.OutputRegister(), i.InputOrZeroRegister64(0),
710 i.InputOperand2_64(1));
711 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000712 break;
713 case kArm64Add32:
714 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000715 __ Adds(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400716 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000717 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +0000718 __ Add(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400719 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000720 }
721 break;
722 case kArm64And:
Ben Murdoch014dc512016-03-22 12:00:34 +0000723 __ And(i.OutputRegister(), i.InputOrZeroRegister64(0),
724 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000725 break;
726 case kArm64And32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000727 __ And(i.OutputRegister32(), i.InputOrZeroRegister32(0),
728 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400729 break;
730 case kArm64Bic:
Ben Murdoch014dc512016-03-22 12:00:34 +0000731 __ Bic(i.OutputRegister(), i.InputOrZeroRegister64(0),
732 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400733 break;
734 case kArm64Bic32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000735 __ Bic(i.OutputRegister32(), i.InputOrZeroRegister32(0),
736 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000737 break;
738 case kArm64Mul:
739 __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
740 break;
741 case kArm64Mul32:
742 __ Mul(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
743 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400744 case kArm64Smull:
745 __ Smull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
746 break;
747 case kArm64Umull:
748 __ Umull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
749 break;
750 case kArm64Madd:
751 __ Madd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
752 i.InputRegister(2));
753 break;
754 case kArm64Madd32:
755 __ Madd(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
756 i.InputRegister32(2));
757 break;
758 case kArm64Msub:
759 __ Msub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
760 i.InputRegister(2));
761 break;
762 case kArm64Msub32:
763 __ Msub(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
764 i.InputRegister32(2));
765 break;
766 case kArm64Mneg:
767 __ Mneg(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
768 break;
769 case kArm64Mneg32:
770 __ Mneg(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
771 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000772 case kArm64Idiv:
773 __ Sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
774 break;
775 case kArm64Idiv32:
776 __ Sdiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
777 break;
778 case kArm64Udiv:
779 __ Udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
780 break;
781 case kArm64Udiv32:
782 __ Udiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
783 break;
784 case kArm64Imod: {
785 UseScratchRegisterScope scope(masm());
786 Register temp = scope.AcquireX();
787 __ Sdiv(temp, i.InputRegister(0), i.InputRegister(1));
788 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
789 break;
790 }
791 case kArm64Imod32: {
792 UseScratchRegisterScope scope(masm());
793 Register temp = scope.AcquireW();
794 __ Sdiv(temp, i.InputRegister32(0), i.InputRegister32(1));
795 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
796 i.InputRegister32(0));
797 break;
798 }
799 case kArm64Umod: {
800 UseScratchRegisterScope scope(masm());
801 Register temp = scope.AcquireX();
802 __ Udiv(temp, i.InputRegister(0), i.InputRegister(1));
803 __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
804 break;
805 }
806 case kArm64Umod32: {
807 UseScratchRegisterScope scope(masm());
808 Register temp = scope.AcquireW();
809 __ Udiv(temp, i.InputRegister32(0), i.InputRegister32(1));
810 __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
811 i.InputRegister32(0));
812 break;
813 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000814 case kArm64Not:
Ben Murdoch014dc512016-03-22 12:00:34 +0000815 __ Mvn(i.OutputRegister(), i.InputOperand(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000816 break;
817 case kArm64Not32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000818 __ Mvn(i.OutputRegister32(), i.InputOperand32(0));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000819 break;
820 case kArm64Or:
Ben Murdoch014dc512016-03-22 12:00:34 +0000821 __ Orr(i.OutputRegister(), i.InputOrZeroRegister64(0),
822 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000823 break;
824 case kArm64Or32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000825 __ Orr(i.OutputRegister32(), i.InputOrZeroRegister32(0),
826 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000827 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400828 case kArm64Orn:
Ben Murdoch014dc512016-03-22 12:00:34 +0000829 __ Orn(i.OutputRegister(), i.InputOrZeroRegister64(0),
830 i.InputOperand2_64(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000831 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400832 case kArm64Orn32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000833 __ Orn(i.OutputRegister32(), i.InputOrZeroRegister32(0),
834 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400835 break;
836 case kArm64Eor:
Ben Murdoch014dc512016-03-22 12:00:34 +0000837 __ Eor(i.OutputRegister(), i.InputOrZeroRegister64(0),
838 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400839 break;
840 case kArm64Eor32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000841 __ Eor(i.OutputRegister32(), i.InputOrZeroRegister32(0),
842 i.InputOperand2_32(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400843 break;
844 case kArm64Eon:
Ben Murdoch014dc512016-03-22 12:00:34 +0000845 __ Eon(i.OutputRegister(), i.InputOrZeroRegister64(0),
846 i.InputOperand2_64(1));
Emily Bernier958fae72015-03-24 16:35:39 -0400847 break;
848 case kArm64Eon32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000849 __ Eon(i.OutputRegister32(), i.InputOrZeroRegister32(0),
850 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000851 break;
852 case kArm64Sub:
Ben Murdoch014dc512016-03-22 12:00:34 +0000853 if (FlagsModeField::decode(opcode) != kFlags_none) {
854 __ Subs(i.OutputRegister(), i.InputOrZeroRegister64(0),
855 i.InputOperand2_64(1));
856 } else {
857 __ Sub(i.OutputRegister(), i.InputOrZeroRegister64(0),
858 i.InputOperand2_64(1));
859 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000860 break;
861 case kArm64Sub32:
862 if (FlagsModeField::decode(opcode) != kFlags_none) {
Ben Murdoch014dc512016-03-22 12:00:34 +0000863 __ Subs(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400864 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000865 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +0000866 __ Sub(i.OutputRegister32(), i.InputOrZeroRegister32(0),
Emily Bernier958fae72015-03-24 16:35:39 -0400867 i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000868 }
869 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400870 case kArm64Lsl:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000871 ASSEMBLE_SHIFT(Lsl, 64);
872 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400873 case kArm64Lsl32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000874 ASSEMBLE_SHIFT(Lsl, 32);
875 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400876 case kArm64Lsr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000877 ASSEMBLE_SHIFT(Lsr, 64);
878 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400879 case kArm64Lsr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000880 ASSEMBLE_SHIFT(Lsr, 32);
881 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400882 case kArm64Asr:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000883 ASSEMBLE_SHIFT(Asr, 64);
884 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400885 case kArm64Asr32:
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000886 ASSEMBLE_SHIFT(Asr, 32);
887 break;
888 case kArm64Ror:
889 ASSEMBLE_SHIFT(Ror, 64);
890 break;
891 case kArm64Ror32:
892 ASSEMBLE_SHIFT(Ror, 32);
893 break;
894 case kArm64Mov32:
895 __ Mov(i.OutputRegister32(), i.InputRegister32(0));
896 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400897 case kArm64Sxtb32:
898 __ Sxtb(i.OutputRegister32(), i.InputRegister32(0));
899 break;
900 case kArm64Sxth32:
901 __ Sxth(i.OutputRegister32(), i.InputRegister32(0));
902 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000903 case kArm64Sxtw:
904 __ Sxtw(i.OutputRegister(), i.InputRegister32(0));
905 break;
Ben Murdoch014dc512016-03-22 12:00:34 +0000906 case kArm64Sbfx32:
907 __ Sbfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
908 i.InputInt5(2));
909 break;
Emily Bernier958fae72015-03-24 16:35:39 -0400910 case kArm64Ubfx:
Ben Murdoch014dc512016-03-22 12:00:34 +0000911 __ Ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt6(1),
912 i.InputInt6(2));
Emily Bernier958fae72015-03-24 16:35:39 -0400913 break;
914 case kArm64Ubfx32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000915 __ Ubfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
916 i.InputInt5(2));
917 break;
918 case kArm64Ubfiz32:
919 __ Ubfiz(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
920 i.InputInt5(2));
921 break;
922 case kArm64Bfi:
923 __ Bfi(i.OutputRegister(), i.InputRegister(1), i.InputInt6(2),
924 i.InputInt6(3));
Emily Bernier958fae72015-03-24 16:35:39 -0400925 break;
926 case kArm64TestAndBranch32:
927 case kArm64TestAndBranch:
928 // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
929 break;
930 case kArm64CompareAndBranch32:
931 // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
932 break;
Ben Murdoch109988c2016-05-18 11:27:45 +0100933 case kArm64ClaimCSP: {
934 int count = i.InputInt32(0);
935 Register prev = __ StackPointer();
936 __ SetStackPointer(csp);
937 __ Claim(count);
938 __ SetStackPointer(prev);
939 frame_access_state()->IncreaseSPDelta(count);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000940 break;
941 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100942 case kArm64ClaimJSSP: {
943 int count = i.InputInt32(0);
944 if (csp.Is(__ StackPointer())) {
945 // No JSP is set up. Compute it from the CSP.
946 int even = RoundUp(count, 2);
947 __ Sub(jssp, csp, count * kPointerSize);
948 __ Sub(csp, csp, even * kPointerSize); // Must always be aligned.
949 frame_access_state()->IncreaseSPDelta(even);
950 } else {
951 // JSSP is the current stack pointer, just use regular Claim().
952 __ Claim(count);
953 frame_access_state()->IncreaseSPDelta(count);
954 }
955 break;
956 }
957 case kArm64PokeCSP: // fall through
958 case kArm64PokeJSSP: {
959 Register prev = __ StackPointer();
960 __ SetStackPointer(arch_opcode == kArm64PokeCSP ? csp : jssp);
Ben Murdoch014dc512016-03-22 12:00:34 +0000961 Operand operand(i.InputInt32(1) * kPointerSize);
962 if (instr->InputAt(0)->IsDoubleRegister()) {
963 __ Poke(i.InputFloat64Register(0), operand);
964 } else {
965 __ Poke(i.InputRegister(0), operand);
966 }
Ben Murdoch109988c2016-05-18 11:27:45 +0100967 __ SetStackPointer(prev);
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000968 break;
969 }
970 case kArm64PokePair: {
Ben Murdoch014dc512016-03-22 12:00:34 +0000971 int slot = i.InputInt32(2) - 1;
972 if (instr->InputAt(0)->IsDoubleRegister()) {
973 __ PokePair(i.InputFloat64Register(1), i.InputFloat64Register(0),
974 slot * kPointerSize);
975 } else {
976 __ PokePair(i.InputRegister(1), i.InputRegister(0),
977 slot * kPointerSize);
978 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000979 break;
980 }
Ben Murdoch014dc512016-03-22 12:00:34 +0000981 case kArm64Clz:
982 __ Clz(i.OutputRegister64(), i.InputRegister64(0));
983 break;
984 case kArm64Clz32:
985 __ Clz(i.OutputRegister32(), i.InputRegister32(0));
986 break;
Ben Murdoch109988c2016-05-18 11:27:45 +0100987 case kArm64Rbit:
988 __ Rbit(i.OutputRegister64(), i.InputRegister64(0));
989 break;
990 case kArm64Rbit32:
991 __ Rbit(i.OutputRegister32(), i.InputRegister32(0));
992 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000993 case kArm64Cmp:
Ben Murdoch014dc512016-03-22 12:00:34 +0000994 __ Cmp(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000995 break;
996 case kArm64Cmp32:
Ben Murdoch014dc512016-03-22 12:00:34 +0000997 __ Cmp(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +0000998 break;
999 case kArm64Cmn:
Ben Murdoch014dc512016-03-22 12:00:34 +00001000 __ Cmn(i.InputOrZeroRegister64(0), i.InputOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001001 break;
1002 case kArm64Cmn32:
Ben Murdoch014dc512016-03-22 12:00:34 +00001003 __ Cmn(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001004 break;
1005 case kArm64Tst:
1006 __ Tst(i.InputRegister(0), i.InputOperand(1));
1007 break;
1008 case kArm64Tst32:
1009 __ Tst(i.InputRegister32(0), i.InputOperand32(1));
1010 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001011 case kArm64Float32Cmp:
1012 if (instr->InputAt(1)->IsDoubleRegister()) {
1013 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1014 } else {
1015 DCHECK(instr->InputAt(1)->IsImmediate());
1016 // 0.0 is the only immediate supported by fcmp instructions.
1017 DCHECK(i.InputFloat32(1) == 0.0f);
1018 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32(1));
1019 }
1020 break;
1021 case kArm64Float32Add:
1022 __ Fadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
1023 i.InputFloat32Register(1));
1024 break;
1025 case kArm64Float32Sub:
1026 __ Fsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
1027 i.InputFloat32Register(1));
1028 break;
1029 case kArm64Float32Mul:
1030 __ Fmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
1031 i.InputFloat32Register(1));
1032 break;
1033 case kArm64Float32Div:
1034 __ Fdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
1035 i.InputFloat32Register(1));
1036 break;
1037 case kArm64Float32Max:
1038 // (b < a) ? a : b
1039 __ Fcmp(i.InputFloat32Register(1), i.InputFloat32Register(0));
1040 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1041 i.InputFloat32Register(1), lo);
1042 break;
1043 case kArm64Float32Min:
1044 // (a < b) ? a : b
1045 __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
1046 __ Fcsel(i.OutputFloat32Register(), i.InputFloat32Register(0),
1047 i.InputFloat32Register(1), lo);
1048 break;
1049 case kArm64Float32Abs:
1050 __ Fabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
1051 break;
1052 case kArm64Float32Sqrt:
1053 __ Fsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
1054 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001055 case kArm64Float64Cmp:
Ben Murdoch014dc512016-03-22 12:00:34 +00001056 if (instr->InputAt(1)->IsDoubleRegister()) {
1057 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1058 } else {
1059 DCHECK(instr->InputAt(1)->IsImmediate());
1060 // 0.0 is the only immediate supported by fcmp instructions.
1061 DCHECK(i.InputDouble(1) == 0.0);
1062 __ Fcmp(i.InputDoubleRegister(0), i.InputDouble(1));
1063 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001064 break;
1065 case kArm64Float64Add:
1066 __ Fadd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1067 i.InputDoubleRegister(1));
1068 break;
1069 case kArm64Float64Sub:
1070 __ Fsub(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1071 i.InputDoubleRegister(1));
1072 break;
1073 case kArm64Float64Mul:
1074 __ Fmul(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1075 i.InputDoubleRegister(1));
1076 break;
1077 case kArm64Float64Div:
1078 __ Fdiv(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1079 i.InputDoubleRegister(1));
1080 break;
1081 case kArm64Float64Mod: {
1082 // TODO(dcarney): implement directly. See note in lithium-codegen-arm64.cc
1083 FrameScope scope(masm(), StackFrame::MANUAL);
1084 DCHECK(d0.is(i.InputDoubleRegister(0)));
1085 DCHECK(d1.is(i.InputDoubleRegister(1)));
1086 DCHECK(d0.is(i.OutputDoubleRegister()));
1087 // TODO(dcarney): make sure this saves all relevant registers.
1088 __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
1089 0, 2);
1090 break;
1091 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001092 case kArm64Float64Max:
1093 // (b < a) ? a : b
1094 __ Fcmp(i.InputDoubleRegister(1), i.InputDoubleRegister(0));
1095 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1096 i.InputDoubleRegister(1), lo);
1097 break;
1098 case kArm64Float64Min:
1099 // (a < b) ? a : b
1100 __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
1101 __ Fcsel(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1102 i.InputDoubleRegister(1), lo);
1103 break;
1104 case kArm64Float64Abs:
1105 __ Fabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1106 break;
1107 case kArm64Float64Neg:
1108 __ Fneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1109 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001110 case kArm64Float64Sqrt:
1111 __ Fsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1112 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001113 case kArm64Float32ToFloat64:
1114 __ Fcvt(i.OutputDoubleRegister(), i.InputDoubleRegister(0).S());
1115 break;
1116 case kArm64Float64ToFloat32:
1117 __ Fcvt(i.OutputDoubleRegister().S(), i.InputDoubleRegister(0));
1118 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001119 case kArm64Float32ToInt32:
1120 __ Fcvtzs(i.OutputRegister32(), i.InputFloat32Register(0));
1121 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001122 case kArm64Float64ToInt32:
1123 __ Fcvtzs(i.OutputRegister32(), i.InputDoubleRegister(0));
1124 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001125 case kArm64Float32ToUint32:
1126 __ Fcvtzu(i.OutputRegister32(), i.InputFloat32Register(0));
1127 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001128 case kArm64Float64ToUint32:
1129 __ Fcvtzu(i.OutputRegister32(), i.InputDoubleRegister(0));
1130 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001131 case kArm64Float32ToInt64:
1132 __ Fcvtzs(i.OutputRegister64(), i.InputFloat32Register(0));
1133 if (i.OutputCount() > 1) {
1134 __ Mov(i.OutputRegister(1), 1);
1135 Label done;
1136 __ Cmp(i.OutputRegister(0), 1);
1137 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1138 __ Fccmp(i.InputFloat32Register(0), i.InputFloat32Register(0), VFlag,
1139 vc);
1140 __ B(vc, &done);
1141 __ Fcmp(i.InputFloat32Register(0), static_cast<float>(INT64_MIN));
1142 __ Cset(i.OutputRegister(1), eq);
1143 __ Bind(&done);
1144 }
1145 break;
1146 case kArm64Float64ToInt64:
1147 __ Fcvtzs(i.OutputRegister(0), i.InputDoubleRegister(0));
1148 if (i.OutputCount() > 1) {
1149 __ Mov(i.OutputRegister(1), 1);
1150 Label done;
1151 __ Cmp(i.OutputRegister(0), 1);
1152 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1153 __ Fccmp(i.InputDoubleRegister(0), i.InputDoubleRegister(0), VFlag, vc);
1154 __ B(vc, &done);
1155 __ Fcmp(i.InputDoubleRegister(0), static_cast<double>(INT64_MIN));
1156 __ Cset(i.OutputRegister(1), eq);
1157 __ Bind(&done);
1158 }
1159 break;
1160 case kArm64Float32ToUint64:
1161 __ Fcvtzu(i.OutputRegister64(), i.InputFloat32Register(0));
1162 if (i.OutputCount() > 1) {
1163 __ Fcmp(i.InputFloat32Register(0), -1.0);
1164 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1165 __ Cset(i.OutputRegister(1), ne);
1166 }
1167 break;
1168 case kArm64Float64ToUint64:
1169 __ Fcvtzu(i.OutputRegister64(), i.InputDoubleRegister(0));
1170 if (i.OutputCount() > 1) {
1171 __ Fcmp(i.InputDoubleRegister(0), -1.0);
1172 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1173 __ Cset(i.OutputRegister(1), ne);
1174 }
1175 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001176 case kArm64Int32ToFloat32:
1177 __ Scvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1178 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001179 case kArm64Int32ToFloat64:
1180 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1181 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001182 case kArm64Int64ToFloat32:
1183 __ Scvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1184 break;
1185 case kArm64Int64ToFloat64:
1186 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1187 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001188 case kArm64Uint32ToFloat32:
1189 __ Ucvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1190 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001191 case kArm64Uint32ToFloat64:
1192 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1193 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001194 case kArm64Uint64ToFloat32:
1195 __ Ucvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1196 break;
1197 case kArm64Uint64ToFloat64:
1198 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1199 break;
1200 case kArm64Float64ExtractLowWord32:
1201 __ Fmov(i.OutputRegister32(), i.InputFloat32Register(0));
1202 break;
1203 case kArm64Float64ExtractHighWord32:
1204 // TODO(arm64): This should use MOV (to general) when NEON is supported.
1205 __ Fmov(i.OutputRegister(), i.InputFloat64Register(0));
1206 __ Lsr(i.OutputRegister(), i.OutputRegister(), 32);
1207 break;
1208 case kArm64Float64InsertLowWord32: {
1209 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1210 UseScratchRegisterScope scope(masm());
1211 Register tmp = scope.AcquireX();
1212 __ Fmov(tmp, i.InputFloat64Register(0));
1213 __ Bfi(tmp, i.InputRegister(1), 0, 32);
1214 __ Fmov(i.OutputFloat64Register(), tmp);
1215 break;
1216 }
1217 case kArm64Float64InsertHighWord32: {
1218 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1219 UseScratchRegisterScope scope(masm());
1220 Register tmp = scope.AcquireX();
1221 __ Fmov(tmp.W(), i.InputFloat32Register(0));
1222 __ Bfi(tmp, i.InputRegister(1), 32, 32);
1223 __ Fmov(i.OutputFloat64Register(), tmp);
1224 break;
1225 }
1226 case kArm64Float64MoveU64:
1227 __ Fmov(i.OutputFloat64Register(), i.InputRegister(0));
1228 break;
1229 case kArm64U64MoveFloat64:
1230 __ Fmov(i.OutputRegister(), i.InputDoubleRegister(0));
1231 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001232 case kArm64Ldrb:
1233 __ Ldrb(i.OutputRegister(), i.MemoryOperand());
1234 break;
1235 case kArm64Ldrsb:
1236 __ Ldrsb(i.OutputRegister(), i.MemoryOperand());
1237 break;
1238 case kArm64Strb:
1239 __ Strb(i.InputRegister(2), i.MemoryOperand());
1240 break;
1241 case kArm64Ldrh:
1242 __ Ldrh(i.OutputRegister(), i.MemoryOperand());
1243 break;
1244 case kArm64Ldrsh:
1245 __ Ldrsh(i.OutputRegister(), i.MemoryOperand());
1246 break;
1247 case kArm64Strh:
1248 __ Strh(i.InputRegister(2), i.MemoryOperand());
1249 break;
1250 case kArm64LdrW:
1251 __ Ldr(i.OutputRegister32(), i.MemoryOperand());
1252 break;
1253 case kArm64StrW:
1254 __ Str(i.InputRegister32(2), i.MemoryOperand());
1255 break;
1256 case kArm64Ldr:
1257 __ Ldr(i.OutputRegister(), i.MemoryOperand());
1258 break;
1259 case kArm64Str:
1260 __ Str(i.InputRegister(2), i.MemoryOperand());
1261 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001262 case kArm64LdrS:
1263 __ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001264 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001265 case kArm64StrS:
1266 __ Str(i.InputDoubleRegister(2).S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001267 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001268 case kArm64LdrD:
1269 __ Ldr(i.OutputDoubleRegister(), i.MemoryOperand());
1270 break;
1271 case kArm64StrD:
1272 __ Str(i.InputDoubleRegister(2), i.MemoryOperand());
1273 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001274 case kCheckedLoadInt8:
1275 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsb);
1276 break;
1277 case kCheckedLoadUint8:
1278 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrb);
1279 break;
1280 case kCheckedLoadInt16:
1281 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsh);
1282 break;
1283 case kCheckedLoadUint16:
1284 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrh);
1285 break;
1286 case kCheckedLoadWord32:
1287 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldr);
1288 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001289 case kCheckedLoadWord64:
1290 ASSEMBLE_CHECKED_LOAD_INTEGER_64(Ldr);
1291 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001292 case kCheckedLoadFloat32:
1293 ASSEMBLE_CHECKED_LOAD_FLOAT(32);
1294 break;
1295 case kCheckedLoadFloat64:
1296 ASSEMBLE_CHECKED_LOAD_FLOAT(64);
1297 break;
1298 case kCheckedStoreWord8:
1299 ASSEMBLE_CHECKED_STORE_INTEGER(Strb);
1300 break;
1301 case kCheckedStoreWord16:
1302 ASSEMBLE_CHECKED_STORE_INTEGER(Strh);
1303 break;
1304 case kCheckedStoreWord32:
1305 ASSEMBLE_CHECKED_STORE_INTEGER(Str);
1306 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001307 case kCheckedStoreWord64:
1308 ASSEMBLE_CHECKED_STORE_INTEGER_64(Str);
1309 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001310 case kCheckedStoreFloat32:
1311 ASSEMBLE_CHECKED_STORE_FLOAT(32);
1312 break;
1313 case kCheckedStoreFloat64:
1314 ASSEMBLE_CHECKED_STORE_FLOAT(64);
1315 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001316 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001317} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001318
1319
1320// Assemble branches after this instruction.
Emily Bernier958fae72015-03-24 16:35:39 -04001321void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001322 Arm64OperandConverter i(this, instr);
Emily Bernier958fae72015-03-24 16:35:39 -04001323 Label* tlabel = branch->true_label;
1324 Label* flabel = branch->false_label;
1325 FlagsCondition condition = branch->condition;
1326 ArchOpcode opcode = instr->arch_opcode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001327
Emily Bernier958fae72015-03-24 16:35:39 -04001328 if (opcode == kArm64CompareAndBranch32) {
1329 switch (condition) {
1330 case kEqual:
1331 __ Cbz(i.InputRegister32(0), tlabel);
1332 break;
1333 case kNotEqual:
1334 __ Cbnz(i.InputRegister32(0), tlabel);
1335 break;
1336 default:
1337 UNREACHABLE();
1338 }
1339 } else if (opcode == kArm64TestAndBranch32) {
1340 switch (condition) {
1341 case kEqual:
1342 __ Tbz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1343 break;
1344 case kNotEqual:
1345 __ Tbnz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1346 break;
1347 default:
1348 UNREACHABLE();
1349 }
1350 } else if (opcode == kArm64TestAndBranch) {
1351 switch (condition) {
1352 case kEqual:
1353 __ Tbz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1354 break;
1355 case kNotEqual:
1356 __ Tbnz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1357 break;
1358 default:
1359 UNREACHABLE();
1360 }
1361 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +00001362 Condition cc = FlagsConditionToCondition(condition);
1363 __ B(cc, tlabel);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001364 }
Emily Bernier958fae72015-03-24 16:35:39 -04001365 if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
1366}
1367
1368
Ben Murdoch014dc512016-03-22 12:00:34 +00001369void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernier958fae72015-03-24 16:35:39 -04001370 if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001371}
1372
1373
1374// Assemble boolean materializations after this instruction.
1375void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1376 FlagsCondition condition) {
1377 Arm64OperandConverter i(this, instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001378
1379 // Materialize a full 64-bit 1 or 0 value. The result register is always the
1380 // last output of the instruction.
Ben Murdoch014dc512016-03-22 12:00:34 +00001381 DCHECK_NE(0u, instr->OutputCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001382 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdoch014dc512016-03-22 12:00:34 +00001383 Condition cc = FlagsConditionToCondition(condition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001384 __ Cset(reg, cc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001385}
1386
1387
Ben Murdoch014dc512016-03-22 12:00:34 +00001388void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1389 Arm64OperandConverter i(this, instr);
1390 Register input = i.InputRegister32(0);
1391 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1392 __ Cmp(input, i.InputInt32(index + 0));
1393 __ B(eq, GetLabel(i.InputRpo(index + 1)));
1394 }
1395 AssembleArchJump(i.InputRpo(1));
1396}
1397
1398
1399void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1400 Arm64OperandConverter i(this, instr);
1401 UseScratchRegisterScope scope(masm());
1402 Register input = i.InputRegister32(0);
1403 Register temp = scope.AcquireX();
1404 size_t const case_count = instr->InputCount() - 2;
1405 Label table;
1406 __ Cmp(input, case_count);
1407 __ B(hs, GetLabel(i.InputRpo(1)));
1408 __ Adr(temp, &table);
1409 __ Add(temp, temp, Operand(input, UXTW, 2));
1410 __ Br(temp);
1411 __ StartBlockPools();
1412 __ Bind(&table);
1413 for (size_t index = 0; index < case_count; ++index) {
1414 __ B(GetLabel(i.InputRpo(index + 2)));
1415 }
1416 __ EndBlockPools();
1417}
1418
1419
1420void CodeGenerator::AssembleDeoptimizerCall(
1421 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001422 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch014dc512016-03-22 12:00:34 +00001423 isolate(), deoptimization_id, bailout_type);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001424 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
1425}
1426
1427
// Assembles the function prologue: selects the stack pointer (csp or jssp),
// builds (or elides) the frame, allocates spill slots, and pushes the
// callee-saved FP and general registers listed in the incoming descriptor.
// NOTE(review): the SetStackPointer calls must precede the prologue emitters
// below, since those emit through the currently selected stack pointer.
void CodeGenerator::AssemblePrologue() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->IsCFunctionCall()) {
    // C calls use the system stack pointer and a plain fp/lr frame.
    __ SetStackPointer(csp);
    __ Push(lr, fp);
    __ Mov(fp, csp);
  } else if (descriptor->IsJSFunctionCall()) {
    // JS calls use jssp; Prologue() emits the standard (possibly pre-aged)
    // JS function frame.
    __ SetStackPointer(jssp);
    __ Prologue(this->info()->GeneratePreagedPrologue());
  } else if (frame()->needs_frame()) {
    // Stub frame on whichever stack the descriptor requests.
    if (descriptor->UseNativeStack()) {
      __ SetStackPointer(csp);
    } else {
      __ SetStackPointer(jssp);
    }
    __ StubPrologue();
  } else {
    // No frame at all; still select the stack pointer and record the
    // elided-frame size so frame accesses are computed correctly.
    if (descriptor->UseNativeStack()) {
      __ SetStackPointer(csp);
    } else {
      __ SetStackPointer(jssp);
    }
    frame()->SetElidedFrameSizeInSlots(0);
  }
  frame_access_state()->SetFrameAccessToDefault();

  int stack_shrink_slots = frame()->GetSpillSlotCount();
  if (info()->is_osr()) {
    // TurboFan OSR-compiled functions cannot be entered directly.
    __ Abort(kShouldNotDirectlyEnterOsrFunction);

    // Unoptimized code jumps directly to this entrypoint while the unoptimized
    // frame is still on the stack. Optimized code uses OSR values directly from
    // the unoptimized frame. Thus, all that needs to be done is to allocate the
    // remaining stack slots.
    if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
    osr_pc_offset_ = __ pc_offset();
    // The unoptimized frame's slots are already present; claim only the rest.
    stack_shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
  }

  // If frame()->needs_frame() is false, then
  // frame()->AlignSavedCalleeRegisterSlots() is guaranteed to return 0.
  if (csp.Is(masm()->StackPointer()) && frame()->needs_frame()) {
    // The system stack pointer requires 16-byte alignment at function call
    // boundaries.

    stack_shrink_slots += frame()->AlignSavedCalleeRegisterSlots();
  }
  __ Claim(stack_shrink_slots);

  // Save FP registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  int saved_count = saves_fp.Count();
  if (saved_count != 0) {
    DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
    __ PushCPURegList(saves_fp);
    // Each D register occupies kDoubleSize / kPointerSize frame slots.
    frame()->AllocateSavedCalleeRegisterSlots(saved_count *
                                              (kDoubleSize / kPointerSize));
  }
  // Save registers.
  // TODO(palfia): TF save list is not in sync with
  // CPURegList::GetCalleeSaved(): x30 is missing.
  // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list());
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  saved_count = saves.Count();
  if (saved_count != 0) {
    __ PushCPURegList(saves);
    frame()->AllocateSavedCalleeRegisterSlots(saved_count);
  }
}
1500
1501
// Assembles the function epilogue: restores callee-saved registers, tears
// down the frame (mirroring AssemblePrologue), drops the stack parameters,
// and returns. Must be kept in sync with the prologue's frame layout.
void CodeGenerator::AssembleReturn() {
  CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();

  // Restore registers.
  CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
                                descriptor->CalleeSavedRegisters());
  if (saves.Count() != 0) {
    __ PopCPURegList(saves);
  }

  // Restore fp registers.
  CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
                                   descriptor->CalleeSavedFPRegisters());
  if (saves_fp.Count() != 0) {
    __ PopCPURegList(saves_fp);
  }

  // Number of stack-passed parameter slots to drop before returning.
  int pop_count = static_cast<int>(descriptor->StackParameterCount());
  if (descriptor->IsCFunctionCall()) {
    // C frame: restore csp from fp, then pop the saved fp/lr pair.
    __ Mov(csp, fp);
    __ Pop(fp, lr);
  } else if (frame()->needs_frame()) {
    // Canonicalize JSFunction return sites for now: all returns branch to a
    // single shared epilogue bound at the first return emitted.
    if (return_label_.is_bound()) {
      __ B(&return_label_);
      return;
    } else {
      __ Bind(&return_label_);
      if (descriptor->UseNativeStack()) {
        __ Mov(csp, fp);
        // csp must stay 16-byte aligned, so round the drop count up to even.
        pop_count += (pop_count & 1);  // align
      } else {
        __ Mov(jssp, fp);
      }
      __ Pop(fp, lr);
    }
  } else if (descriptor->UseNativeStack()) {
    // Frameless native-stack return still needs the 16-byte alignment fixup.
    pop_count += (pop_count & 1);  // align
  }
  __ Drop(pop_count);
  __ Ret();
}
1544
1545
// Emits code that copies |source| into |destination|. Both operands were
// assigned by the register allocator; only the pairings handled below are
// legal, and any unexpected combination hits UNREACHABLE().
void CodeGenerator::AssembleMove(InstructionOperand* source,
                                 InstructionOperand* destination) {
  Arm64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register -> register, or register -> stack slot.
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      __ Mov(g.ToRegister(destination), src);
    } else {
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsStackSlot()) {
    // Stack slot -> register, or stack slot -> stack slot. The latter goes
    // through a scratch register since there is no memory-to-memory move.
    MemOperand src = g.ToMemOperand(source, masm());
    DCHECK(destination->IsRegister() || destination->IsStackSlot());
    if (destination->IsRegister()) {
      __ Ldr(g.ToRegister(destination), src);
    } else {
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireX();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsConstant()) {
    Constant src = g.ToConstant(ConstantOperand::cast(source));
    if (destination->IsRegister() || destination->IsStackSlot()) {
      UseScratchRegisterScope scope(masm());
      // Materialize directly into the destination register, or into a
      // scratch register when the destination is a stack slot.
      Register dst = destination->IsRegister() ? g.ToRegister(destination)
                                               : scope.AcquireX();
      if (src.type() == Constant::kHeapObject) {
        Handle<HeapObject> src_object = src.ToHeapObject();
        Heap::RootListIndex index;
        int offset;
        // Prefer cheaper materializations: reload the object from the
        // frame or the root list before embedding it in the code.
        if (IsMaterializableFromFrame(src_object, &offset)) {
          __ Ldr(dst, MemOperand(fp, offset));
        } else if (IsMaterializableFromRoot(src_object, &index)) {
          __ LoadRoot(dst, index);
        } else {
          __ LoadObject(dst, src_object);
        }
      } else {
        __ Mov(dst, g.ToImmediate(source));
      }
      if (destination->IsStackSlot()) {
        // Spill the materialized constant into its slot.
        __ Str(dst, g.ToMemOperand(destination, masm()));
      }
    } else if (src.type() == Constant::kFloat32) {
      // float32 constant -> FP register (as S register) or FP stack slot.
      if (destination->IsDoubleRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination).S();
        __ Fmov(dst, src.ToFloat32());
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireS();
        __ Fmov(temp, src.ToFloat32());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    } else {
      // float64 constant -> FP register or FP stack slot.
      DCHECK_EQ(Constant::kFloat64, src.type());
      if (destination->IsDoubleRegister()) {
        FPRegister dst = g.ToDoubleRegister(destination);
        __ Fmov(dst, src.ToFloat64());
      } else {
        DCHECK(destination->IsDoubleStackSlot());
        UseScratchRegisterScope scope(masm());
        FPRegister temp = scope.AcquireD();
        __ Fmov(temp, src.ToFloat64());
        __ Str(temp, g.ToMemOperand(destination, masm()));
      }
    }
  } else if (source->IsDoubleRegister()) {
    // FP register -> FP register or FP stack slot.
    FPRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      FPRegister dst = g.ToDoubleRegister(destination);
      __ Fmov(dst, src);
    } else {
      DCHECK(destination->IsDoubleStackSlot());
      __ Str(src, g.ToMemOperand(destination, masm()));
    }
  } else if (source->IsDoubleStackSlot()) {
    // FP stack slot -> FP register, or slot -> slot via a scratch D reg.
    DCHECK(destination->IsDoubleRegister() || destination->IsDoubleStackSlot());
    MemOperand src = g.ToMemOperand(source, masm());
    if (destination->IsDoubleRegister()) {
      __ Ldr(g.ToDoubleRegister(destination), src);
    } else {
      UseScratchRegisterScope scope(masm());
      FPRegister temp = scope.AcquireD();
      __ Ldr(temp, src);
      __ Str(temp, g.ToMemOperand(destination, masm()));
    }
  } else {
    UNREACHABLE();
  }
}
1641
1642
// Emits code that exchanges the contents of |source| and |destination|.
// Used by the gap resolver for parallel moves that form cycles.
void CodeGenerator::AssembleSwap(InstructionOperand* source,
                                 InstructionOperand* destination) {
  Arm64OperandConverter g(this, nullptr);
  // Dispatch on the source and destination operand kinds. Not all
  // combinations are possible.
  if (source->IsRegister()) {
    // Register-register.
    UseScratchRegisterScope scope(masm());
    Register temp = scope.AcquireX();
    Register src = g.ToRegister(source);
    if (destination->IsRegister()) {
      // Three-move swap through the scratch register.
      Register dst = g.ToRegister(destination);
      __ Mov(temp, src);
      __ Mov(src, dst);
      __ Mov(dst, temp);
    } else {
      // Register-stack slot: stash src, load the slot into src, store the
      // stashed value into the slot.
      DCHECK(destination->IsStackSlot());
      MemOperand dst = g.ToMemOperand(destination, masm());
      __ Mov(temp, src);
      __ Ldr(src, dst);
      __ Str(temp, dst);
    }
  } else if (source->IsStackSlot() || source->IsDoubleStackSlot()) {
    // Slot-slot swap. Both general and FP slots are exchanged through two
    // scratch D registers (the 64-bit Ldr/Str cover either slot kind).
    UseScratchRegisterScope scope(masm());
    DoubleRegister temp_0 = scope.AcquireD();
    DoubleRegister temp_1 = scope.AcquireD();
    MemOperand src = g.ToMemOperand(source, masm());
    MemOperand dst = g.ToMemOperand(destination, masm());
    __ Ldr(temp_0, src);
    __ Ldr(temp_1, dst);
    __ Str(temp_0, dst);
    __ Str(temp_1, src);
  } else if (source->IsDoubleRegister()) {
    UseScratchRegisterScope scope(masm());
    FPRegister temp = scope.AcquireD();
    FPRegister src = g.ToDoubleRegister(source);
    if (destination->IsDoubleRegister()) {
      // FP register-register swap through the scratch D register.
      FPRegister dst = g.ToDoubleRegister(destination);
      __ Fmov(temp, src);
      __ Fmov(src, dst);
      __ Fmov(dst, temp);
    } else {
      // FP register-FP stack slot.
      DCHECK(destination->IsDoubleStackSlot());
      MemOperand dst = g.ToMemOperand(destination, masm());
      __ Fmov(temp, src);
      __ Ldr(src, dst);
      __ Str(temp, dst);
    }
  } else {
    // No other combinations are possible.
    UNREACHABLE();
  }
}
1696
1697
Ben Murdoch014dc512016-03-22 12:00:34 +00001698void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
1699 // On 64-bit ARM we emit the jump tables inline.
1700 UNREACHABLE();
1701}
1702
1703
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001704void CodeGenerator::AddNopForSmiCodeInlining() { __ movz(xzr, 0); }
1705
1706
1707void CodeGenerator::EnsureSpaceForLazyDeopt() {
Ben Murdoch014dc512016-03-22 12:00:34 +00001708 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
1709 return;
1710 }
1711
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001712 int space_needed = Deoptimizer::patch_size();
Ben Murdoch014dc512016-03-22 12:00:34 +00001713 // Ensure that we have enough space after the previous lazy-bailout
1714 // instruction for patching the code here.
1715 intptr_t current_pc = masm()->pc_offset();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001716
Ben Murdoch014dc512016-03-22 12:00:34 +00001717 if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
1718 intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
1719 DCHECK((padding_size % kInstructionSize) == 0);
1720 InstructionAccurateScope instruction_accurate(
1721 masm(), padding_size / kInstructionSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001722
Ben Murdoch014dc512016-03-22 12:00:34 +00001723 while (padding_size > 0) {
1724 __ nop();
1725 padding_size -= kInstructionSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001726 }
1727 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001728}
1729
1730#undef __
1731
1732} // namespace compiler
1733} // namespace internal
1734} // namespace v8