// Copyright 2014 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/compiler/code-generator.h"

#include "src/arm64/frames-arm64.h"
#include "src/arm64/macro-assembler-arm64.h"
#include "src/ast/scopes.h"
#include "src/compiler/code-generator-impl.h"
#include "src/compiler/gap-resolver.h"
#include "src/compiler/node-matchers.h"
#include "src/compiler/osr.h"

namespace v8 {
namespace internal {
namespace compiler {

#define __ masm()->


// Adds Arm64-specific methods to convert InstructionOperands.
class Arm64OperandConverter final : public InstructionOperandConverter {
 public:
  Arm64OperandConverter(CodeGenerator* gen, Instruction* instr)
      : InstructionOperandConverter(gen, instr) {}

  DoubleRegister InputFloat32Register(size_t index) {
    return InputDoubleRegister(index).S();
  }

  DoubleRegister InputFloat64Register(size_t index) {
    return InputDoubleRegister(index);
  }

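  // For an immediate 0.0 input these return the integer zero register
  // (wzr/xzr), so a floating-point zero can be stored without first
  // materializing it in an FP register; otherwise they return the FP input.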
  CPURegister InputFloat32OrZeroRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) {
      DCHECK(bit_cast<int32_t>(InputFloat32(index)) == 0);
      return wzr;
    }
    DCHECK(instr_->InputAt(index)->IsFPRegister());
    return InputDoubleRegister(index).S();
  }

  CPURegister InputFloat64OrZeroRegister(size_t index) {
    if (instr_->InputAt(index)->IsImmediate()) {
      DCHECK(bit_cast<int64_t>(InputDouble(index)) == 0);
      return xzr;
    }
    DCHECK(instr_->InputAt(index)->IsDoubleRegister());
    return InputDoubleRegister(index);
  }

  size_t OutputCount() { return instr_->OutputCount(); }

  DoubleRegister OutputFloat32Register() { return OutputDoubleRegister().S(); }

  DoubleRegister OutputFloat64Register() { return OutputDoubleRegister(); }

  Register InputRegister32(size_t index) {
    return ToRegister(instr_->InputAt(index)).W();
  }

  Register InputOrZeroRegister32(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt32(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return wzr;
    }
    return InputRegister32(index);
  }

  Register InputRegister64(size_t index) { return InputRegister(index); }

  Register InputOrZeroRegister64(size_t index) {
    DCHECK(instr_->InputAt(index)->IsRegister() ||
           (instr_->InputAt(index)->IsImmediate() && (InputInt64(index) == 0)));
    if (instr_->InputAt(index)->IsImmediate()) {
      return xzr;
    }
    return InputRegister64(index);
  }

  Operand InputImmediate(size_t index) {
    return ToImmediate(instr_->InputAt(index));
  }

  Operand InputOperand(size_t index) {
    return ToOperand(instr_->InputAt(index));
  }

  Operand InputOperand64(size_t index) { return InputOperand(index); }

  Operand InputOperand32(size_t index) {
    return ToOperand32(instr_->InputAt(index));
  }

  Register OutputRegister64() { return OutputRegister(); }

  Register OutputRegister32() { return ToRegister(instr_->Output()).W(); }

  Operand InputOperand2_32(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand32(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister32(index), LSL, InputInt5(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister32(index), LSR, InputInt5(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister32(index), ASR, InputInt5(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister32(index), ROR, InputInt5(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister32(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister32(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister32(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister32(index), SXTH);
      case kMode_Operand2_R_SXTW:
        return Operand(InputRegister32(index), SXTW);
      case kMode_MRI:
      case kMode_MRR:
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  Operand InputOperand2_64(size_t index) {
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
        return InputOperand64(index);
      case kMode_Operand2_R_LSL_I:
        return Operand(InputRegister64(index), LSL, InputInt6(index + 1));
      case kMode_Operand2_R_LSR_I:
        return Operand(InputRegister64(index), LSR, InputInt6(index + 1));
      case kMode_Operand2_R_ASR_I:
        return Operand(InputRegister64(index), ASR, InputInt6(index + 1));
      case kMode_Operand2_R_ROR_I:
        return Operand(InputRegister64(index), ROR, InputInt6(index + 1));
      case kMode_Operand2_R_UXTB:
        return Operand(InputRegister64(index), UXTB);
      case kMode_Operand2_R_UXTH:
        return Operand(InputRegister64(index), UXTH);
      case kMode_Operand2_R_SXTB:
        return Operand(InputRegister64(index), SXTB);
      case kMode_Operand2_R_SXTH:
        return Operand(InputRegister64(index), SXTH);
      case kMode_Operand2_R_SXTW:
        return Operand(InputRegister64(index), SXTW);
      case kMode_MRI:
      case kMode_MRR:
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

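  // Decodes the memory operand encoded in the instruction's inputs starting
  // at *first_index, and advances *first_index past the inputs consumed.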
  MemOperand MemoryOperand(size_t* first_index) {
    const size_t index = *first_index;
    switch (AddressingModeField::decode(instr_->opcode())) {
      case kMode_None:
      case kMode_Operand2_R_LSR_I:
      case kMode_Operand2_R_ASR_I:
      case kMode_Operand2_R_ROR_I:
      case kMode_Operand2_R_UXTB:
      case kMode_Operand2_R_UXTH:
      case kMode_Operand2_R_SXTB:
      case kMode_Operand2_R_SXTH:
      case kMode_Operand2_R_SXTW:
        break;
      case kMode_Operand2_R_LSL_I:
        *first_index += 3;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1),
                          LSL, InputInt32(index + 2));
      case kMode_MRI:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputInt32(index + 1));
      case kMode_MRR:
        *first_index += 2;
        return MemOperand(InputRegister(index + 0), InputRegister(index + 1));
    }
    UNREACHABLE();
    return MemOperand(no_reg);
  }

  MemOperand MemoryOperand(size_t first_index = 0) {
    return MemoryOperand(&first_index);
  }

  Operand ToOperand(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op));
    }
    return ToImmediate(op);
  }

  Operand ToOperand32(InstructionOperand* op) {
    if (op->IsRegister()) {
      return Operand(ToRegister(op).W());
    }
    return ToImmediate(op);
  }

  Operand ToImmediate(InstructionOperand* operand) {
    Constant constant = ToConstant(operand);
    switch (constant.type()) {
      case Constant::kInt32:
        if (constant.rmode() == RelocInfo::WASM_MEMORY_SIZE_REFERENCE) {
          return Operand(constant.ToInt32(), constant.rmode());
        } else {
          return Operand(constant.ToInt32());
        }
      case Constant::kInt64:
        if (constant.rmode() == RelocInfo::WASM_MEMORY_REFERENCE ||
            constant.rmode() == RelocInfo::WASM_GLOBAL_REFERENCE) {
          return Operand(constant.ToInt64(), constant.rmode());
        } else {
          DCHECK(constant.rmode() != RelocInfo::WASM_MEMORY_SIZE_REFERENCE);
          return Operand(constant.ToInt64());
        }
      case Constant::kFloat32:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat32(), TENURED));
      case Constant::kFloat64:
        return Operand(
            isolate()->factory()->NewNumber(constant.ToFloat64(), TENURED));
      case Constant::kExternalReference:
        return Operand(constant.ToExternalReference());
      case Constant::kHeapObject:
        return Operand(constant.ToHeapObject());
      case Constant::kRpoNumber:
        UNREACHABLE();  // TODO(dcarney): RPO immediates on arm64.
        break;
    }
    UNREACHABLE();
    return Operand(-1);
  }

  MemOperand ToMemOperand(InstructionOperand* op, MacroAssembler* masm) const {
    DCHECK_NOT_NULL(op);
    DCHECK(op->IsStackSlot() || op->IsFPStackSlot());
    return SlotToMemOperand(AllocatedOperand::cast(op)->index(), masm);
  }

  MemOperand SlotToMemOperand(int slot, MacroAssembler* masm) const {
    FrameOffset offset = frame_access_state()->GetFrameOffset(slot);
    if (offset.from_frame_pointer()) {
      int from_sp = offset.offset() + frame_access_state()->GetSPToFPOffset();
      // Convert FP-offsets to SP-offsets if it results in better code.
      if (Assembler::IsImmLSUnscaled(from_sp) ||
          Assembler::IsImmLSScaled(from_sp, LSDoubleWord)) {
        offset = FrameOffset::FromStackPointer(from_sp);
      }
    }
    return MemOperand(offset.from_stack_pointer() ? masm->StackPointer() : fp,
                      offset.offset());
  }
};


namespace {

class OutOfLineLoadNaN32 final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN32(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Fmov(result_, std::numeric_limits<float>::quiet_NaN());
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineLoadNaN64 final : public OutOfLineCode {
 public:
  OutOfLineLoadNaN64(CodeGenerator* gen, DoubleRegister result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final {
    __ Fmov(result_, std::numeric_limits<double>::quiet_NaN());
  }

 private:
  DoubleRegister const result_;
};


class OutOfLineLoadZero final : public OutOfLineCode {
 public:
  OutOfLineLoadZero(CodeGenerator* gen, Register result)
      : OutOfLineCode(gen), result_(result) {}

  void Generate() final { __ Mov(result_, 0); }

 private:
  Register const result_;
};


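// Out-of-line tail of the write barrier: filters out Smis and stores to
// uninteresting pages, then calls RecordWriteStub, saving and restoring lr
// when the caller has not set up a frame.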
class OutOfLineRecordWrite final : public OutOfLineCode {
 public:
  OutOfLineRecordWrite(CodeGenerator* gen, Register object, Operand index,
                       Register value, Register scratch0, Register scratch1,
                       RecordWriteMode mode,
                       UnwindingInfoWriter* unwinding_info_writer)
      : OutOfLineCode(gen),
        object_(object),
        index_(index),
        value_(value),
        scratch0_(scratch0),
        scratch1_(scratch1),
        mode_(mode),
        must_save_lr_(!gen->frame_access_state()->has_frame()),
        unwinding_info_writer_(unwinding_info_writer) {}

  void Generate() final {
    if (mode_ > RecordWriteMode::kValueIsPointer) {
      __ JumpIfSmi(value_, exit());
    }
    __ CheckPageFlagClear(value_, scratch0_,
                          MemoryChunk::kPointersToHereAreInterestingMask,
                          exit());
    RememberedSetAction const remembered_set_action =
        mode_ > RecordWriteMode::kValueIsMap ? EMIT_REMEMBERED_SET
                                             : OMIT_REMEMBERED_SET;
    SaveFPRegsMode const save_fp_mode =
        frame()->DidAllocateDoubleRegisters() ? kSaveFPRegs : kDontSaveFPRegs;
    if (must_save_lr_) {
      // We need to save and restore lr if the frame was elided.
      __ Push(lr);
      unwinding_info_writer_->MarkLinkRegisterOnTopOfStack(__ pc_offset(),
                                                           __ StackPointer());
    }
    RecordWriteStub stub(isolate(), object_, scratch0_, scratch1_,
                         remembered_set_action, save_fp_mode);
    __ Add(scratch1_, object_, index_);
    __ CallStub(&stub);
    if (must_save_lr_) {
      __ Pop(lr);
      unwinding_info_writer_->MarkPopLinkRegisterFromTopOfStack(__ pc_offset());
    }
  }

 private:
  Register const object_;
  Operand const index_;
  Register const value_;
  Register const scratch0_;
  Register const scratch1_;
  RecordWriteMode const mode_;
  bool must_save_lr_;
  UnwindingInfoWriter* const unwinding_info_writer_;
};


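// Maps the platform-independent FlagsCondition onto the arm64 condition code
// used for branches and conditional instructions.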
Condition FlagsConditionToCondition(FlagsCondition condition) {
  switch (condition) {
    case kEqual:
      return eq;
    case kNotEqual:
      return ne;
    case kSignedLessThan:
      return lt;
    case kSignedGreaterThanOrEqual:
      return ge;
    case kSignedLessThanOrEqual:
      return le;
    case kSignedGreaterThan:
      return gt;
    case kUnsignedLessThan:
      return lo;
    case kUnsignedGreaterThanOrEqual:
      return hs;
    case kUnsignedLessThanOrEqual:
      return ls;
    case kUnsignedGreaterThan:
      return hi;
    case kFloatLessThanOrUnordered:
      return lt;
    case kFloatGreaterThanOrEqual:
      return ge;
    case kFloatLessThanOrEqual:
      return ls;
    case kFloatGreaterThanOrUnordered:
      return hi;
    case kFloatLessThan:
      return lo;
    case kFloatGreaterThanOrEqualOrUnordered:
      return hs;
    case kFloatLessThanOrEqualOrUnordered:
      return le;
    case kFloatGreaterThan:
      return gt;
    case kOverflow:
      return vs;
    case kNotOverflow:
      return vc;
    case kUnorderedEqual:
    case kUnorderedNotEqual:
      break;
    case kPositiveOrZero:
      return pl;
    case kNegative:
      return mi;
  }
  UNREACHABLE();
  return nv;
}

}  // namespace

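// Checks |offset| against |length| and branches to |out_of_bounds| on
// failure; a power-of-two immediate length is tested with Tst rather than
// Cmp.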
#define ASSEMBLE_BOUNDS_CHECK(offset, length, out_of_bounds)    \
  do {                                                          \
    if (length.IsImmediate() &&                                 \
        base::bits::IsPowerOfTwo64(length.ImmediateValue())) {  \
      __ Tst(offset, ~(length.ImmediateValue() - 1));           \
      __ B(ne, out_of_bounds);                                  \
    } else {                                                    \
      __ Cmp(offset, length);                                   \
      __ B(hs, out_of_bounds);                                  \
    }                                                           \
  } while (0)

#define ASSEMBLE_CHECKED_LOAD_FLOAT(width)                         \
  do {                                                             \
    auto result = i.OutputFloat##width##Register();                \
    auto buffer = i.InputRegister(0);                              \
    auto offset = i.InputRegister32(1);                            \
    auto length = i.InputOperand32(2);                             \
    auto ool = new (zone()) OutOfLineLoadNaN##width(this, result); \
    ASSEMBLE_BOUNDS_CHECK(offset, length, ool->entry());           \
    __ Ldr(result, MemOperand(buffer, offset, UXTW));              \
    __ Bind(ool->exit());                                          \
  } while (0)

#define ASSEMBLE_CHECKED_LOAD_INTEGER(asm_instr)              \
  do {                                                        \
    auto result = i.OutputRegister32();                       \
    auto buffer = i.InputRegister(0);                         \
    auto offset = i.InputRegister32(1);                       \
    auto length = i.InputOperand32(2);                        \
    auto ool = new (zone()) OutOfLineLoadZero(this, result);  \
    ASSEMBLE_BOUNDS_CHECK(offset, length, ool->entry());      \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));   \
    __ Bind(ool->exit());                                     \
  } while (0)

#define ASSEMBLE_CHECKED_LOAD_INTEGER_64(asm_instr)           \
  do {                                                        \
    auto result = i.OutputRegister();                         \
    auto buffer = i.InputRegister(0);                         \
    auto offset = i.InputRegister32(1);                       \
    auto length = i.InputOperand32(2);                        \
    auto ool = new (zone()) OutOfLineLoadZero(this, result);  \
    ASSEMBLE_BOUNDS_CHECK(offset, length, ool->entry());      \
    __ asm_instr(result, MemOperand(buffer, offset, UXTW));   \
    __ Bind(ool->exit());                                     \
  } while (0)

#define ASSEMBLE_CHECKED_STORE_FLOAT(width)                 \
  do {                                                      \
    auto buffer = i.InputRegister(0);                       \
    auto offset = i.InputRegister32(1);                     \
    auto length = i.InputOperand32(2);                      \
    auto value = i.InputFloat##width##OrZeroRegister(3);    \
    Label done;                                             \
    ASSEMBLE_BOUNDS_CHECK(offset, length, &done);           \
    __ Str(value, MemOperand(buffer, offset, UXTW));        \
    __ Bind(&done);                                         \
  } while (0)

#define ASSEMBLE_CHECKED_STORE_INTEGER(asm_instr)           \
  do {                                                      \
    auto buffer = i.InputRegister(0);                       \
    auto offset = i.InputRegister32(1);                     \
    auto length = i.InputOperand32(2);                      \
    auto value = i.InputOrZeroRegister32(3);                \
    Label done;                                             \
    ASSEMBLE_BOUNDS_CHECK(offset, length, &done);           \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW));  \
    __ Bind(&done);                                         \
  } while (0)

#define ASSEMBLE_CHECKED_STORE_INTEGER_64(asm_instr)        \
  do {                                                      \
    auto buffer = i.InputRegister(0);                       \
    auto offset = i.InputRegister32(1);                     \
    auto length = i.InputOperand32(2);                      \
    auto value = i.InputOrZeroRegister64(3);                \
    Label done;                                             \
    ASSEMBLE_BOUNDS_CHECK(offset, length, &done);           \
    __ asm_instr(value, MemOperand(buffer, offset, UXTW));  \
    __ Bind(&done);                                         \
  } while (0)

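// Emits a shift whose amount is either a register or an immediate taken
// modulo the operand width.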
#define ASSEMBLE_SHIFT(asm_instr, width)                                    \
  do {                                                                      \
    if (instr->InputAt(1)->IsRegister()) {                                  \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   i.InputRegister##width(1));                              \
    } else {                                                                \
      uint32_t imm =                                                        \
          static_cast<uint32_t>(i.InputOperand##width(1).ImmediateValue()); \
      __ asm_instr(i.OutputRegister##width(), i.InputRegister##width(0),    \
                   imm % (width));                                          \
    }                                                                       \
  } while (0)

#define ASSEMBLE_ATOMIC_LOAD_INTEGER(asm_instr)                       \
  do {                                                                \
    __ asm_instr(i.OutputRegister(),                                  \
                 MemOperand(i.InputRegister(0), i.InputRegister(1))); \
    __ Dmb(InnerShareable, BarrierAll);                               \
  } while (0)

#define ASSEMBLE_ATOMIC_STORE_INTEGER(asm_instr)                      \
  do {                                                                \
    __ Dmb(InnerShareable, BarrierAll);                               \
    __ asm_instr(i.InputRegister(2),                                  \
                 MemOperand(i.InputRegister(0), i.InputRegister(1))); \
    __ Dmb(InnerShareable, BarrierAll);                               \
  } while (0)

#define ASSEMBLE_IEEE754_BINOP(name)                                          \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     0, 2);                                                   \
  } while (0)

#define ASSEMBLE_IEEE754_UNOP(name)                                           \
  do {                                                                        \
    FrameScope scope(masm(), StackFrame::MANUAL);                             \
    __ CallCFunction(ExternalReference::ieee754_##name##_function(isolate()), \
                     0, 1);                                                   \
  } while (0)

void CodeGenerator::AssembleDeconstructFrame() {
  const CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
  if (descriptor->IsCFunctionCall() || descriptor->UseNativeStack()) {
    __ Mov(csp, fp);
  } else {
    __ Mov(jssp, fp);
  }
  __ Pop(fp, lr);

  unwinding_info_writer_.MarkFrameDeconstructed(__ pc_offset());
}

void CodeGenerator::AssemblePrepareTailCall() {
  if (frame_access_state()->has_frame()) {
    __ Ldr(lr, MemOperand(fp, StandardFrameConstants::kCallerPCOffset));
    __ Ldr(fp, MemOperand(fp, StandardFrameConstants::kCallerFPOffset));
  }
  frame_access_state()->SetFrameAccessToSP();
}

void CodeGenerator::AssemblePopArgumentsAdaptorFrame(Register args_reg,
                                                     Register scratch1,
                                                     Register scratch2,
                                                     Register scratch3) {
  DCHECK(!AreAliased(args_reg, scratch1, scratch2, scratch3));
  Label done;

  // Check if the current frame is an arguments adaptor frame.
  __ Ldr(scratch1, MemOperand(fp, StandardFrameConstants::kContextOffset));
  __ Cmp(scratch1, Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
  __ B(ne, &done);

  // Load the arguments count from the current arguments adaptor frame (note
  // that it does not include the receiver).
  Register caller_args_count_reg = scratch1;
  __ Ldr(caller_args_count_reg,
         MemOperand(fp, ArgumentsAdaptorFrameConstants::kLengthOffset));
  __ SmiUntag(caller_args_count_reg);

  ParameterCount callee_args_count(args_reg);
  __ PrepareForTailCall(callee_args_count, caller_args_count_reg, scratch2,
                        scratch3);
  __ bind(&done);
}

namespace {

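// Claims or drops stack slots so that the stack pointer lands on the first
// unused slot expected by the tail call.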
void AdjustStackPointerForTailCall(MacroAssembler* masm,
                                   FrameAccessState* state,
                                   int new_slot_above_sp,
                                   bool allow_shrinkage = true) {
  int current_sp_offset = state->GetSPToFPSlotCount() +
                          StandardFrameConstants::kFixedSlotCountAboveFp;
  int stack_slot_delta = new_slot_above_sp - current_sp_offset;
  if (stack_slot_delta > 0) {
    masm->Claim(stack_slot_delta);
    state->IncreaseSPDelta(stack_slot_delta);
  } else if (allow_shrinkage && stack_slot_delta < 0) {
    masm->Drop(-stack_slot_delta);
    state->IncreaseSPDelta(stack_slot_delta);
  }
}

}  // namespace

void CodeGenerator::AssembleTailCallBeforeGap(Instruction* instr,
                                              int first_unused_stack_slot) {
  AdjustStackPointerForTailCall(masm(), frame_access_state(),
                                first_unused_stack_slot, false);
}

void CodeGenerator::AssembleTailCallAfterGap(Instruction* instr,
                                             int first_unused_stack_slot) {
  AdjustStackPointerForTailCall(masm(), frame_access_state(),
                                first_unused_stack_slot);
}

// Assembles an instruction after register allocation, producing machine code.
CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
    Instruction* instr) {
  Arm64OperandConverter i(this, instr);
  InstructionCode opcode = instr->opcode();
  ArchOpcode arch_opcode = ArchOpcodeField::decode(opcode);
  switch (arch_opcode) {
    case kArchCallCodeObject: {
      EnsureSpaceForLazyDeopt();
      if (instr->InputAt(0)->IsImmediate()) {
        __ Call(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        Register target = i.InputRegister(0);
        __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
        __ Call(target);
      }
      RecordCallPosition(instr);
      // TODO(titzer): this is ugly. JSSP should be a caller-save register
      // in this case, but it is not possible to express in the register
      // allocator.
      CallDescriptor::Flags flags(MiscField::decode(opcode));
      if (flags & CallDescriptor::kRestoreJSSP) {
        __ Ldr(jssp, MemOperand(csp));
        __ Mov(csp, jssp);
      }
      if (flags & CallDescriptor::kRestoreCSP) {
        __ Mov(csp, jssp);
        __ AssertCspAligned();
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallCodeObjectFromJSFunction:
    case kArchTailCallCodeObject: {
      if (arch_opcode == kArchTailCallCodeObjectFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      if (instr->InputAt(0)->IsImmediate()) {
        __ Jump(Handle<Code>::cast(i.InputHeapObject(0)),
                RelocInfo::CODE_TARGET);
      } else {
        Register target = i.InputRegister(0);
        __ Add(target, target, Code::kHeaderSize - kHeapObjectTag);
        __ Jump(target);
      }
      unwinding_info_writer_.MarkBlockWillExit();
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchTailCallAddress: {
      CHECK(!instr->InputAt(0)->IsImmediate());
      __ Jump(i.InputRegister(0));
      unwinding_info_writer_.MarkBlockWillExit();
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchCallJSFunction: {
      EnsureSpaceForLazyDeopt();
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        UseScratchRegisterScope scope(masm());
        Register temp = scope.AcquireX();
        __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, temp);
        __ Assert(eq, kWrongFunctionContext);
      }
      __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Call(x10);
      RecordCallPosition(instr);
      // TODO(titzer): this is ugly. JSSP should be a caller-save register
      // in this case, but it is not possible to express in the register
      // allocator.
      CallDescriptor::Flags flags(MiscField::decode(opcode));
      if (flags & CallDescriptor::kRestoreJSSP) {
        __ Ldr(jssp, MemOperand(csp));
        __ Mov(csp, jssp);
      }
      if (flags & CallDescriptor::kRestoreCSP) {
        __ Mov(csp, jssp);
        __ AssertCspAligned();
      }
      frame_access_state()->ClearSPDelta();
      break;
    }
    case kArchTailCallJSFunctionFromJSFunction:
    case kArchTailCallJSFunction: {
      Register func = i.InputRegister(0);
      if (FLAG_debug_code) {
        // Check the function's context matches the context argument.
        UseScratchRegisterScope scope(masm());
        Register temp = scope.AcquireX();
        __ Ldr(temp, FieldMemOperand(func, JSFunction::kContextOffset));
        __ cmp(cp, temp);
        __ Assert(eq, kWrongFunctionContext);
      }
      if (arch_opcode == kArchTailCallJSFunctionFromJSFunction) {
        AssemblePopArgumentsAdaptorFrame(kJavaScriptCallArgCountRegister,
                                         i.TempRegister(0), i.TempRegister(1),
                                         i.TempRegister(2));
      }
      __ Ldr(x10, FieldMemOperand(func, JSFunction::kCodeEntryOffset));
      __ Jump(x10);
      frame_access_state()->ClearSPDelta();
      frame_access_state()->SetFrameAccessToDefault();
      break;
    }
    case kArchPrepareCallCFunction:
      // We don't need kArchPrepareCallCFunction on arm64 as the instruction
      // selector has already performed a Claim to reserve space on the stack
      // and to guarantee correct alignment of the stack pointer.
      UNREACHABLE();
      break;
    case kArchPrepareTailCall:
      AssemblePrepareTailCall();
      break;
    case kArchCallCFunction: {
      int const num_parameters = MiscField::decode(instr->opcode());
      if (instr->InputAt(0)->IsImmediate()) {
        ExternalReference ref = i.InputExternalReference(0);
        __ CallCFunction(ref, num_parameters, 0);
      } else {
        Register func = i.InputRegister(0);
        __ CallCFunction(func, num_parameters, 0);
      }
      // CallCFunction only supports register arguments so we never need to call
      // frame()->ClearOutgoingParameterSlots() here.
      DCHECK(frame_access_state()->sp_delta() == 0);
      break;
    }
    case kArchJmp:
      AssembleArchJump(i.InputRpo(0));
      break;
    case kArchTableSwitch:
      AssembleArchTableSwitch(instr);
      break;
    case kArchLookupSwitch:
      AssembleArchLookupSwitch(instr);
      break;
    case kArchDebugBreak:
      __ Debug("kArchDebugBreak", 0, BREAK);
      break;
    case kArchImpossible:
      __ Abort(kConversionFromImpossibleValue);
      break;
    case kArchComment: {
      Address comment_string = i.InputExternalReference(0).address();
      __ RecordComment(reinterpret_cast<const char*>(comment_string));
      break;
    }
    case kArchNop:
    case kArchThrowTerminator:
      // don't emit code for nops.
      break;
    case kArchDeoptimize: {
      int deopt_state_id =
          BuildTranslation(instr, -1, 0, OutputFrameStateCombine::Ignore());
      Deoptimizer::BailoutType bailout_type =
          Deoptimizer::BailoutType(MiscField::decode(instr->opcode()));
      CodeGenResult result =
          AssembleDeoptimizerCall(deopt_state_id, bailout_type);
      if (result != kSuccess) return result;
      break;
    }
    case kArchRet:
      AssembleReturn();
      break;
    case kArchStackPointer:
      __ mov(i.OutputRegister(), masm()->StackPointer());
      break;
    case kArchFramePointer:
      __ mov(i.OutputRegister(), fp);
      break;
    case kArchParentFramePointer:
      if (frame_access_state()->has_frame()) {
        __ ldr(i.OutputRegister(), MemOperand(fp, 0));
      } else {
        __ mov(i.OutputRegister(), fp);
      }
      break;
    case kArchTruncateDoubleToI:
      __ TruncateDoubleToI(i.OutputRegister(), i.InputDoubleRegister(0));
      break;
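    // Store with write barrier: the store itself is emitted inline, while the
    // RecordWrite call is out of line and only reached if the page flags of
    // the stored-to object indicate that outgoing pointers are interesting.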
    case kArchStoreWithWriteBarrier: {
      RecordWriteMode mode =
          static_cast<RecordWriteMode>(MiscField::decode(instr->opcode()));
      AddressingMode addressing_mode =
          AddressingModeField::decode(instr->opcode());
      Register object = i.InputRegister(0);
      Operand index(0);
      if (addressing_mode == kMode_MRI) {
        index = Operand(i.InputInt64(1));
      } else {
        DCHECK_EQ(addressing_mode, kMode_MRR);
        index = Operand(i.InputRegister(1));
      }
      Register value = i.InputRegister(2);
      Register scratch0 = i.TempRegister(0);
      Register scratch1 = i.TempRegister(1);
      auto ool = new (zone())
          OutOfLineRecordWrite(this, object, index, value, scratch0, scratch1,
                               mode, &unwinding_info_writer_);
      __ Str(value, MemOperand(object, index));
      __ CheckPageFlagSet(object, scratch0,
                          MemoryChunk::kPointersFromHereAreInterestingMask,
                          ool->entry());
      __ Bind(ool->exit());
      break;
    }
    case kArchStackSlot: {
      FrameOffset offset =
          frame_access_state()->GetFrameOffset(i.InputInt32(0));
      Register base;
      if (offset.from_stack_pointer()) {
        base = __ StackPointer();
      } else {
        base = fp;
      }
      __ Add(i.OutputRegister(0), base, Operand(offset.offset()));
      break;
    }
    case kIeee754Float64Acos:
      ASSEMBLE_IEEE754_UNOP(acos);
      break;
    case kIeee754Float64Acosh:
      ASSEMBLE_IEEE754_UNOP(acosh);
      break;
    case kIeee754Float64Asin:
      ASSEMBLE_IEEE754_UNOP(asin);
      break;
    case kIeee754Float64Asinh:
      ASSEMBLE_IEEE754_UNOP(asinh);
      break;
    case kIeee754Float64Atan:
      ASSEMBLE_IEEE754_UNOP(atan);
      break;
    case kIeee754Float64Atanh:
      ASSEMBLE_IEEE754_UNOP(atanh);
      break;
    case kIeee754Float64Atan2:
      ASSEMBLE_IEEE754_BINOP(atan2);
      break;
    case kIeee754Float64Cos:
      ASSEMBLE_IEEE754_UNOP(cos);
      break;
    case kIeee754Float64Cosh:
      ASSEMBLE_IEEE754_UNOP(cosh);
      break;
    case kIeee754Float64Cbrt:
      ASSEMBLE_IEEE754_UNOP(cbrt);
      break;
    case kIeee754Float64Exp:
      ASSEMBLE_IEEE754_UNOP(exp);
      break;
    case kIeee754Float64Expm1:
      ASSEMBLE_IEEE754_UNOP(expm1);
      break;
    case kIeee754Float64Log:
      ASSEMBLE_IEEE754_UNOP(log);
      break;
    case kIeee754Float64Log1p:
      ASSEMBLE_IEEE754_UNOP(log1p);
      break;
    case kIeee754Float64Log2:
      ASSEMBLE_IEEE754_UNOP(log2);
      break;
    case kIeee754Float64Log10:
      ASSEMBLE_IEEE754_UNOP(log10);
      break;
    case kIeee754Float64Pow: {
      MathPowStub stub(isolate(), MathPowStub::DOUBLE);
      __ CallStub(&stub);
      break;
    }
    case kIeee754Float64Sin:
      ASSEMBLE_IEEE754_UNOP(sin);
      break;
    case kIeee754Float64Sinh:
      ASSEMBLE_IEEE754_UNOP(sinh);
      break;
    case kIeee754Float64Tan:
      ASSEMBLE_IEEE754_UNOP(tan);
      break;
    case kIeee754Float64Tanh:
      ASSEMBLE_IEEE754_UNOP(tanh);
      break;
    case kArm64Float32RoundDown:
      __ Frintm(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float64RoundDown:
      __ Frintm(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kArm64Float32RoundUp:
      __ Frintp(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float64RoundUp:
      __ Frintp(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kArm64Float64RoundTiesAway:
      __ Frinta(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kArm64Float32RoundTruncate:
      __ Frintz(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float64RoundTruncate:
      __ Frintz(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kArm64Float32RoundTiesEven:
      __ Frintn(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float64RoundTiesEven:
      __ Frintn(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
      break;
    case kArm64Add:
      if (FlagsModeField::decode(opcode) != kFlags_none) {
        __ Adds(i.OutputRegister(), i.InputOrZeroRegister64(0),
                i.InputOperand2_64(1));
      } else {
        __ Add(i.OutputRegister(), i.InputOrZeroRegister64(0),
               i.InputOperand2_64(1));
      }
      break;
    case kArm64Add32:
      if (FlagsModeField::decode(opcode) != kFlags_none) {
        __ Adds(i.OutputRegister32(), i.InputOrZeroRegister32(0),
                i.InputOperand2_32(1));
      } else {
        __ Add(i.OutputRegister32(), i.InputOrZeroRegister32(0),
               i.InputOperand2_32(1));
      }
      break;
    case kArm64And:
      if (FlagsModeField::decode(opcode) != kFlags_none) {
        // The ands instruction only sets N and Z, so only the following
        // conditions make sense.
        DCHECK(FlagsConditionField::decode(opcode) == kEqual ||
               FlagsConditionField::decode(opcode) == kNotEqual ||
               FlagsConditionField::decode(opcode) == kPositiveOrZero ||
               FlagsConditionField::decode(opcode) == kNegative);
        __ Ands(i.OutputRegister(), i.InputOrZeroRegister64(0),
                i.InputOperand2_64(1));
      } else {
        __ And(i.OutputRegister(), i.InputOrZeroRegister64(0),
               i.InputOperand2_64(1));
      }
      break;
    case kArm64And32:
      if (FlagsModeField::decode(opcode) != kFlags_none) {
        // The ands instruction only sets N and Z, so only the following
        // conditions make sense.
        DCHECK(FlagsConditionField::decode(opcode) == kEqual ||
               FlagsConditionField::decode(opcode) == kNotEqual ||
               FlagsConditionField::decode(opcode) == kPositiveOrZero ||
               FlagsConditionField::decode(opcode) == kNegative);
        __ Ands(i.OutputRegister32(), i.InputOrZeroRegister32(0),
                i.InputOperand2_32(1));
      } else {
        __ And(i.OutputRegister32(), i.InputOrZeroRegister32(0),
               i.InputOperand2_32(1));
      }
      break;
    case kArm64Bic:
      __ Bic(i.OutputRegister(), i.InputOrZeroRegister64(0),
             i.InputOperand2_64(1));
      break;
    case kArm64Bic32:
      __ Bic(i.OutputRegister32(), i.InputOrZeroRegister32(0),
             i.InputOperand2_32(1));
      break;
    case kArm64Mul:
      __ Mul(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      break;
    case kArm64Mul32:
      __ Mul(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
      break;
    case kArm64Smull:
      __ Smull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
      break;
    case kArm64Umull:
      __ Umull(i.OutputRegister(), i.InputRegister32(0), i.InputRegister32(1));
      break;
    case kArm64Madd:
      __ Madd(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
              i.InputRegister(2));
      break;
    case kArm64Madd32:
      __ Madd(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
              i.InputRegister32(2));
      break;
    case kArm64Msub:
      __ Msub(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1),
              i.InputRegister(2));
      break;
    case kArm64Msub32:
      __ Msub(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1),
              i.InputRegister32(2));
      break;
    case kArm64Mneg:
      __ Mneg(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      break;
    case kArm64Mneg32:
      __ Mneg(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
      break;
    case kArm64Idiv:
      __ Sdiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      break;
    case kArm64Idiv32:
      __ Sdiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
      break;
    case kArm64Udiv:
      __ Udiv(i.OutputRegister(), i.InputRegister(0), i.InputRegister(1));
      break;
    case kArm64Udiv32:
      __ Udiv(i.OutputRegister32(), i.InputRegister32(0), i.InputRegister32(1));
      break;
    case kArm64Imod: {
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireX();
      __ Sdiv(temp, i.InputRegister(0), i.InputRegister(1));
      __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
      break;
    }
    case kArm64Imod32: {
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireW();
      __ Sdiv(temp, i.InputRegister32(0), i.InputRegister32(1));
      __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
              i.InputRegister32(0));
      break;
    }
    case kArm64Umod: {
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireX();
      __ Udiv(temp, i.InputRegister(0), i.InputRegister(1));
      __ Msub(i.OutputRegister(), temp, i.InputRegister(1), i.InputRegister(0));
      break;
    }
    case kArm64Umod32: {
      UseScratchRegisterScope scope(masm());
      Register temp = scope.AcquireW();
      __ Udiv(temp, i.InputRegister32(0), i.InputRegister32(1));
      __ Msub(i.OutputRegister32(), temp, i.InputRegister32(1),
              i.InputRegister32(0));
      break;
    }
    case kArm64Not:
      __ Mvn(i.OutputRegister(), i.InputOperand(0));
      break;
    case kArm64Not32:
      __ Mvn(i.OutputRegister32(), i.InputOperand32(0));
      break;
    case kArm64Or:
      __ Orr(i.OutputRegister(), i.InputOrZeroRegister64(0),
             i.InputOperand2_64(1));
      break;
    case kArm64Or32:
      __ Orr(i.OutputRegister32(), i.InputOrZeroRegister32(0),
             i.InputOperand2_32(1));
      break;
    case kArm64Orn:
      __ Orn(i.OutputRegister(), i.InputOrZeroRegister64(0),
             i.InputOperand2_64(1));
      break;
    case kArm64Orn32:
      __ Orn(i.OutputRegister32(), i.InputOrZeroRegister32(0),
             i.InputOperand2_32(1));
      break;
    case kArm64Eor:
      __ Eor(i.OutputRegister(), i.InputOrZeroRegister64(0),
             i.InputOperand2_64(1));
      break;
    case kArm64Eor32:
      __ Eor(i.OutputRegister32(), i.InputOrZeroRegister32(0),
             i.InputOperand2_32(1));
      break;
    case kArm64Eon:
      __ Eon(i.OutputRegister(), i.InputOrZeroRegister64(0),
             i.InputOperand2_64(1));
      break;
    case kArm64Eon32:
      __ Eon(i.OutputRegister32(), i.InputOrZeroRegister32(0),
             i.InputOperand2_32(1));
      break;
    case kArm64Sub:
      if (FlagsModeField::decode(opcode) != kFlags_none) {
        __ Subs(i.OutputRegister(), i.InputOrZeroRegister64(0),
                i.InputOperand2_64(1));
      } else {
        __ Sub(i.OutputRegister(), i.InputOrZeroRegister64(0),
               i.InputOperand2_64(1));
      }
      break;
    case kArm64Sub32:
      if (FlagsModeField::decode(opcode) != kFlags_none) {
        __ Subs(i.OutputRegister32(), i.InputOrZeroRegister32(0),
                i.InputOperand2_32(1));
      } else {
        __ Sub(i.OutputRegister32(), i.InputOrZeroRegister32(0),
               i.InputOperand2_32(1));
      }
      break;
    case kArm64Lsl:
      ASSEMBLE_SHIFT(Lsl, 64);
      break;
    case kArm64Lsl32:
      ASSEMBLE_SHIFT(Lsl, 32);
      break;
    case kArm64Lsr:
      ASSEMBLE_SHIFT(Lsr, 64);
      break;
    case kArm64Lsr32:
      ASSEMBLE_SHIFT(Lsr, 32);
      break;
    case kArm64Asr:
      ASSEMBLE_SHIFT(Asr, 64);
      break;
    case kArm64Asr32:
      ASSEMBLE_SHIFT(Asr, 32);
      break;
    case kArm64Ror:
      ASSEMBLE_SHIFT(Ror, 64);
      break;
    case kArm64Ror32:
      ASSEMBLE_SHIFT(Ror, 32);
      break;
    case kArm64Mov32:
      __ Mov(i.OutputRegister32(), i.InputRegister32(0));
      break;
    case kArm64Sxtb32:
      __ Sxtb(i.OutputRegister32(), i.InputRegister32(0));
      break;
    case kArm64Sxth32:
      __ Sxth(i.OutputRegister32(), i.InputRegister32(0));
      break;
    case kArm64Sxtw:
      __ Sxtw(i.OutputRegister(), i.InputRegister32(0));
      break;
    case kArm64Sbfx32:
      __ Sbfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
              i.InputInt5(2));
      break;
    case kArm64Ubfx:
      __ Ubfx(i.OutputRegister(), i.InputRegister(0), i.InputInt6(1),
              i.InputInt6(2));
      break;
    case kArm64Ubfx32:
      __ Ubfx(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
              i.InputInt5(2));
      break;
    case kArm64Ubfiz32:
      __ Ubfiz(i.OutputRegister32(), i.InputRegister32(0), i.InputInt5(1),
               i.InputInt5(2));
      break;
    case kArm64Bfi:
      __ Bfi(i.OutputRegister(), i.InputRegister(1), i.InputInt6(2),
             i.InputInt6(3));
      break;
    case kArm64TestAndBranch32:
    case kArm64TestAndBranch:
      // Pseudo instructions turned into tbz/tbnz in AssembleArchBranch.
      break;
    case kArm64CompareAndBranch32:
    case kArm64CompareAndBranch:
      // Pseudo instruction turned into cbz/cbnz in AssembleArchBranch.
      break;
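    // arm64 code generation juggles two stack pointers: csp (the C stack
    // pointer, kept aligned) and jssp (the JavaScript stack pointer). The
    // Claim/Poke opcodes below operate on whichever one the opcode names.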
    case kArm64ClaimCSP: {
      int count = RoundUp(i.InputInt32(0), 2);
      Register prev = __ StackPointer();
      if (prev.Is(jssp)) {
        // TODO(titzer): make this a macro-assembler method.
        // Align the CSP and store the previous JSSP on the stack.
        UseScratchRegisterScope scope(masm());
        Register tmp = scope.AcquireX();

        int sp_alignment = __ ActivationFrameAlignment();
        __ Sub(tmp, jssp, kPointerSize);
        __ And(tmp, tmp, Operand(~static_cast<uint64_t>(sp_alignment - 1)));
        __ Mov(csp, tmp);
        __ Str(jssp, MemOperand(csp));
        if (count > 0) {
          __ SetStackPointer(csp);
          __ Claim(count);
          __ SetStackPointer(prev);
        }
      } else {
        __ AssertCspAligned();
        if (count > 0) {
          __ Claim(count);
          frame_access_state()->IncreaseSPDelta(count);
        }
      }
      break;
    }
    case kArm64ClaimJSSP: {
      int count = i.InputInt32(0);
      if (csp.Is(__ StackPointer())) {
        // No JSSP is set up. Compute it from the CSP.
        __ AssertCspAligned();
        if (count > 0) {
          int even = RoundUp(count, 2);
          __ Sub(jssp, csp, count * kPointerSize);
          __ Sub(csp, csp, even * kPointerSize);  // Must always be aligned.
          frame_access_state()->IncreaseSPDelta(even);
        } else {
          __ Mov(jssp, csp);
        }
      } else {
        // JSSP is the current stack pointer, just use regular Claim().
        __ Claim(count);
        frame_access_state()->IncreaseSPDelta(count);
      }
      break;
    }
    case kArm64PokeCSP:  // fall through
    case kArm64PokeJSSP: {
      Register prev = __ StackPointer();
      __ SetStackPointer(arch_opcode == kArm64PokeCSP ? csp : jssp);
      Operand operand(i.InputInt32(1) * kPointerSize);
      if (instr->InputAt(0)->IsFPRegister()) {
        __ Poke(i.InputFloat64Register(0), operand);
      } else {
        __ Poke(i.InputRegister(0), operand);
      }
      __ SetStackPointer(prev);
      break;
    }
    case kArm64PokePair: {
      int slot = i.InputInt32(2) - 1;
      if (instr->InputAt(0)->IsFPRegister()) {
        __ PokePair(i.InputFloat64Register(1), i.InputFloat64Register(0),
                    slot * kPointerSize);
      } else {
        __ PokePair(i.InputRegister(1), i.InputRegister(0),
                    slot * kPointerSize);
      }
      break;
    }
    case kArm64Clz:
      __ Clz(i.OutputRegister64(), i.InputRegister64(0));
      break;
    case kArm64Clz32:
      __ Clz(i.OutputRegister32(), i.InputRegister32(0));
      break;
    case kArm64Rbit:
      __ Rbit(i.OutputRegister64(), i.InputRegister64(0));
      break;
    case kArm64Rbit32:
      __ Rbit(i.OutputRegister32(), i.InputRegister32(0));
      break;
    case kArm64Cmp:
      __ Cmp(i.InputOrZeroRegister64(0), i.InputOperand2_64(1));
      break;
    case kArm64Cmp32:
      __ Cmp(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
      break;
    case kArm64Cmn:
      __ Cmn(i.InputOrZeroRegister64(0), i.InputOperand2_64(1));
      break;
    case kArm64Cmn32:
      __ Cmn(i.InputOrZeroRegister32(0), i.InputOperand2_32(1));
      break;
    case kArm64Tst:
      __ Tst(i.InputOrZeroRegister64(0), i.InputOperand(1));
      break;
    case kArm64Tst32:
      __ Tst(i.InputOrZeroRegister32(0), i.InputOperand32(1));
      break;
    case kArm64Float32Cmp:
      if (instr->InputAt(1)->IsFPRegister()) {
        __ Fcmp(i.InputFloat32Register(0), i.InputFloat32Register(1));
      } else {
        DCHECK(instr->InputAt(1)->IsImmediate());
        // 0.0 is the only immediate supported by fcmp instructions.
        DCHECK(i.InputFloat32(1) == 0.0f);
        __ Fcmp(i.InputFloat32Register(0), i.InputFloat32(1));
      }
      break;
    case kArm64Float32Add:
      __ Fadd(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      break;
    case kArm64Float32Sub:
      __ Fsub(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      break;
    case kArm64Float32Mul:
      __ Fmul(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      break;
    case kArm64Float32Div:
      __ Fdiv(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      break;
    case kArm64Float32Abs:
      __ Fabs(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float32Neg:
      __ Fneg(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float32Sqrt:
      __ Fsqrt(i.OutputFloat32Register(), i.InputFloat32Register(0));
      break;
    case kArm64Float64Cmp:
      if (instr->InputAt(1)->IsFPRegister()) {
        __ Fcmp(i.InputDoubleRegister(0), i.InputDoubleRegister(1));
      } else {
        DCHECK(instr->InputAt(1)->IsImmediate());
        // 0.0 is the only immediate supported by fcmp instructions.
        DCHECK(i.InputDouble(1) == 0.0);
        __ Fcmp(i.InputDoubleRegister(0), i.InputDouble(1));
      }
      break;
    case kArm64Float64Add:
      __ Fadd(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
              i.InputDoubleRegister(1));
      break;
    case kArm64Float64Sub:
      __ Fsub(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
              i.InputDoubleRegister(1));
      break;
    case kArm64Float64Mul:
      __ Fmul(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
              i.InputDoubleRegister(1));
      break;
    case kArm64Float64Div:
      __ Fdiv(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
              i.InputDoubleRegister(1));
      break;
    case kArm64Float64Mod: {
      // TODO(dcarney): implement directly. See note in lithium-codegen-arm64.cc
      FrameScope scope(masm(), StackFrame::MANUAL);
      DCHECK(d0.is(i.InputDoubleRegister(0)));
      DCHECK(d1.is(i.InputDoubleRegister(1)));
      DCHECK(d0.is(i.OutputDoubleRegister()));
      // TODO(dcarney): make sure this saves all relevant registers.
      __ CallCFunction(ExternalReference::mod_two_doubles_operation(isolate()),
                       0, 2);
      break;
    }
    case kArm64Float32Max: {
      __ Fmax(i.OutputFloat32Register(), i.InputFloat32Register(0),
              i.InputFloat32Register(1));
      break;
    }
    case kArm64Float64Max: {
      __ Fmax(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
              i.InputDoubleRegister(1));
Ben Murdoch014dc512016-03-22 12:00:34 +00001374 break;
Ben Murdochf91f0612016-11-29 16:50:11 +00001375 }
1376 case kArm64Float32Min: {
1377 __ Fmin(i.OutputFloat32Register(), i.InputFloat32Register(0),
1378 i.InputFloat32Register(1));
1379 break;
1380 }
1381 case kArm64Float64Min: {
1382 __ Fmin(i.OutputDoubleRegister(), i.InputDoubleRegister(0),
1383 i.InputDoubleRegister(1));
1384 break;
1385 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001386 case kArm64Float64Abs:
1387 __ Fabs(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1388 break;
1389 case kArm64Float64Neg:
1390 __ Fneg(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1391 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001392 case kArm64Float64Sqrt:
1393 __ Fsqrt(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1394 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001395 case kArm64Float32ToFloat64:
1396 __ Fcvt(i.OutputDoubleRegister(), i.InputDoubleRegister(0).S());
1397 break;
1398 case kArm64Float64ToFloat32:
1399 __ Fcvt(i.OutputDoubleRegister().S(), i.InputDoubleRegister(0));
1400 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001401 case kArm64Float32ToInt32:
1402 __ Fcvtzs(i.OutputRegister32(), i.InputFloat32Register(0));
Ben Murdochf91f0612016-11-29 16:50:11 +00001403 // Avoid INT32_MAX as an overflow indicator and use INT32_MIN instead,
1404 // because INT32_MIN allows easier out-of-bounds detection.
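      // Cmn computes result + 1, which overflows exactly when the result is
      // INT32_MAX; Csinc then bumps the result by one in that case, wrapping
      // it to INT32_MIN.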
1405 __ Cmn(i.OutputRegister32(), 1);
1406 __ Csinc(i.OutputRegister32(), i.OutputRegister32(), i.OutputRegister32(),
1407 vc);
Ben Murdoch109988c2016-05-18 11:27:45 +01001408 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001409 case kArm64Float64ToInt32:
1410 __ Fcvtzs(i.OutputRegister32(), i.InputDoubleRegister(0));
1411 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001412 case kArm64Float32ToUint32:
1413 __ Fcvtzu(i.OutputRegister32(), i.InputFloat32Register(0));
Ben Murdochf91f0612016-11-29 16:50:11 +00001414 // Avoid UINT32_MAX as an overflow indicator and use 0 instead,
1415 // because 0 allows easier out-of-bounds detection.
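      // Cmn sets the carry flag only when result + 1 wraps, i.e. when the
      // result is UINT32_MAX; Adc then adds that carry, turning UINT32_MAX
      // into 0.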
1416 __ Cmn(i.OutputRegister32(), 1);
1417 __ Adc(i.OutputRegister32(), i.OutputRegister32(), Operand(0));
Ben Murdoch109988c2016-05-18 11:27:45 +01001418 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001419 case kArm64Float64ToUint32:
1420 __ Fcvtzu(i.OutputRegister32(), i.InputDoubleRegister(0));
1421 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001422 case kArm64Float32ToInt64:
1423 __ Fcvtzs(i.OutputRegister64(), i.InputFloat32Register(0));
1424 if (i.OutputCount() > 1) {
1425 __ Mov(i.OutputRegister(1), 1);
1426 Label done;
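        // The Cmp/Ccmp/Fccmp chain sets the V flag when the result saturated
        // to INT64_MIN or INT64_MAX, or when the input was NaN; in those
        // cases success is reported only if the input was exactly INT64_MIN.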
1427 __ Cmp(i.OutputRegister(0), 1);
1428 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1429 __ Fccmp(i.InputFloat32Register(0), i.InputFloat32Register(0), VFlag,
1430 vc);
1431 __ B(vc, &done);
1432 __ Fcmp(i.InputFloat32Register(0), static_cast<float>(INT64_MIN));
1433 __ Cset(i.OutputRegister(1), eq);
1434 __ Bind(&done);
1435 }
1436 break;
1437 case kArm64Float64ToInt64:
1438 __ Fcvtzs(i.OutputRegister(0), i.InputDoubleRegister(0));
1439 if (i.OutputCount() > 1) {
1440 __ Mov(i.OutputRegister(1), 1);
1441 Label done;
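        // Same saturation/NaN check as the float32 case above.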
1442 __ Cmp(i.OutputRegister(0), 1);
1443 __ Ccmp(i.OutputRegister(0), -1, VFlag, vc);
1444 __ Fccmp(i.InputDoubleRegister(0), i.InputDoubleRegister(0), VFlag, vc);
1445 __ B(vc, &done);
1446 __ Fcmp(i.InputDoubleRegister(0), static_cast<double>(INT64_MIN));
1447 __ Cset(i.OutputRegister(1), eq);
1448 __ Bind(&done);
1449 }
1450 break;
1451 case kArm64Float32ToUint64:
1452 __ Fcvtzu(i.OutputRegister64(), i.InputFloat32Register(0));
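        // Success requires the input to be greater than -1.0 (ruling out NaN
        // and negative inputs) and the result not to have saturated to
        // UINT64_MAX.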
1453 if (i.OutputCount() > 1) {
1454 __ Fcmp(i.InputFloat32Register(0), -1.0);
1455 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1456 __ Cset(i.OutputRegister(1), ne);
1457 }
1458 break;
1459 case kArm64Float64ToUint64:
1460 __ Fcvtzu(i.OutputRegister64(), i.InputDoubleRegister(0));
1461 if (i.OutputCount() > 1) {
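        // Same range/NaN check as the float32 case above.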
1462 __ Fcmp(i.InputDoubleRegister(0), -1.0);
1463 __ Ccmp(i.OutputRegister(0), -1, ZFlag, gt);
1464 __ Cset(i.OutputRegister(1), ne);
1465 }
1466 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001467 case kArm64Int32ToFloat32:
1468 __ Scvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1469 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001470 case kArm64Int32ToFloat64:
1471 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1472 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001473 case kArm64Int64ToFloat32:
1474 __ Scvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1475 break;
1476 case kArm64Int64ToFloat64:
1477 __ Scvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1478 break;
Ben Murdoch109988c2016-05-18 11:27:45 +01001479 case kArm64Uint32ToFloat32:
1480 __ Ucvtf(i.OutputFloat32Register(), i.InputRegister32(0));
1481 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001482 case kArm64Uint32ToFloat64:
1483 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister32(0));
1484 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001485 case kArm64Uint64ToFloat32:
1486 __ Ucvtf(i.OutputDoubleRegister().S(), i.InputRegister64(0));
1487 break;
1488 case kArm64Uint64ToFloat64:
1489 __ Ucvtf(i.OutputDoubleRegister(), i.InputRegister64(0));
1490 break;
1491 case kArm64Float64ExtractLowWord32:
1492 __ Fmov(i.OutputRegister32(), i.InputFloat32Register(0));
1493 break;
1494 case kArm64Float64ExtractHighWord32:
1495 // TODO(arm64): This should use MOV (to general) when NEON is supported.
1496 __ Fmov(i.OutputRegister(), i.InputFloat64Register(0));
1497 __ Lsr(i.OutputRegister(), i.OutputRegister(), 32);
1498 break;
1499 case kArm64Float64InsertLowWord32: {
1500 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1501 UseScratchRegisterScope scope(masm());
1502 Register tmp = scope.AcquireX();
1503 __ Fmov(tmp, i.InputFloat64Register(0));
1504 __ Bfi(tmp, i.InputRegister(1), 0, 32);
1505 __ Fmov(i.OutputFloat64Register(), tmp);
1506 break;
1507 }
1508 case kArm64Float64InsertHighWord32: {
1509 // TODO(arm64): This should use MOV (from general) when NEON is supported.
1510 UseScratchRegisterScope scope(masm());
1511 Register tmp = scope.AcquireX();
1512 __ Fmov(tmp.W(), i.InputFloat32Register(0));
1513 __ Bfi(tmp, i.InputRegister(1), 32, 32);
1514 __ Fmov(i.OutputFloat64Register(), tmp);
1515 break;
1516 }
1517 case kArm64Float64MoveU64:
1518 __ Fmov(i.OutputFloat64Register(), i.InputRegister(0));
1519 break;
Ben Murdoch13e2dad2016-09-16 13:49:30 +01001520 case kArm64Float64SilenceNaN:
1521 __ CanonicalizeNaN(i.OutputDoubleRegister(), i.InputDoubleRegister(0));
1522 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001523 case kArm64U64MoveFloat64:
1524 __ Fmov(i.OutputRegister(), i.InputDoubleRegister(0));
1525 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001526 case kArm64Ldrb:
1527 __ Ldrb(i.OutputRegister(), i.MemoryOperand());
1528 break;
1529 case kArm64Ldrsb:
1530 __ Ldrsb(i.OutputRegister(), i.MemoryOperand());
1531 break;
1532 case kArm64Strb:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001533 __ Strb(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001534 break;
1535 case kArm64Ldrh:
1536 __ Ldrh(i.OutputRegister(), i.MemoryOperand());
1537 break;
1538 case kArm64Ldrsh:
1539 __ Ldrsh(i.OutputRegister(), i.MemoryOperand());
1540 break;
1541 case kArm64Strh:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001542 __ Strh(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001543 break;
Ben Murdochf91f0612016-11-29 16:50:11 +00001544 case kArm64Ldrsw:
1545 __ Ldrsw(i.OutputRegister(), i.MemoryOperand());
1546 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001547 case kArm64LdrW:
1548 __ Ldr(i.OutputRegister32(), i.MemoryOperand());
1549 break;
1550 case kArm64StrW:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001551 __ Str(i.InputOrZeroRegister32(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001552 break;
1553 case kArm64Ldr:
1554 __ Ldr(i.OutputRegister(), i.MemoryOperand());
1555 break;
1556 case kArm64Str:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001557 __ Str(i.InputOrZeroRegister64(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001558 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001559 case kArm64LdrS:
1560 __ Ldr(i.OutputDoubleRegister().S(), i.MemoryOperand());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001561 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001562 case kArm64StrS:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001563 __ Str(i.InputFloat32OrZeroRegister(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001564 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001565 case kArm64LdrD:
1566 __ Ldr(i.OutputDoubleRegister(), i.MemoryOperand());
1567 break;
1568 case kArm64StrD:
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001569 __ Str(i.InputFloat64OrZeroRegister(0), i.MemoryOperand(1));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001570 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001571 case kCheckedLoadInt8:
1572 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsb);
1573 break;
1574 case kCheckedLoadUint8:
1575 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrb);
1576 break;
1577 case kCheckedLoadInt16:
1578 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrsh);
1579 break;
1580 case kCheckedLoadUint16:
1581 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldrh);
1582 break;
1583 case kCheckedLoadWord32:
1584 ASSEMBLE_CHECKED_LOAD_INTEGER(Ldr);
1585 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001586 case kCheckedLoadWord64:
1587 ASSEMBLE_CHECKED_LOAD_INTEGER_64(Ldr);
1588 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001589 case kCheckedLoadFloat32:
1590 ASSEMBLE_CHECKED_LOAD_FLOAT(32);
1591 break;
1592 case kCheckedLoadFloat64:
1593 ASSEMBLE_CHECKED_LOAD_FLOAT(64);
1594 break;
1595 case kCheckedStoreWord8:
1596 ASSEMBLE_CHECKED_STORE_INTEGER(Strb);
1597 break;
1598 case kCheckedStoreWord16:
1599 ASSEMBLE_CHECKED_STORE_INTEGER(Strh);
1600 break;
1601 case kCheckedStoreWord32:
1602 ASSEMBLE_CHECKED_STORE_INTEGER(Str);
1603 break;
Ben Murdoch014dc512016-03-22 12:00:34 +00001604 case kCheckedStoreWord64:
1605 ASSEMBLE_CHECKED_STORE_INTEGER_64(Str);
1606 break;
Emily Bernier958fae72015-03-24 16:35:39 -04001607 case kCheckedStoreFloat32:
1608 ASSEMBLE_CHECKED_STORE_FLOAT(32);
1609 break;
1610 case kCheckedStoreFloat64:
1611 ASSEMBLE_CHECKED_STORE_FLOAT(64);
1612 break;
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001613 case kAtomicLoadInt8:
1614 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrsb);
1615 break;
1616 case kAtomicLoadUint8:
1617 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrb);
1618 break;
1619 case kAtomicLoadInt16:
1620 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrsh);
1621 break;
1622 case kAtomicLoadUint16:
1623 ASSEMBLE_ATOMIC_LOAD_INTEGER(Ldrh);
1624 break;
1625 case kAtomicLoadWord32:
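      // The word-sized atomic load is a plain Ldr followed by a full data
      // memory barrier; the word-sized store below is bracketed by barriers
      // on both sides. Narrower widths go through the helper macros above.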
1626 __ Ldr(i.OutputRegister32(),
1627 MemOperand(i.InputRegister(0), i.InputRegister(1)));
1628 __ Dmb(InnerShareable, BarrierAll);
1629 break;
1630 case kAtomicStoreWord8:
1631 ASSEMBLE_ATOMIC_STORE_INTEGER(Strb);
1632 break;
1633 case kAtomicStoreWord16:
1634 ASSEMBLE_ATOMIC_STORE_INTEGER(Strh);
1635 break;
1636 case kAtomicStoreWord32:
1637 __ Dmb(InnerShareable, BarrierAll);
1638 __ Str(i.InputRegister32(2),
1639 MemOperand(i.InputRegister(0), i.InputRegister(1)));
1640 __ Dmb(InnerShareable, BarrierAll);
1641 break;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001642 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001643 return kSuccess;
Ben Murdoch014dc512016-03-22 12:00:34 +00001644} // NOLINT(readability/fn_size)
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001645
1646
1647// Assemble branches after this instruction.
Emily Bernier958fae72015-03-24 16:35:39 -04001648void CodeGenerator::AssembleArchBranch(Instruction* instr, BranchInfo* branch) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001649 Arm64OperandConverter i(this, instr);
Emily Bernier958fae72015-03-24 16:35:39 -04001650 Label* tlabel = branch->true_label;
1651 Label* flabel = branch->false_label;
1652 FlagsCondition condition = branch->condition;
1653 ArchOpcode opcode = instr->arch_opcode();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001654
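  // Fused compare-and-branch / test-and-branch opcodes map directly onto
  // Cbz/Cbnz and Tbz/Tbnz; anything else branches on the flags set by the
  // preceding instruction.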
Emily Bernier958fae72015-03-24 16:35:39 -04001655 if (opcode == kArm64CompareAndBranch32) {
1656 switch (condition) {
1657 case kEqual:
1658 __ Cbz(i.InputRegister32(0), tlabel);
1659 break;
1660 case kNotEqual:
1661 __ Cbnz(i.InputRegister32(0), tlabel);
1662 break;
1663 default:
1664 UNREACHABLE();
1665 }
Ben Murdoch13e2dad2016-09-16 13:49:30 +01001666 } else if (opcode == kArm64CompareAndBranch) {
1667 switch (condition) {
1668 case kEqual:
1669 __ Cbz(i.InputRegister64(0), tlabel);
1670 break;
1671 case kNotEqual:
1672 __ Cbnz(i.InputRegister64(0), tlabel);
1673 break;
1674 default:
1675 UNREACHABLE();
1676 }
Emily Bernier958fae72015-03-24 16:35:39 -04001677 } else if (opcode == kArm64TestAndBranch32) {
1678 switch (condition) {
1679 case kEqual:
1680 __ Tbz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1681 break;
1682 case kNotEqual:
1683 __ Tbnz(i.InputRegister32(0), i.InputInt5(1), tlabel);
1684 break;
1685 default:
1686 UNREACHABLE();
1687 }
1688 } else if (opcode == kArm64TestAndBranch) {
1689 switch (condition) {
1690 case kEqual:
1691 __ Tbz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1692 break;
1693 case kNotEqual:
1694 __ Tbnz(i.InputRegister64(0), i.InputInt6(1), tlabel);
1695 break;
1696 default:
1697 UNREACHABLE();
1698 }
1699 } else {
Ben Murdoch014dc512016-03-22 12:00:34 +00001700 Condition cc = FlagsConditionToCondition(condition);
1701 __ B(cc, tlabel);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001702 }
Emily Bernier958fae72015-03-24 16:35:39 -04001703 if (!branch->fallthru) __ B(flabel); // no fallthru to flabel.
1704}
1705
1706
Ben Murdoch014dc512016-03-22 12:00:34 +00001707void CodeGenerator::AssembleArchJump(RpoNumber target) {
Emily Bernier958fae72015-03-24 16:35:39 -04001708 if (!IsNextInAssemblyOrder(target)) __ B(GetLabel(target));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001709}
1710
1711
1712// Assemble boolean materializations after this instruction.
1713void CodeGenerator::AssembleArchBoolean(Instruction* instr,
1714 FlagsCondition condition) {
1715 Arm64OperandConverter i(this, instr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001716
1717 // Materialize a full 64-bit 1 or 0 value. The result register is always the
1718 // last output of the instruction.
Ben Murdoch014dc512016-03-22 12:00:34 +00001719 DCHECK_NE(0u, instr->OutputCount());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001720 Register reg = i.OutputRegister(instr->OutputCount() - 1);
Ben Murdoch014dc512016-03-22 12:00:34 +00001721 Condition cc = FlagsConditionToCondition(condition);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001722 __ Cset(reg, cc);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001723}
1724
1725
Ben Murdoch014dc512016-03-22 12:00:34 +00001726void CodeGenerator::AssembleArchLookupSwitch(Instruction* instr) {
1727 Arm64OperandConverter i(this, instr);
1728 Register input = i.InputRegister32(0);
1729 for (size_t index = 2; index < instr->InputCount(); index += 2) {
1730 __ Cmp(input, i.InputInt32(index + 0));
1731 __ B(eq, GetLabel(i.InputRpo(index + 1)));
1732 }
1733 AssembleArchJump(i.InputRpo(1));
1734}
1735
1736
1737void CodeGenerator::AssembleArchTableSwitch(Instruction* instr) {
1738 Arm64OperandConverter i(this, instr);
1739 UseScratchRegisterScope scope(masm());
1740 Register input = i.InputRegister32(0);
1741 Register temp = scope.AcquireX();
1742 size_t const case_count = instr->InputCount() - 2;
1743 Label table;
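  // Emit an inline jump table of single branch instructions and jump into it
  // at &table + input * kInstructionSize; pools are blocked so the table
  // stays contiguous.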
1744 __ Cmp(input, case_count);
1745 __ B(hs, GetLabel(i.InputRpo(1)));
1746 __ Adr(temp, &table);
1747 __ Add(temp, temp, Operand(input, UXTW, 2));
1748 __ Br(temp);
1749 __ StartBlockPools();
1750 __ Bind(&table);
1751 for (size_t index = 0; index < case_count; ++index) {
1752 __ B(GetLabel(i.InputRpo(index + 2)));
1753 }
1754 __ EndBlockPools();
1755}
1756
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001757CodeGenerator::CodeGenResult CodeGenerator::AssembleDeoptimizerCall(
Ben Murdoch014dc512016-03-22 12:00:34 +00001758 int deoptimization_id, Deoptimizer::BailoutType bailout_type) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001759 Address deopt_entry = Deoptimizer::GetDeoptimizationEntry(
Ben Murdoch014dc512016-03-22 12:00:34 +00001760 isolate(), deoptimization_id, bailout_type);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001761 if (deopt_entry == nullptr) return kTooManyDeoptimizationBailouts;
Ben Murdochf91f0612016-11-29 16:50:11 +00001762 DeoptimizeReason deoptimization_reason =
1763 GetDeoptimizationReason(deoptimization_id);
1764 __ RecordDeoptReason(deoptimization_reason, 0, deoptimization_id);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001765 __ Call(deopt_entry, RelocInfo::RUNTIME_ENTRY);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001766 return kSuccess;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001767}
1768
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001769void CodeGenerator::FinishFrame(Frame* frame) {
1770 frame->AlignFrame(16);
1771 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
1772
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001773 if (descriptor->UseNativeStack() || descriptor->IsCFunctionCall()) {
1774 __ SetStackPointer(csp);
1775 } else {
1776 __ SetStackPointer(jssp);
1777 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001778
1779 // Save FP registers.
1780 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
1781 descriptor->CalleeSavedFPRegisters());
1782 int saved_count = saves_fp.Count();
1783 if (saved_count != 0) {
1784 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
1785 frame->AllocateSavedCalleeRegisterSlots(saved_count *
1786 (kDoubleSize / kPointerSize));
1787 }
1788
1789 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
1790 descriptor->CalleeSavedRegisters());
1791 saved_count = saves.Count();
1792 if (saved_count != 0) {
1793 frame->AllocateSavedCalleeRegisterSlots(saved_count);
1794 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001795}
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001796
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001797void CodeGenerator::AssembleConstructFrame() {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001798 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001799 if (descriptor->UseNativeStack()) {
1800 __ AssertCspAligned();
Ben Murdoch014dc512016-03-22 12:00:34 +00001801 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001802
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001803 if (frame_access_state()->has_frame()) {
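    // Pick the prologue that matches the call kind: JS function calls use the
    // (possibly pre-aged) full prologue, C function calls push lr/fp and
    // claim the spill area by hand, and everything else uses the stub
    // prologue.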
1804 if (descriptor->IsJSFunctionCall()) {
1805 DCHECK(!descriptor->UseNativeStack());
1806 __ Prologue(this->info()->GeneratePreagedPrologue());
1807 } else {
1808 if (descriptor->IsCFunctionCall()) {
1809 __ Push(lr, fp);
1810 __ Mov(fp, masm_.StackPointer());
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001811 __ Claim(frame()->GetSpillSlotCount());
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001812 } else {
1813 __ StubPrologue(info()->GetOutputStackFrameType(),
1814 frame()->GetTotalFrameSlotCount());
1815 }
1816 }
Ben Murdochf91f0612016-11-29 16:50:11 +00001817
1818 if (!info()->GeneratePreagedPrologue()) {
1819 unwinding_info_writer_.MarkFrameConstructed(__ pc_offset());
1820 }
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001821 }
1822
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001823 int shrink_slots = frame()->GetSpillSlotCount();
1824
Ben Murdoch014dc512016-03-22 12:00:34 +00001825 if (info()->is_osr()) {
1826 // TurboFan OSR-compiled functions cannot be entered directly.
1827 __ Abort(kShouldNotDirectlyEnterOsrFunction);
1828
1829 // Unoptimized code jumps directly to this entrypoint while the unoptimized
1830 // frame is still on the stack. Optimized code uses OSR values directly from
1831 // the unoptimized frame. Thus, all that needs to be done is to allocate the
1832 // remaining stack slots.
1833 if (FLAG_code_comments) __ RecordComment("-- OSR entrypoint --");
1834 osr_pc_offset_ = __ pc_offset();
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001835 shrink_slots -= OsrHelper(info()).UnoptimizedFrameSlots();
Ben Murdoch014dc512016-03-22 12:00:34 +00001836 }
1837
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001838 if (descriptor->IsJSFunctionCall()) {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001839 __ Claim(shrink_slots);
Ben Murdoch014dc512016-03-22 12:00:34 +00001840 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001841
1842 // Save FP registers.
1843 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
1844 descriptor->CalleeSavedFPRegisters());
1845 int saved_count = saves_fp.Count();
1846 if (saved_count != 0) {
1847 DCHECK(saves_fp.list() == CPURegList::GetCalleeSavedFP().list());
1848 __ PushCPURegList(saves_fp);
Ben Murdoch014dc512016-03-22 12:00:34 +00001849 }
1850 // Save registers.
1851 // TODO(palfia): TF save list is not in sync with
1852 // CPURegList::GetCalleeSaved(): x30 is missing.
1853 // DCHECK(saves.list() == CPURegList::GetCalleeSaved().list());
1854 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
1855 descriptor->CalleeSavedRegisters());
1856 saved_count = saves.Count();
1857 if (saved_count != 0) {
1858 __ PushCPURegList(saves);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001859 }
1860}
1861
1862
1863void CodeGenerator::AssembleReturn() {
1864 CallDescriptor* descriptor = linkage()->GetIncomingDescriptor();
Ben Murdoch014dc512016-03-22 12:00:34 +00001865
1866 // Restore registers.
1867 CPURegList saves = CPURegList(CPURegister::kRegister, kXRegSizeInBits,
1868 descriptor->CalleeSavedRegisters());
1869 if (saves.Count() != 0) {
1870 __ PopCPURegList(saves);
1871 }
1872
1873 // Restore fp registers.
1874 CPURegList saves_fp = CPURegList(CPURegister::kFPRegister, kDRegSizeInBits,
1875 descriptor->CalleeSavedFPRegisters());
1876 if (saves_fp.Count() != 0) {
1877 __ PopCPURegList(saves_fp);
1878 }
1879
Ben Murdochf91f0612016-11-29 16:50:11 +00001880 unwinding_info_writer_.MarkBlockWillExit();
1881
Ben Murdoch014dc512016-03-22 12:00:34 +00001882 int pop_count = static_cast<int>(descriptor->StackParameterCount());
1883 if (descriptor->IsCFunctionCall()) {
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001884 AssembleDeconstructFrame();
1885 } else if (frame_access_state()->has_frame()) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001886 // Canonicalize JSFunction return sites for now.
1887 if (return_label_.is_bound()) {
1888 __ B(&return_label_);
1889 return;
1890 } else {
1891 __ Bind(&return_label_);
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001892 AssembleDeconstructFrame();
Ben Murdoch014dc512016-03-22 12:00:34 +00001893 if (descriptor->UseNativeStack()) {
Ben Murdoch109988c2016-05-18 11:27:45 +01001894        pop_count += (pop_count & 1);  // Keep csp 16-byte aligned.
Ben Murdoch014dc512016-03-22 12:00:34 +00001895 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001896 }
1897 } else if (descriptor->UseNativeStack()) {
Ben Murdoch109988c2016-05-18 11:27:45 +01001898    pop_count += (pop_count & 1);  // Keep csp 16-byte aligned.
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001899 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001900 __ Drop(pop_count);
Ben Murdoch3b9bc312016-06-02 14:46:10 +01001901
1902 if (descriptor->UseNativeStack()) {
1903 __ AssertCspAligned();
1904 }
Ben Murdoch014dc512016-03-22 12:00:34 +00001905 __ Ret();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001906}
1907
1908
1909void CodeGenerator::AssembleMove(InstructionOperand* source,
1910 InstructionOperand* destination) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001911 Arm64OperandConverter g(this, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001912 // Dispatch on the source and destination operand kinds. Not all
1913 // combinations are possible.
1914 if (source->IsRegister()) {
1915 DCHECK(destination->IsRegister() || destination->IsStackSlot());
1916 Register src = g.ToRegister(source);
1917 if (destination->IsRegister()) {
1918 __ Mov(g.ToRegister(destination), src);
1919 } else {
1920 __ Str(src, g.ToMemOperand(destination, masm()));
1921 }
1922 } else if (source->IsStackSlot()) {
1923 MemOperand src = g.ToMemOperand(source, masm());
1924 DCHECK(destination->IsRegister() || destination->IsStackSlot());
1925 if (destination->IsRegister()) {
1926 __ Ldr(g.ToRegister(destination), src);
1927 } else {
1928 UseScratchRegisterScope scope(masm());
1929 Register temp = scope.AcquireX();
1930 __ Ldr(temp, src);
1931 __ Str(temp, g.ToMemOperand(destination, masm()));
1932 }
1933 } else if (source->IsConstant()) {
Emily Bernier958fae72015-03-24 16:35:39 -04001934 Constant src = g.ToConstant(ConstantOperand::cast(source));
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001935 if (destination->IsRegister() || destination->IsStackSlot()) {
1936 UseScratchRegisterScope scope(masm());
1937 Register dst = destination->IsRegister() ? g.ToRegister(destination)
1938 : scope.AcquireX();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001939 if (src.type() == Constant::kHeapObject) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001940 Handle<HeapObject> src_object = src.ToHeapObject();
1941 Heap::RootListIndex index;
Ben Murdochf91f0612016-11-29 16:50:11 +00001942 if (IsMaterializableFromRoot(src_object, &index)) {
Ben Murdoch014dc512016-03-22 12:00:34 +00001943 __ LoadRoot(dst, index);
1944 } else {
1945 __ LoadObject(dst, src_object);
1946 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001947 } else {
1948 __ Mov(dst, g.ToImmediate(source));
1949 }
1950 if (destination->IsStackSlot()) {
1951 __ Str(dst, g.ToMemOperand(destination, masm()));
1952 }
Emily Bernier958fae72015-03-24 16:35:39 -04001953 } else if (src.type() == Constant::kFloat32) {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001954 if (destination->IsFPRegister()) {
Emily Bernier958fae72015-03-24 16:35:39 -04001955 FPRegister dst = g.ToDoubleRegister(destination).S();
1956 __ Fmov(dst, src.ToFloat32());
1957 } else {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001958 DCHECK(destination->IsFPStackSlot());
Emily Bernier958fae72015-03-24 16:35:39 -04001959 UseScratchRegisterScope scope(masm());
1960 FPRegister temp = scope.AcquireS();
1961 __ Fmov(temp, src.ToFloat32());
1962 __ Str(temp, g.ToMemOperand(destination, masm()));
1963 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001964 } else {
Emily Bernier958fae72015-03-24 16:35:39 -04001965 DCHECK_EQ(Constant::kFloat64, src.type());
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001966 if (destination->IsFPRegister()) {
Emily Bernier958fae72015-03-24 16:35:39 -04001967 FPRegister dst = g.ToDoubleRegister(destination);
1968 __ Fmov(dst, src.ToFloat64());
1969 } else {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001970 DCHECK(destination->IsFPStackSlot());
Emily Bernier958fae72015-03-24 16:35:39 -04001971 UseScratchRegisterScope scope(masm());
1972 FPRegister temp = scope.AcquireD();
1973 __ Fmov(temp, src.ToFloat64());
1974 __ Str(temp, g.ToMemOperand(destination, masm()));
1975 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001976 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001977 } else if (source->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001978 FPRegister src = g.ToDoubleRegister(source);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001979 if (destination->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001980 FPRegister dst = g.ToDoubleRegister(destination);
1981 __ Fmov(dst, src);
1982 } else {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001983 DCHECK(destination->IsFPStackSlot());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001984 __ Str(src, g.ToMemOperand(destination, masm()));
1985 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001986 } else if (source->IsFPStackSlot()) {
1987 DCHECK(destination->IsFPRegister() || destination->IsFPStackSlot());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001988 MemOperand src = g.ToMemOperand(source, masm());
Ben Murdochbcf72ee2016-08-08 18:44:38 +01001989 if (destination->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00001990 __ Ldr(g.ToDoubleRegister(destination), src);
1991 } else {
1992 UseScratchRegisterScope scope(masm());
1993 FPRegister temp = scope.AcquireD();
1994 __ Ldr(temp, src);
1995 __ Str(temp, g.ToMemOperand(destination, masm()));
1996 }
1997 } else {
1998 UNREACHABLE();
1999 }
2000}
2001
2002
2003void CodeGenerator::AssembleSwap(InstructionOperand* source,
2004 InstructionOperand* destination) {
Ben Murdoch014dc512016-03-22 12:00:34 +00002005 Arm64OperandConverter g(this, nullptr);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002006 // Dispatch on the source and destination operand kinds. Not all
2007 // combinations are possible.
2008 if (source->IsRegister()) {
2009 // Register-register.
2010 UseScratchRegisterScope scope(masm());
2011 Register temp = scope.AcquireX();
2012 Register src = g.ToRegister(source);
2013 if (destination->IsRegister()) {
2014 Register dst = g.ToRegister(destination);
2015 __ Mov(temp, src);
2016 __ Mov(src, dst);
2017 __ Mov(dst, temp);
2018 } else {
2019 DCHECK(destination->IsStackSlot());
2020 MemOperand dst = g.ToMemOperand(destination, masm());
2021 __ Mov(temp, src);
2022 __ Ldr(src, dst);
2023 __ Str(temp, dst);
2024 }
Ben Murdochbcf72ee2016-08-08 18:44:38 +01002025 } else if (source->IsStackSlot() || source->IsFPStackSlot()) {
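    // Stack-to-stack swaps (general or FP slots) go through two scratch D
    // registers, which are wide enough for any 64-bit slot.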
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002026 UseScratchRegisterScope scope(masm());
Emily Bernier958fae72015-03-24 16:35:39 -04002027 DoubleRegister temp_0 = scope.AcquireD();
2028 DoubleRegister temp_1 = scope.AcquireD();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002029 MemOperand src = g.ToMemOperand(source, masm());
2030 MemOperand dst = g.ToMemOperand(destination, masm());
2031 __ Ldr(temp_0, src);
2032 __ Ldr(temp_1, dst);
2033 __ Str(temp_0, dst);
2034 __ Str(temp_1, src);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01002035 } else if (source->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002036 UseScratchRegisterScope scope(masm());
2037 FPRegister temp = scope.AcquireD();
2038 FPRegister src = g.ToDoubleRegister(source);
Ben Murdochbcf72ee2016-08-08 18:44:38 +01002039 if (destination->IsFPRegister()) {
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002040 FPRegister dst = g.ToDoubleRegister(destination);
2041 __ Fmov(temp, src);
2042 __ Fmov(src, dst);
2043 __ Fmov(dst, temp);
2044 } else {
Ben Murdochbcf72ee2016-08-08 18:44:38 +01002045 DCHECK(destination->IsFPStackSlot());
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002046 MemOperand dst = g.ToMemOperand(destination, masm());
2047 __ Fmov(temp, src);
2048 __ Ldr(src, dst);
2049 __ Str(temp, dst);
2050 }
2051 } else {
2052 // No other combinations are possible.
2053 UNREACHABLE();
2054 }
2055}
2056
2057
Ben Murdoch014dc512016-03-22 12:00:34 +00002058void CodeGenerator::AssembleJumpTable(Label** targets, size_t target_count) {
2059 // On 64-bit ARM we emit the jump tables inline.
2060 UNREACHABLE();
2061}
2062
2063
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002064void CodeGenerator::EnsureSpaceForLazyDeopt() {
Ben Murdoch014dc512016-03-22 12:00:34 +00002065 if (!info()->ShouldEnsureSpaceForLazyDeopt()) {
2066 return;
2067 }
2068
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002069 int space_needed = Deoptimizer::patch_size();
Ben Murdoch014dc512016-03-22 12:00:34 +00002070 // Ensure that we have enough space after the previous lazy-bailout
2071 // instruction for patching the code here.
2072 intptr_t current_pc = masm()->pc_offset();
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002073
Ben Murdoch014dc512016-03-22 12:00:34 +00002074 if (current_pc < (last_lazy_deopt_pc_ + space_needed)) {
2075 intptr_t padding_size = last_lazy_deopt_pc_ + space_needed - current_pc;
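    // Pad the gap with nop instructions so the lazy deopt patch site does not
    // overlap the previous one.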
2076 DCHECK((padding_size % kInstructionSize) == 0);
2077 InstructionAccurateScope instruction_accurate(
2078 masm(), padding_size / kInstructionSize);
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002079
Ben Murdoch014dc512016-03-22 12:00:34 +00002080 while (padding_size > 0) {
2081 __ nop();
2082 padding_size -= kInstructionSize;
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002083 }
2084 }
Ben Murdochb8a8cc12014-11-26 15:28:44 +00002085}
2086
2087#undef __
2088
2089} // namespace compiler
2090} // namespace internal
2091} // namespace v8